Actual source code: pbvec.c

/*$Id: pbvec.c,v 1.173 2001/09/12 03:26:59 bsmith Exp $*/

/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/impls/mpi/pvecimpl.h"

/*
       Note this code is very similar to VecPublish_Seq()
*/
static int VecPublish_MPI(PetscObject obj)
{
#if defined(PETSC_HAVE_AMS)
  Vec          v = (Vec) obj;
  Vec_MPI      *s = (Vec_MPI*)v->data;
  int          ierr,(*f)(AMS_Memory,char *,Vec);
#endif

#if defined(PETSC_HAVE_AMS)
  /* if it is already published then return */
  if (v->amem >= 0) return(0);

  ierr = PetscObjectPublishBaseBegin(obj);CHKERRQ(ierr);
  ierr = AMS_Memory_add_field((AMS_Memory)v->amem,"values",s->array,v->n,AMS_DOUBLE,AMS_READ,
                              AMS_DISTRIBUTED,AMS_REDUCT_UNDEF);CHKERRQ(ierr);

  /*
     If the vector knows its "layout" let it set it, otherwise it defaults
     to the correct 1d distribution
  */
  ierr = PetscObjectQueryFunction(obj,"AMSSetFieldBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)((AMS_Memory)v->amem,"values",v);CHKERRQ(ierr);
  }
  ierr = PetscObjectPublishBaseEnd(obj);CHKERRQ(ierr);
#endif
  return(0);
}

int VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar  sum,work;
  int          ierr;

  /* local dot product followed by a global sum over the communicator */
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  return(0);
}

int VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar  sum,work;
  int          ierr;

  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  return(0);
}

int VecSetOption_MPI(Vec v,VecOption op)
{
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_TRUE;
  } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_FALSE;
  }
  return(0);
}
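
/*
   Usage sketch (illustrative; not part of the original source): when each process
   sets only entries it owns, stashing can be switched off so that assembly
   requires no off-process communication.
*/
#if 0
static int FillLocalEntriesOnly(Vec v)
{
  int         i,rstart,rend,ierr;
  PetscScalar value;

  ierr = VecSetOption(v,VEC_IGNORE_OFF_PROC_ENTRIES);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(v,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    value = (PetscScalar)i;
    ierr  = VecSetValues(v,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(v);CHKERRQ(ierr); /* no off-process values to exchange */
  ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
  return(0);
}
#endif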
EXTERN int VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN int VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

int VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  int     ierr;
  Vec_MPI *v = (Vec_MPI *)vin->data;

  v->array = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  return(0);
}

extern int VecLoad_Binary(PetscViewer,const VecType, Vec*);

static struct _VecOps DvOps = { VecDuplicate_MPI,
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq,
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI,
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
int VecCreate_MPI_Private(Vec v,int nghost,const PetscScalar array[],PetscMap map)
{
  Vec_MPI *s;
  int     ierr,size,rank;

  ierr = MPI_Comm_size(v->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(v->comm,&rank);CHKERRQ(ierr);

  v->bops->publish   = VecPublish_MPI;
  PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));
  ierr = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  ierr = PetscMemzero(s,sizeof(Vec_MPI));CHKERRQ(ierr);
  ierr = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (array) {
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    int n              = ((v->n+nghost) > 0) ? v->n+nghost : 1;
    ierr = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    ierr = PetscMemzero(s->array,v->n*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;

  if (!v->map) {
    if (!map) {
      ierr = PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);CHKERRQ(ierr);
    } else {
      v->map = map;
      ierr = PetscObjectReference((PetscObject)map);CHKERRQ(ierr);
    }
  }
  /* create the stashes. The block size for bstash is set later when
     VecSetValuesBlocked() is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  return(0);
}

EXTERN_C_BEGIN
int VecCreate_MPI(Vec vv)
{
  int ierr;

  if (vv->bs > 0) {
    ierr = PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);CHKERRQ(ierr);
  } else {
    ierr = PetscSplitOwnership(vv->comm,&vv->n,&vv->N);CHKERRQ(ierr);
  }
  ierr = VecCreate_MPI_Private(vv,0,0,PETSC_NULL);CHKERRQ(ierr);
  return(0);
}
EXTERN_C_END
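
/*
   Creation-path sketch (illustrative): VecCreate_MPI() is not called directly;
   it is reached through the type registry via VecSetType(), which is also what
   the VecCreateMPI() convenience routine does internally.
*/
#if 0
static int CreateParallelVector(MPI_Comm comm,int N,Vec *v)
{
  int ierr;

  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,PETSC_DECIDE,N);CHKERRQ(ierr); /* local sizes chosen by PetscSplitOwnership() */
  ierr = VecSetType(*v,VECMPI);CHKERRQ(ierr);          /* dispatches to VecCreate_MPI() above */
  return(0);
}
#endif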

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to set the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
int VecCreateMPIWithArray(MPI_Comm comm,int n,int N,const PetscScalar array[],Vec *vv)
{
  int ierr;

  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);CHKERRQ(ierr);
  return(0);
}
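
/*
   Usage sketch (illustrative): wrapping caller-managed storage. The array must
   remain valid for the lifetime of the vector; VecDestroy() will not free it.
*/
#if 0
static int WrapUserStorage(void)
{
  PetscScalar storage[5]; /* one slot per local entry; local length n = 5 here */
  Vec         v;
  int         ierr;

  ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,storage,&v);CHKERRQ(ierr);
  /* ... vector operations read and write storage[] directly ... */
  ierr = VecDestroy(v);CHKERRQ(ierr); /* storage[] itself is left untouched */
  return(0);
}
#endif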

/*@C
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray(), or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostGetLocalForm(Vec g,Vec *l)
{
  int        ierr;
  PetscTruth isseq,ismpi;

  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(1,"Vector type %s does not have local representation",g->type_name);
  }
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  return(0);
}
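
/*
   Usage sketch (illustrative): reading ghost values through the local form. The
   local form shares its array with the global vector: entries 0..n-1 are the
   owned values and entries n..n+nghost-1 are the ghosts, in the order the ghost
   indices were given at creation time.
*/
#if 0
static int InspectGhosts(Vec g,int n,int nghost)
{
  Vec         l;
  PetscScalar *a;
  int         i,ierr;

  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);
  ierr = VecGetArray(l,&a);CHKERRQ(ierr);
  for (i=0; i<nghost; i++) { /* assumes a real-scalar build for the %g format */
    PetscPrintf(PETSC_COMM_SELF,"ghost %d = %g\n",i,(double)a[n+i]);
  }
  ierr = VecRestoreArray(l,&a);CHKERRQ(ierr);
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  return(0);
}
#endif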

/*@C
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not update the ghost values; it simply releases the
    local representation obtained with VecGhostGetLocalForm().

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
int VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  int ierr;

  ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
  return(0);
}

/*@
   VecGhostUpdateBegin - Begins the vector scatter that updates the vector,
   either from the local representation to the global or from the global
   representation to the local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;
  int     ierr;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  return(0);
}

/*@
   VecGhostUpdateEnd - Ends the vector scatter that updates the vector,
   either from the local representation to the global or from the global
   representation to the local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;
  int     ierr;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  return(0);
}
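
/*
   Worked sketch (illustrative): the accumulate-then-refresh cycle from the notes
   above as a single routine, with the error checking spelled out.
*/
#if 0
static int GhostAccumulateAndRefresh(Vec g)
{
  int ierr;

  /* fold each process's ghost contributions into the owning process's entries */
  ierr = VecGhostUpdateBegin(g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* then push the now-correct owned values back out to every ghost region */
  ierr = VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  return(0);
}
#endif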

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n+nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
int VecCreateGhostWithArray(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
{
  int          ierr;
  Vec_MPI      *w;
  PetscScalar  *larray;
  IS           from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localrep);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localupdate);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  return(0);
}

/*@C
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
int VecCreateGhost(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],Vec *vv)
{
  int ierr;

  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  return(0);
}
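
/*
   Usage sketch (illustrative; the sizes and ghost indices are invented for the
   example): on two processes, each process owns two entries and ghosts one
   entry owned by its neighbor.
*/
#if 0
static int CreateSmallGhostedVector(int rank,Vec *g)
{
  int ghosts[1],ierr;

  ghosts[0] = rank ? 1 : 2; /* global index of the entry to mirror locally */
  ierr = VecCreateGhost(PETSC_COMM_WORLD,2,PETSC_DECIDE,1,ghosts,g);CHKERRQ(ierr);
  /* fill with VecSetValues() + VecAssemblyBegin/End(), then refresh the ghosts
     with VecGhostUpdateBegin/End(..,INSERT_VALUES,SCATTER_FORWARD) */
  return(0);
}
#endif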

int VecDuplicate_MPI(Vec win,Vec *v)
{
  int          ierr;
  Vec_MPI      *vw,*w = (Vec_MPI *)win->data;
  PetscScalar  *array;
#if defined(PETSC_HAVE_AMS)
  int          (*f)(AMS_Memory,char *,Vec);
#endif

  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->n,win->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0,win->map);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->bs        = win->bs;
  (*v)->bstash.bs = win->bstash.bs;

#if defined(PETSC_HAVE_AMS)
  /*
     If the vector knows its "layout" let it set it, otherwise it defaults
     to the correct 1d distribution
  */
  ierr = PetscObjectQueryFunction((PetscObject)(*v),"AMSSetFieldBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)((AMS_Memory)(*v)->amem,"values",*v);CHKERRQ(ierr);
  }
#endif
  return(0);
}
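
/*
   Note (illustrative): since VecDuplicate_MPI() recreates the local
   representation and references the parent's ghost scatter, duplicates of
   ghosted vectors are themselves ghosted.
*/
#if 0
static int DuplicateGhosted(Vec g)
{
  Vec l,gdup;
  int ierr;

  ierr = VecDuplicate(g,&gdup);CHKERRQ(ierr);         /* same layout, same ghosting */
  ierr = VecGhostGetLocalForm(gdup,&l);CHKERRQ(ierr); /* succeeds because localrep was duplicated */
  ierr = VecGhostRestoreLocalForm(gdup,&l);CHKERRQ(ierr);
  ierr = VecDestroy(gdup);CHKERRQ(ierr);
  return(0);
}
#endif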

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n+nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
int VecCreateGhostBlockWithArray(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
{
  int          ierr;
  Vec_MPI      *w;
  PetscScalar  *larray;
  IS           from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localrep);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localupdate);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  return(0);
}

/*@C
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
int VecCreateGhostBlock(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],Vec *vv)
{
  int ierr;

  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  return(0);
}
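
/*
   Usage sketch (illustrative; sizes invented): block-indexed ghosting with
   bs = 2. Each process owns two blocks (n = 4 scalar entries) and ghosts one
   block of its neighbor, so the local form carries 4 + 2 entries. Note that
   ghosts[] holds BLOCK indices, not scalar indices.
*/
#if 0
static int CreateBlockGhostedVector(int rank,Vec *g)
{
  int ghosts[1],ierr;

  ghosts[0] = rank ? 0 : 2; /* global block index to mirror locally */
  ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,g);CHKERRQ(ierr);
  return(0);
}
#endif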

/*
    These introduce a ghosted vector where the ghosting is determined by the call to
    VecSetLocalToGlobalMapping()
*/

int VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  int     ierr;
  Vec_MPI *v = (Vec_MPI *)vv->data;

  v->nghost = map->n - vv->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  PetscLogObjectParent(vv,v->localrep);

  return(0);
}


int VecSetValuesLocal_FETI(Vec vv,int n,const int *ix,const PetscScalar *values,InsertMode mode)
{
  int      ierr;
  Vec_MPI *v = (Vec_MPI *)vv->data;

  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  return(0);
}

EXTERN_C_BEGIN
int VecCreate_FETI(Vec vv)
{
  int ierr;

  ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  return(0);
}
EXTERN_C_END
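
/*
   Usage sketch (illustrative; the string under which VecCreate_FETI() is
   registered is an assumption here): the FETI variant derives its ghosting from
   the local-to-global mapping instead of an explicit ghost-index list.
*/
#if 0
static int UseFetiVector(Vec v,MPI_Comm comm,int nlocal,const int indices[])
{
  ISLocalToGlobalMapping map;
  PetscScalar            one = 1.0;
  int                    lidx = 0,ierr;

  ierr = VecSetType(v,"feti");CHKERRQ(ierr); /* assumed registration name */
  ierr = ISLocalToGlobalMappingCreate(comm,nlocal,indices,&map);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(v,map);CHKERRQ(ierr); /* invokes VecSetLocalToGlobalMapping_FETI() */
  ierr = VecSetValuesLocal(v,1,&lidx,&one,INSERT_VALUES);CHKERRQ(ierr);
  return(0);
}
#endif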