Actual source code: pdvec.c
1: /* $Id: pdvec.c,v 1.154 2001/09/11 16:32:01 bsmith Exp $*/
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include "src/vec/impls/mpi/pvecimpl.h"
6: #if defined(PETSC_HAVE_PNETCDF)
7: EXTERN_C_BEGIN
8: #include "pnetcdf.h"
9: EXTERN_C_END
10: #endif
14: int VecDestroy_MPI(Vec v)
15: {
16: Vec_MPI *x = (Vec_MPI*)v->data;
17: int ierr;
21: /* if memory was published with AMS then destroy it */
22: PetscObjectDepublish(v);
24: #if defined(PETSC_USE_LOG)
25: PetscLogObjectState((PetscObject)v,"Length=%d",v->N);
26: #endif
27: if (x->array_allocated) {PetscFree(x->array_allocated);}
29: /* Destroy local representation of vector if it exists */
30: if (x->localrep) {
31: VecDestroy(x->localrep);
32: if (x->localupdate) {VecScatterDestroy(x->localupdate);}
33: }
34: /* Destroy the stashes: note the order - so that the tags are freed properly */
35: VecStashDestroy_Private(&v->bstash);
36: VecStashDestroy_Private(&v->stash);
37: PetscFree(x);
38: return(0);
39: }
43: int VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
44: {
45: int i,rank,len,work = xin->n,n,j,size,ierr,cnt,tag = ((PetscObject)viewer)->tag;
46: MPI_Status status;
47: PetscScalar *values,*xarray;
48: char *name;
49: PetscViewerFormat format;
52: VecGetArray(xin,&xarray);
53: /* determine maximum message to arrive */
54: MPI_Comm_rank(xin->comm,&rank);
55: MPI_Reduce(&work,&len,1,MPI_INT,MPI_MAX,0,xin->comm);
56: MPI_Comm_size(xin->comm,&size);
58: if (!rank) {
59: PetscMalloc((len+1)*sizeof(PetscScalar),&values);
60: PetscViewerGetFormat(viewer,&format);
61: /*
62: The Matlab format and the default ASCII format are very similar, except that
63: the Matlab format uses %18.16e while the default format uses %g
64: */
65: if (format == PETSC_VIEWER_ASCII_MATLAB) {
66: PetscObjectGetName((PetscObject)xin,&name);
67: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
68: for (i=0; i<xin->n; i++) {
69: #if defined(PETSC_USE_COMPLEX)
70: if (PetscImaginaryPart(xarray[i]) > 0.0) {
71: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
72: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
73: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
74: } else {
75: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
76: }
77: #else
78: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
79: #endif
80: }
81: /* receive and print messages */
82: for (j=1; j<size; j++) {
83: MPI_Recv(values,len,MPIU_SCALAR,j,tag,xin->comm,&status);
84: MPI_Get_count(&status,MPIU_SCALAR,&n);
85: for (i=0; i<n; i++) {
86: #if defined(PETSC_USE_COMPLEX)
87: if (PetscImaginaryPart(values[i]) > 0.0) {
88: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
89: } else if (PetscImaginaryPart(values[i]) < 0.0) {
90: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
91: } else {
92: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
93: }
94: #else
95: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
96: #endif
97: }
98: }
99: PetscViewerASCIIPrintf(viewer,"];\n");
101: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
102: for (i=0; i<xin->n; i++) {
103: #if defined(PETSC_USE_COMPLEX)
104: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
105: #else
106: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
107: #endif
108: }
109: /* receive and print messages */
110: for (j=1; j<size; j++) {
111: MPI_Recv(values,len,MPIU_SCALAR,j,tag,xin->comm,&status);
112: MPI_Get_count(&status,MPIU_SCALAR,&n);
113: for (i=0; i<n; i++) {
114: #if defined(PETSC_USE_COMPLEX)
115: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
116: #else
117: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
118: #endif
119: }
120: }
122: } else {
123: if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
124: cnt = 0;
125: for (i=0; i<xin->n; i++) {
126: if (format == PETSC_VIEWER_ASCII_INDEX) {
127: PetscViewerASCIIPrintf(viewer,"%d: ",cnt++);
128: }
129: #if defined(PETSC_USE_COMPLEX)
130: if (PetscImaginaryPart(xarray[i]) > 0.0) {
131: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
132: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
133: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
134: } else {
135: PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(xarray[i]));
136: }
137: #else
138: PetscViewerASCIIPrintf(viewer,"%g\n",xarray[i]);
139: #endif
140: }
141: /* receive and print messages */
142: for (j=1; j<size; j++) {
143: MPI_Recv(values,len,MPIU_SCALAR,j,tag,xin->comm,&status);
144: MPI_Get_count(&status,MPIU_SCALAR,&n);
145: if (format != PETSC_VIEWER_ASCII_COMMON) {
146: PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
147: }
148: for (i=0; i<n; i++) {
149: if (format == PETSC_VIEWER_ASCII_INDEX) {
150: PetscViewerASCIIPrintf(viewer,"%d: ",cnt++);
151: }
152: #if defined(PETSC_USE_COMPLEX)
153: if (PetscImaginaryPart(values[i]) > 0.0) {
154: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
155: } else if (PetscImaginaryPart(values[i]) < 0.0) {
156: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
157: } else {
158: PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(values[i]));
159: }
160: #else
161: PetscViewerASCIIPrintf(viewer,"%g\n",values[i]);
162: #endif
163: }
164: }
165: }
166: PetscFree(values);
167: } else {
168: /* send values */
169: MPI_Send(xarray,xin->n,MPIU_SCALAR,0,tag,xin->comm);
170: }
171: PetscViewerFlush(viewer);
172: VecRestoreArray(xin,&xarray);
173: return(0);
174: }
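/*
   Illustration only (not part of pdvec.c): with the PETSC_VIEWER_ASCII_MATLAB format
   and a vector that has been named "x", the routine above emits output of the form

       x = [
       1.0000000000000000e+00
       2.0000000000000000e+00
       ];

   whereas the default format prints one %g value per line, preceded by a
   "Process [rank]" banner unless PETSC_VIEWER_ASCII_COMMON is requested.
*/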
178: int VecView_MPI_Binary(Vec xin,PetscViewer viewer)
179: {
180: int rank,ierr,len,work = xin->n,n,j,size,fdes,tag = ((PetscObject)viewer)->tag;
181: MPI_Status status;
182: PetscScalar *values,*xarray;
183: FILE *file;
186: VecGetArray(xin,&xarray);
187: PetscViewerBinaryGetDescriptor(viewer,&fdes);
189: /* determine maximum message to arrive */
190: MPI_Comm_rank(xin->comm,&rank);
191: MPI_Reduce(&work,&len,1,MPI_INT,MPI_MAX,0,xin->comm);
192: MPI_Comm_size(xin->comm,&size);
194: if (!rank) {
195: int cookie = VEC_FILE_COOKIE;
196: PetscBinaryWrite(fdes,&cookie,1,PETSC_INT,0);
197: PetscBinaryWrite(fdes,&xin->N,1,PETSC_INT,0);
198: PetscBinaryWrite(fdes,xarray,xin->n,PETSC_SCALAR,0);
200: PetscMalloc((len+1)*sizeof(PetscScalar),&values);
201: /* receive and print messages */
202: for (j=1; j<size; j++) {
203: MPI_Recv(values,len,MPIU_SCALAR,j,tag,xin->comm,&status);
204: MPI_Get_count(&status,MPIU_SCALAR,&n);
205: PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,0);
206: }
207: PetscFree(values);
208: PetscViewerBinaryGetInfoPointer(viewer,&file);
209: if (file && xin->bs > 1) {
210: if (xin->prefix) {
211: fprintf(file,"-%svecload_block_size %d\n",xin->prefix,xin->bs);
212: } else {
213: fprintf(file,"-vecload_block_size %d\n",xin->bs);
214: }
215: }
216: } else {
217: /* send values */
218: MPI_Send(xarray,xin->n,MPIU_SCALAR,0,tag,xin->comm);
219: }
220: VecRestoreArray(xin,&xarray);
221: return(0);
222: }
226: int VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
227: {
228: int i,rank,size,N = xin->N,*lens,ierr;
229: PetscDraw draw;
230: PetscReal *xx,*yy;
231: PetscDrawLG lg;
232: PetscTruth isnull;
233: PetscScalar *xarray;
236: PetscViewerDrawGetDraw(viewer,0,&draw);
237: PetscDrawIsNull(draw,&isnull);
238: if (isnull) return(0);
240: VecGetArray(xin,&xarray);
241: PetscViewerDrawGetDrawLG(viewer,0,&lg);
242: PetscDrawCheckResizedWindow(draw);
243: MPI_Comm_rank(xin->comm,&rank);
244: MPI_Comm_size(xin->comm,&size);
245: if (!rank) {
246: PetscDrawLGReset(lg);
247: PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
248: for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
249: yy = xx + N;
250: PetscMalloc(size*sizeof(int),&lens);
251: for (i=0; i<size; i++) {
252: lens[i] = xin->map->range[i+1] - xin->map->range[i];
253: }
254: #if !defined(PETSC_USE_COMPLEX)
255: MPI_Gatherv(xarray,xin->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,xin->comm);
256: #else
257: {
258: PetscReal *xr;
259: PetscMalloc((xin->n+1)*sizeof(PetscReal),&xr);
260: for (i=0; i<xin->n; i++) {
261: xr[i] = PetscRealPart(xarray[i]);
262: }
263: MPI_Gatherv(xr,xin->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,xin->comm);
264: PetscFree(xr);
265: }
266: #endif
267: PetscFree(lens);
268: PetscDrawLGAddPoints(lg,N,&xx,&yy);
269: PetscFree(xx);
270: } else {
271: #if !defined(PETSC_USE_COMPLEX)
272: MPI_Gatherv(xarray,xin->n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
273: #else
274: {
275: PetscReal *xr;
276: PetscMalloc((xin->n+1)*sizeof(PetscReal),&xr);
277: for (i=0; i<xin->n; i++) {
278: xr[i] = PetscRealPart(xarray[i]);
279: }
280: MPI_Gatherv(xr,xin->n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
281: PetscFree(xr);
282: }
283: #endif
284: }
285: PetscDrawLGDraw(lg);
286: PetscDrawSynchronizedFlush(draw);
287: VecRestoreArray(xin,&xarray);
288: return(0);
289: }
291: EXTERN_C_BEGIN
294: int VecView_MPI_Draw(Vec xin,PetscViewer viewer)
295: {
296: int i,rank,size,ierr,start,end,tag = ((PetscObject)viewer)->tag;
297: MPI_Status status;
298: PetscReal coors[4],ymin,ymax,xmin,xmax,tmp;
299: PetscDraw draw;
300: PetscTruth isnull;
301: PetscDrawAxis axis;
302: PetscScalar *xarray;
305: PetscViewerDrawGetDraw(viewer,0,&draw);
306: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
308: VecGetArray(xin,&xarray);
309: PetscDrawCheckResizedWindow(draw);
310: xmin = 1.e20; xmax = -1.e20;
311: for (i=0; i<xin->n; i++) {
312: #if defined(PETSC_USE_COMPLEX)
313: if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
314: if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
315: #else
316: if (xarray[i] < xmin) xmin = xarray[i];
317: if (xarray[i] > xmax) xmax = xarray[i];
318: #endif
319: }
320: if (xmin + 1.e-10 > xmax) {
321: xmin -= 1.e-5;
322: xmax += 1.e-5;
323: }
324: MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,xin->comm);
325: MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,xin->comm);
326: MPI_Comm_size(xin->comm,&size);
327: MPI_Comm_rank(xin->comm,&rank);
328: PetscDrawAxisCreate(draw,&axis);
329: PetscLogObjectParent(draw,axis);
330: if (!rank) {
331: PetscDrawClear(draw);
332: PetscDrawFlush(draw);
333: PetscDrawAxisSetLimits(axis,0.0,(double)xin->N,ymin,ymax);
334: PetscDrawAxisDraw(axis);
335: PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
336: }
337: PetscDrawAxisDestroy(axis);
338: MPI_Bcast(coors,4,MPIU_REAL,0,xin->comm);
339: if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
340: /* draw local part of vector */
341: VecGetOwnershipRange(xin,&start,&end);
342: if (rank < size-1) { /* send value to right */
343: MPI_Send(&xarray[xin->n-1],1,MPIU_REAL,rank+1,tag,xin->comm);
344: }
345: for (i=1; i<xin->n; i++) {
346: #if !defined(PETSC_USE_COMPLEX)
347: PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
348: xarray[i],PETSC_DRAW_RED);
349: #else
350: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
351: PetscRealPart(xarray[i]),PETSC_DRAW_RED);
352: #endif
353: }
354: if (rank) { /* receive value from left */
355: MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,xin->comm,&status);
356: #if !defined(PETSC_USE_COMPLEX)
357: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
358: #else
359: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
360: #endif
361: }
362: PetscDrawSynchronizedFlush(draw);
363: PetscDrawPause(draw);
364: VecRestoreArray(xin,&xarray);
365: return(0);
366: }
367: EXTERN_C_END
371: int VecView_MPI_Socket(Vec xin,PetscViewer viewer)
372: {
373: int i,rank,size,N = xin->N,*lens,ierr;
374: PetscScalar *xx,*xarray;
377: VecGetArray(xin,&xarray);
378: MPI_Comm_rank(xin->comm,&rank);
379: MPI_Comm_size(xin->comm,&size);
380: if (!rank) {
381: PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
382: PetscMalloc(size*sizeof(int),&lens);
383: for (i=0; i<size; i++) {
384: lens[i] = xin->map->range[i+1] - xin->map->range[i];
385: }
386: MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,xin->comm);
387: PetscFree(lens);
388: PetscViewerSocketPutScalar(viewer,N,1,xx);
389: PetscFree(xx);
390: } else {
391: MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
392: }
393: VecRestoreArray(xin,&xarray);
394: return(0);
395: }
399: int VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
400: {
401: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
402: int i,rank,size,N = xin->N,*lens,ierr;
403: PetscScalar *xx,*xarray;
406: VecGetArray(xin,&xarray);
407: MPI_Comm_rank(xin->comm,&rank);
408: MPI_Comm_size(xin->comm,&size);
409: if (!rank) {
410: PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
411: PetscMalloc(size*sizeof(int),&lens);
412: for (i=0; i<size; i++) {
413: lens[i] = xin->map->range[i+1] - xin->map->range[i];
414: }
415: MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,xin->comm);
416: PetscFree(lens);
418: PetscObjectName((PetscObject)xin);
419: PetscViewerMatlabPutArray(viewer,N,1,xx,xin->name);
421: PetscFree(xx);
422: } else {
423: MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
424: }
425: VecRestoreArray(xin,&xarray);
426: return(0);
427: #else
429: SETERRQ(1,"Build PETSc with Matlab to use this viewer");
430: #endif
431: }
435: int VecView_MPI_Netcdf(Vec xin,PetscViewer v)
436: {
437: #if defined(PETSC_HAVE_PNETCDF)
438: int n = xin->n,ierr,ncid,xdim,xdim_num=1,xin_id,xstart;
439: MPI_Comm comm = xin->comm;
440: PetscScalar *xarray;
443: #if !defined(PETSC_USE_COMPLEX)
444: VecGetArray(xin,&xarray);
445: PetscViewerNetcdfGetID(v,&ncid);
446: if (ncid < 0) SETERRQ(1,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
447: /* define dimensions */
448: ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->N,&xdim);
449: /* define variables */
450: ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
451: /* leave define mode */
452: ncmpi_enddef(ncid);
453: /* store the vector */
454: VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
455: ncmpi_put_vara_double_all(ncid,xin_id,(const size_t*)&xstart,(const size_t*)&n,xarray);
456: VecRestoreArray(xin,&xarray);
457: #else
458: PetscPrintf(PETSC_COMM_WORLD,"NetCDF viewer not supported for complex numbers\n");
459: #endif
460: return(0);
461: #else /* !defined(PETSC_HAVE_PNETCDF) */
463: SETERRQ(1,"Build PETSc with NetCDF to use this viewer");
464: #endif
465: }
469: int VecView_MPI_HDF4_Ex(Vec X, PetscViewer viewer, int d, int *dims)
470: {
471: #if defined(PETSC_HAVE_HDF4) && !defined(PETSC_USE_COMPLEX)
472: int rank, ierr, len, i, j, k, size, cur, bs, n, N;
473: int tag = ((PetscObject)viewer)->tag;
474: MPI_Status status;
475: PetscScalar *x;
476: float *xlf, *xf;
480: bs = X->bs > 0 ? X->bs : 1;
481: N = X->N / bs;
482: n = X->n / bs;
484: /* For now, always convert to float */
485: PetscMalloc(N * sizeof(float), &xf);
486: PetscMalloc(n * sizeof(float), &xlf);
488: MPI_Comm_rank(X->comm, &rank);
489: MPI_Comm_size(X->comm, &size);
491: VecGetArray(X, &x);
493: for (k = 0; k < bs; k++) {
494: for (i = 0; i < n; i++) {
495: xlf[i] = (float) x[i*bs + k];
496: }
497: if (rank == 0) {
498: cur = 0;
499: PetscMemcpy(xf + cur, xlf, n * sizeof(float));
500: cur += n;
501: for (j = 1; j < size; j++) {
502: MPI_Recv(xf + cur, N - cur, MPI_FLOAT, j, tag, X->comm,
503: &status);
504: MPI_Get_count(&status, MPI_FLOAT, &len);
505: cur += len;
506: }
507: if (cur != N) {
508: SETERRQ2(1, "Gathered %d values but global vector length is %d", cur, N);
509: }
510: PetscViewerHDF4WriteSDS(viewer, xf, 2, dims, bs);
511: } else {
512: MPI_Send(xlf, n, MPI_FLOAT, 0, tag, X->comm);
513: }
514: }
515: VecRestoreArray(X, &x);
516: PetscFree(xlf);
517: PetscFree(xf);
518: return(0);
519: #else /* !defined(PETSC_HAVE_HDF4) */
521: SETERRQ(1,"Build PETSc with HDF4 to use this viewer");
522:
523: #endif
524: }
528: int VecView_MPI_HDF4(Vec xin,PetscViewer viewer)
529: {
530: #if defined(PETSC_HAVE_HDF4) && !defined(PETSC_USE_COMPLEX)
531: int ierr, bs, dims[1];
533: bs = xin->bs > 0 ? xin->bs : 1;
534: dims[0] = xin->N / bs;
535: VecView_MPI_HDF4_Ex(xin, viewer, 1, dims);
536: return(0);
537: #else /* !defined(PETSC_HAVE_HDF4) */
539: SETERRQ(1,"Build PETSc with HDF4 to use this viewer");
540: #endif
541: }
545: int VecView_MPI(Vec xin,PetscViewer viewer)
546: {
547: int ierr;
548: PetscTruth isascii,issocket,isbinary,isdraw,ismathematica,isnetcdf,ishdf4,ismatlab;
551: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
552: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
553: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
554: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
555: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
556: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
557: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF4,&ishdf4);
558: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
559: if (isascii){
560: VecView_MPI_ASCII(xin,viewer);
561: } else if (issocket) {
562: VecView_MPI_Socket(xin,viewer);
563: } else if (isbinary) {
564: VecView_MPI_Binary(xin,viewer);
565: } else if (isdraw) {
566: PetscViewerFormat format;
568: PetscViewerGetFormat(viewer,&format);
569: if (format == PETSC_VIEWER_DRAW_LG) {
570: VecView_MPI_Draw_LG(xin,viewer);
571: } else {
572: VecView_MPI_Draw(xin,viewer);
573: }
574: } else if (ismathematica) {
575: PetscViewerMathematicaPutVector(viewer,xin);
576: } else if (isnetcdf) {
577: VecView_MPI_Netcdf(xin,viewer);
578: } else if (ishdf4) {
579: VecView_MPI_HDF4(xin,viewer);
580: } else if (ismatlab) {
581: VecView_MPI_Matlab(xin,viewer);
582: } else {
583: SETERRQ1(1,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
584: }
585: return(0);
586: }
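/*
   A minimal usage sketch (illustration only; the vector x and the error checking are
   assumptions, not taken from this file) of how the dispatch above is reached through
   the generic VecView() interface:

     ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);   (ASCII branch above)
     ierr = VecView(x,PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr);     (draw branch above)

   The viewer's type string selects the branch; an unrecognized type falls through to
   the SETERRQ1() above.
*/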
590: int VecGetSize_MPI(Vec xin,int *N)
591: {
593: *N = xin->N;
594: return(0);
595: }
599: int VecSetValues_MPI(Vec xin,int ni,const int ix[],const PetscScalar y[],InsertMode addv)
600: {
601: int rank = xin->stash.rank, *owners = xin->map->range,start = owners[rank];
602: int end = owners[rank+1],i,row,ierr;
603: PetscScalar *xx;
606: #if defined(PETSC_USE_BOPT_g)
607: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
608: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
609: } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
610: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
611: }
612: #endif
613: VecGetArray(xin,&xx);
614: xin->stash.insertmode = addv;
616: if (addv == INSERT_VALUES) {
617: for (i=0; i<ni; i++) {
618: if ((row = ix[i]) >= start && row < end) {
619: xx[row-start] = y[i];
620: } else if (!xin->stash.donotstash) {
621: if (ix[i] < 0) continue;
622: #if defined(PETSC_USE_BOPT_g)
623: if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %d maximum %d",ix[i],xin->N);
624: #endif
625: VecStashValue_Private(&xin->stash,row,y[i]);
626: }
627: }
628: } else {
629: for (i=0; i<ni; i++) {
630: if ((row = ix[i]) >= start && row < end) {
631: xx[row-start] += y[i];
632: } else if (!xin->stash.donotstash) {
633: if (ix[i] < 0) continue;
634: #if defined(PETSC_USE_BOPT_g)
635: if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %d maximum %d",ix[i],xin->N);
636: #endif
637: VecStashValue_Private(&xin->stash,row,y[i]);
638: }
639: }
640: }
641: VecRestoreArray(xin,&xx);
642: return(0);
643: }
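/*
   A minimal usage sketch of the stash-based path implemented above (illustration only;
   the sizes and the vector name x are assumptions, not taken from this file):

     Vec         x;
     int         row = 0,ierr;
     PetscScalar one = 1.0;

     ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
     ierr = VecSetValues(x,1,&row,&one,ADD_VALUES);CHKERRQ(ierr);   (off-process entries are stashed)
     ierr = VecAssemblyBegin(x);CHKERRQ(ierr);                      (VecAssemblyBegin_MPI below)
     ierr = VecAssemblyEnd(x);CHKERRQ(ierr);                        (VecAssemblyEnd_MPI below)
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/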
647: int VecSetValuesBlocked_MPI(Vec xin,int ni,const int ix[],const PetscScalar yin[],InsertMode addv)
648: {
649: int rank = xin->stash.rank,*owners = xin->map->range,start = owners[rank];
650: int end = owners[rank+1],i,row,bs = xin->bs,j,ierr;
651: PetscScalar *xx,*y = (PetscScalar*)yin;
654: VecGetArray(xin,&xx);
655: #if defined(PETSC_USE_BOPT_g)
656: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
657: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
658: }
659: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
660: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
661: }
662: #endif
663: xin->stash.insertmode = addv;
665: if (addv == INSERT_VALUES) {
666: for (i=0; i<ni; i++) {
667: if ((row = bs*ix[i]) >= start && row < end) {
668: for (j=0; j<bs; j++) {
669: xx[row-start+j] = y[j];
670: }
671: } else if (!xin->stash.donotstash) {
672: if (ix[i] < 0) continue;
673: #if defined(PETSC_USE_BOPT_g)
674: if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %d max %d",ix[i],xin->N);
675: #endif
676: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
677: }
678: y += bs;
679: }
680: } else {
681: for (i=0; i<ni; i++) {
682: if ((row = bs*ix[i]) >= start && row < end) {
683: for (j=0; j<bs; j++) {
684: xx[row-start+j] += y[j];
685: }
686: } else if (!xin->stash.donotstash) {
687: if (ix[i] < 0) continue;
688: #if defined(PETSC_USE_BOPT_g)
689: if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %d max %d",ix[i],xin->N);
690: #endif
691: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
692: }
693: y += bs;
694: }
695: }
696: VecRestoreArray(xin,&xx);
697: return(0);
698: }
700: /*
701: Since nsends or nreceives may be zero we add 1 in certain mallocs
702: to make sure we never malloc an empty one.
703: */
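/*
   For example, the same pattern appears in the viewers above:
       PetscMalloc((len+1)*sizeof(PetscScalar),&values);
   so that PetscMalloc() is never asked for zero bytes when len is 0.
*/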
706: int VecAssemblyBegin_MPI(Vec xin)
707: {
708: int *owners = xin->map->range,*bowners,ierr,size,i,bs,nstash,reallocs;
709: InsertMode addv;
710: MPI_Comm comm = xin->comm;
713: if (xin->stash.donotstash) {
714: return(0);
715: }
717: MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
718: if (addv == (ADD_VALUES|INSERT_VALUES)) {
719: SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
720: }
721: xin->stash.insertmode = addv; /* in case this processor had no cache */
722:
723: bs = xin->bs;
724: MPI_Comm_size(xin->comm,&size);
725: if (!xin->bstash.bowners && xin->bs != -1) {
726: PetscMalloc((size+1)*sizeof(int),&bowners);
727: for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
728: xin->bstash.bowners = bowners;
729: } else {
730: bowners = xin->bstash.bowners;
731: }
732: VecStashScatterBegin_Private(&xin->stash,owners);
733: VecStashScatterBegin_Private(&xin->bstash,bowners);
734: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
735: PetscLogInfo(0,"VecAssemblyBegin_MPI:Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
736: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
737: PetscLogInfo(0,"VecAssemblyBegin_MPI:Block-Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
739: return(0);
740: }
744: int VecAssemblyEnd_MPI(Vec vec)
745: {
746: int ierr,base,i,j,n,*row,flg,bs;
747: PetscScalar *val,*vv,*array,*xarray;
750: if (!vec->stash.donotstash) {
751: VecGetArray(vec,&xarray);
752: base = vec->map->range[vec->stash.rank];
753: bs = vec->bs;
755: /* Process the stash */
756: while (1) {
757: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
758: if (!flg) break;
759: if (vec->stash.insertmode == ADD_VALUES) {
760: for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
761: } else if (vec->stash.insertmode == INSERT_VALUES) {
762: for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
763: } else {
764: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
765: }
766: }
767: VecStashScatterEnd_Private(&vec->stash);
769: /* now process the block-stash */
770: while (1) {
771: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
772: if (!flg) break;
773: for (i=0; i<n; i++) {
774: array = xarray+row[i]*bs-base;
775: vv = val+i*bs;
776: if (vec->stash.insertmode == ADD_VALUES) {
777: for (j=0; j<bs; j++) { array[j] += vv[j];}
778: } else if (vec->stash.insertmode == INSERT_VALUES) {
779: for (j=0; j<bs; j++) { array[j] = vv[j]; }
780: } else {
781: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
782: }
783: }
784: }
785: VecStashScatterEnd_Private(&vec->bstash);
786: VecRestoreArray(vec,&xarray);
787: }
788: vec->stash.insertmode = NOT_SET_VALUES;
789: return(0);
790: }