Actual source code: mpiadj.c
1: /*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/
3: /*
4: Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5: */
6: #include "src/mat/impls/adj/mpi/mpiadj.h"
7: #include "petscsys.h"
11: int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12: {
13: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
14: int ierr,i,j,m = A->m;
15: char *name;
16: PetscViewerFormat format;
19: PetscObjectGetName((PetscObject)A,&name);
20: PetscViewerGetFormat(viewer,&format);
21: if (format == PETSC_VIEWER_ASCII_INFO) {
22: return(0);
23: } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24: SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
25: } else {
26: PetscViewerASCIIUseTabs(viewer,PETSC_NO);
27: for (i=0; i<m; i++) {
28: PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);
29: for (j=a->i[i]; j<a->i[i+1]; j++) {
30: PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);
31: }
32: PetscViewerASCIISynchronizedPrintf(viewer,"\n");
33: }
34: PetscViewerASCIIUseTabs(viewer,PETSC_YES);
35: }
36: PetscViewerFlush(viewer);
37: return(0);
38: }
42: int MatView_MPIAdj(Mat A,PetscViewer viewer)
43: {
44: int ierr;
45: PetscTruth isascii;
48: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
49: if (isascii) {
50: MatView_MPIAdj_ASCII(A,viewer);
51: } else {
52: SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
53: }
54: return(0);
55: }
59: int MatDestroy_MPIAdj(Mat mat)
60: {
61: Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
62: int ierr;
65: #if defined(PETSC_USE_LOG)
66: PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
67: #endif
68: if (a->diag) {PetscFree(a->diag);}
69: if (a->freeaij) {
70: PetscFree(a->i);
71: PetscFree(a->j);
72: if (a->values) {PetscFree(a->values);}
73: }
74: PetscFree(a->rowners);
75: PetscFree(a);
76: return(0);
77: }
81: int MatSetOption_MPIAdj(Mat A,MatOption op)
82: {
83: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
86: switch (op) {
87: case MAT_SYMMETRIC:
88: case MAT_STRUCTURALLY_SYMMETRIC:
89: case MAT_HERMITIAN:
90: a->symmetric = PETSC_TRUE;
91: break;
92: case MAT_NOT_SYMMETRIC:
93: case MAT_NOT_STRUCTURALLY_SYMMETRIC:
94: case MAT_NOT_HERMITIAN:
95: a->symmetric = PETSC_FALSE;
96: break;
97: case MAT_SYMMETRY_ETERNAL:
98: case MAT_NOT_SYMMETRY_ETERNAL:
99: break;
100: default:
101: PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
102: break;
103: }
104: return(0);
105: }
108: /*
109: Adds diagonal pointers to sparse matrix structure.
110: */
114: int MatMarkDiagonal_MPIAdj(Mat A)
115: {
116: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
117: int i,j,*diag,m = A->m,ierr;
120: PetscMalloc((m+1)*sizeof(int),&diag);
121: PetscLogObjectMemory(A,(m+1)*sizeof(int));
122: for (i=0; i<A->m; i++) {
123: for (j=a->i[i]; j<a->i[i+1]; j++) {
124: if (a->j[j] == i) {
125: diag[i] = j;
126: break;
127: }
128: }
129: }
130: a->diag = diag;
131: return(0);
132: }
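/*
   A small illustrative sketch (hypothetical data, not taken from the library): for a local
   piece with two rows whose stored column indices happen to contain the diagonal entries,

      a->i = {0,2,4}      a->j = {0,1, 0,1}

   the loop above records the position of each diagonal entry within a->j, giving

      diag = {0,3}        since a->j[0] == 0 (row 0) and a->j[3] == 1 (row 1)
*/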
136: int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
137: {
138: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
139: int *itmp;
142: row -= a->rstart;
144: if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
146: *nz = a->i[row+1] - a->i[row];
147: if (v) *v = PETSC_NULL;
148: if (idx) {
149: itmp = a->j + a->i[row];
150: if (*nz) {
151: *idx = itmp;
152: }
153: else *idx = 0;
154: }
155: return(0);
156: }
160: int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
161: {
163: return(0);
164: }
168: int MatGetBlockSize_MPIAdj(Mat A,int *bs)
169: {
171: *bs = 1;
172: return(0);
173: }
178: int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
179: {
180: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
181: int ierr;
182: PetscTruth flag;
185: /* If the matrix dimensions or the number of nonzeros differ, the matrices cannot be equal */
186: if ((A->m != B->m) || (a->nz != b->nz)) {
187: flag = PETSC_FALSE;
188: } else {
189:
190: /* check whether the row pointers a->i are the same */
191: PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);
192:
193: /* if they are, check whether the column indices a->j are the same */
194: if (flag) {PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);}
195: }
196: MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);
197: return(0);
198: }
202: int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
203: {
204: int ierr,size,i;
205: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
208: MPI_Comm_size(A->comm,&size);
209: if (size > 1) {*done = PETSC_FALSE; return(0);}
210: *m = A->m;
211: *ia = a->i;
212: *ja = a->j;
213: *done = PETSC_TRUE;
214: if (oshift) {
215: for (i=0; i<(*ia)[*m]; i++) {
216: (*ja)[i]++;
217: }
218: for (i=0; i<=(*m); i++) (*ia)[i]++;
219: }
220: return(0);
221: }
225: int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
226: {
227: int i;
228: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
231: if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
232: if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
233: if (oshift) {
234: for (i=0; i<=(*m); i++) (*ia)[i]--;
235: for (i=0; i<(*ia)[*m]; i++) {
236: (*ja)[i]--;
237: }
238: }
239: return(0);
240: }
242: /* -------------------------------------------------------------------*/
243: static struct _MatOps MatOps_Values = {0,
244: MatGetRow_MPIAdj,
245: MatRestoreRow_MPIAdj,
246: 0,
247: /* 4*/ 0,
248: 0,
249: 0,
250: 0,
251: 0,
252: 0,
253: /*10*/ 0,
254: 0,
255: 0,
256: 0,
257: 0,
258: /*15*/ 0,
259: MatEqual_MPIAdj,
260: 0,
261: 0,
262: 0,
263: /*20*/ 0,
264: 0,
265: 0,
266: MatSetOption_MPIAdj,
267: 0,
268: /*25*/ 0,
269: 0,
270: 0,
271: 0,
272: 0,
273: /*30*/ 0,
274: 0,
275: 0,
276: 0,
277: 0,
278: /*35*/ 0,
279: 0,
280: 0,
281: 0,
282: 0,
283: /*40*/ 0,
284: 0,
285: 0,
286: 0,
287: 0,
288: /*45*/ 0,
289: 0,
290: 0,
291: 0,
292: 0,
293: /*50*/ MatGetBlockSize_MPIAdj,
294: MatGetRowIJ_MPIAdj,
295: MatRestoreRowIJ_MPIAdj,
296: 0,
297: 0,
298: /*55*/ 0,
299: 0,
300: 0,
301: 0,
302: 0,
303: /*60*/ 0,
304: MatDestroy_MPIAdj,
305: MatView_MPIAdj,
306: MatGetPetscMaps_Petsc,
307: 0,
308: /*65*/ 0,
309: 0,
310: 0,
311: 0,
312: 0,
313: /*70*/ 0,
314: 0,
315: 0,
316: 0,
317: 0,
318: /*75*/ 0,
319: 0,
320: 0,
321: 0,
322: 0,
323: /*80*/ 0,
324: 0,
325: 0,
326: 0,
327: /*85*/ 0
328: };
330: EXTERN_C_BEGIN
333: int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
334: {
335: Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
336: int ierr;
337: #if defined(PETSC_USE_BOPT_g)
338: int ii;
339: #endif
342: B->preallocated = PETSC_TRUE;
343: #if defined(PETSC_USE_BOPT_g)
344: if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
345: for (ii=1; ii<B->m; ii++) {
346: if (i[ii] < 0 || i[ii] < i[ii-1]) {
347: SETERRQ4(1,"i[%d]=%d must be nonnegative and at least i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
348: }
349: }
350: for (ii=0; ii<i[B->m]; ii++) {
351: if (j[ii] < 0 || j[ii] >= B->N) {
352: SETERRQ2(1,"Column index j[%d]=%d is out of range\n",ii,j[ii]);
353: }
354: }
355: #endif
357: b->j = j;
358: b->i = i;
359: b->values = values;
361: b->nz = i[B->m];
362: b->diag = 0;
363: b->symmetric = PETSC_FALSE;
364: b->freeaij = PETSC_TRUE;
366: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
367: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
368: return(0);
369: }
370: EXTERN_C_END
372: /*MC
373: MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
374: intended for use in constructing orderings and partitionings.
376: Level: beginner
378: .seealso: MatCreateMPIAdj
379: M*/
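/*
   A minimal creation sketch for this matrix type (a hypothetical one-process example with a
   2-vertex graph and an edge in each direction; error checking omitted). Only routines that
   appear in this file are used; the concrete sizes and indices are illustrative assumptions.

      Mat  A;
      int  *ii,*jj;
      PetscMalloc(3*sizeof(int),&ii);  ii[0] = 0; ii[1] = 1; ii[2] = 2;
      PetscMalloc(2*sizeof(int),&jj);  jj[0] = 1; jj[1] = 0;
      MatCreate(PETSC_COMM_WORLD,2,2,PETSC_DETERMINE,2,&A);
      MatSetType(A,MATMPIADJ);
      MatMPIAdjSetPreallocation(A,ii,jj,PETSC_NULL);
      ...
      MatDestroy(A);   ii and jj are freed here by PETSc; do not free them yourself
*/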
381: EXTERN_C_BEGIN
384: int MatCreate_MPIAdj(Mat B)
385: {
386: Mat_MPIAdj *b;
387: int ii,ierr,size,rank;
390: MPI_Comm_size(B->comm,&size);
391: MPI_Comm_rank(B->comm,&rank);
393: PetscNew(Mat_MPIAdj,&b);
394: B->data = (void*)b;
395: PetscMemzero(b,sizeof(Mat_MPIAdj));
396: PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
397: B->factor = 0;
398: B->lupivotthreshold = 1.0;
399: B->mapping = 0;
400: B->assembled = PETSC_FALSE;
401:
402: MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);
403: B->n = B->N;
405: /* the information in the maps duplicates the information computed below, eventually
406: we should remove the duplicate information that is not contained in the maps */
407: PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);
408: /* we don't know the "local columns" so just use the row information :-(*/
409: PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);
411: PetscMalloc((size+1)*sizeof(int),&b->rowners);
412: PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
413: MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);
414: b->rowners[0] = 0;
415: for (ii=2; ii<=size; ii++) {
416: b->rowners[ii] += b->rowners[ii-1];
417: }
418: b->rstart = b->rowners[rank];
419: b->rend = b->rowners[rank+1];
420: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
421: "MatMPIAdjSetPreallocation_MPIAdj",
422: MatMPIAdjSetPreallocation_MPIAdj);
423: return(0);
424: }
425: EXTERN_C_END
429: /*@C
430: MatMPIAdjSetPreallocation - Sets the arrays used to store the matrix entries
432: Collective on MPI_Comm
434: Input Parameters:
435: + B - the matrix
436: . i - the indices into j for the start of each row
437: . j - the column indices for each row (sorted for each row).
438: The indices in i and j start with zero (NOT with one).
439: - values - [optional] edge weights
441: Level: intermediate
443: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
444: @*/
445: int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
446: {
447: int ierr,(*f)(Mat,int*,int*,int*);
450: PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);
451: if (f) {
452: (*f)(B,i,j,values);
453: }
454: return(0);
455: }
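/*
   A sketch of the expected (zero-based) CSR layout, using made-up data: for a local piece
   with 3 rows whose off-diagonal neighbors are {1,2}, {0}, and {0,1},

      i[] = {0, 2, 3, 5}      i[row] .. i[row+1]-1 index into j[] for that row
      j[] = {1,2, 0, 0,1}     column indices, sorted within each row, diagonals omitted
      values                  PETSC_NULL, or an array of i[m] integer edge weights

   From C the arrays must be allocated with PetscMalloc(), since the matrix takes ownership
   and frees them when it is destroyed.
*/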
459: /*@C
460: MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
461: The matrix does not have numerical values associated with it, but is
462: intended for ordering (to reduce bandwidth etc) and partitioning.
464: Collective on MPI_Comm
466: Input Parameters:
467: + comm - MPI communicator
468: . m - number of local rows
469: . n - number of columns
470: . i - the indices into j for the start of each row
471: . j - the column indices for each row (sorted for each row).
472: The indices in i and j start with zero (NOT with one).
473: - values -[optional] edge weights
475: Output Parameter:
476: . A - the matrix
478: Level: intermediate
480: Notes: This matrix object does not support most matrix operations, including
481: MatSetValues().
482: You must NOT free the i, j, and values arrays yourself. PETSc will free them
483: when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
484: call from Fortran you need not create the arrays with PetscMalloc().
485: The i and j arrays should not include the matrix diagonal entries.
487: If you already have a matrix, you can create its adjacency matrix by a call
488: to MatConvert, specifying a type of MATMPIADJ.
490: Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
492: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
493: @*/
494: int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
495: {
496: int ierr;
499: MatCreate(comm,m,n,PETSC_DETERMINE,n,A);
500: MatSetType(*A,MATMPIADJ);
501: MatMPIAdjSetPreallocation(*A,i,j,values);
502: return(0);
503: }
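/*
   A minimal usage sketch (uniprocessor, hypothetical 3-vertex complete graph, error checking
   omitted); the sizes and indices below are illustrative assumptions:

      Mat A;
      int *i,*j;
      PetscMalloc(4*sizeof(int),&i);
      PetscMalloc(6*sizeof(int),&j);
      i[0] = 0; i[1] = 2; i[2] = 4; i[3] = 6;
      j[0] = 1; j[1] = 2;   j[2] = 0; j[3] = 2;   j[4] = 0; j[5] = 1;
      MatCreateMPIAdj(PETSC_COMM_SELF,3,3,i,j,PETSC_NULL,&A);
      ...
      MatDestroy(A);   i and j are freed here by PETSc
*/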
505: EXTERN_C_BEGIN
508: int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat) {
509: Mat B;
510: int i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
511: PetscScalar *ra;
512: MPI_Comm comm;
515: MatGetSize(A,PETSC_NULL,&N);
516: MatGetLocalSize(A,&m,PETSC_NULL);
517: MatGetOwnershipRange(A,&rstart,PETSC_NULL);
518:
519: /* count the number of nonzeros per row */
520: for (i=0; i<m; i++) {
521: MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
522: for (j=0; j<len; j++) {
523: if (rj[j] == i+rstart) {len--; break;} /* don't count diagonal */
524: }
525: MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
526: nzeros += len;
527: }
529: /* malloc space for nonzeros */
530: PetscMalloc((nzeros+1)*sizeof(int),&a);
531: PetscMalloc((N+1)*sizeof(int),&ia);
532: PetscMalloc((nzeros+1)*sizeof(int),&ja);
534: nzeros = 0;
535: ia[0] = 0;
536: for (i=0; i<m; i++) {
537: MatGetRow(A,i+rstart,&len,&rj,&ra);
538: cnt = 0;
539: for (j=0; j<len; j++) {
540: if (rj[j] != i+rstart) { /* if not diagonal */
541: a[nzeros+cnt] = (int) PetscAbsScalar(ra[j]);
542: ja[nzeros+cnt++] = rj[j];
543: }
544: }
545: MatRestoreRow(A,i+rstart,&len,&rj,&ra);
546: nzeros += cnt;
547: ia[i+1] = nzeros;
548: }
550: PetscObjectGetComm((PetscObject)A,&comm);
551: MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);
552: MatSetType(B,type);
553: MatMPIAdjSetPreallocation(B,ia,ja,a);
555: /* Fake support for "inplace" convert. */
556: if (*newmat == A) {
557: MatDestroy(A);
558: }
559: *newmat = B;
560: return(0);
561: }
562: EXTERN_C_END