Actual source code: bdiag.c
1: /*$Id: bdiag.c,v 1.198 2001/08/07 03:02:53 balay Exp $*/
3: /* Block diagonal matrix format */
5: #include "src/mat/impls/bdiag/seq/bdiag.h"
6: #include "src/inline/ilu.h"
10: int MatDestroy_SeqBDiag(Mat A)
11: {
12: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
13: int i,bs = a->bs,ierr;
16: #if defined(PETSC_USE_LOG)
17: PetscLogObjectState((PetscObject)A,"Rows=%d, Cols=%d, NZ=%d, BSize=%d, NDiag=%d",A->m,A->n,a->nz,a->bs,a->nd);
18: #endif
19: if (!a->user_alloc) { /* Free the actual diagonals */
20: for (i=0; i<a->nd; i++) {
21: if (a->diag[i] > 0) {
22: PetscFree(a->diagv[i] + bs*bs*a->diag[i]);
23: } else {
24: PetscFree(a->diagv[i]);
25: }
26: }
27: }
28: if (a->pivot) {PetscFree(a->pivot);}
29: PetscFree(a->diagv);
30: PetscFree(a->diag);
31: PetscFree(a->colloc);
32: PetscFree(a->dvalue);
33: PetscFree(a);
34: return(0);
35: }
39: int MatAssemblyEnd_SeqBDiag(Mat A,MatAssemblyType mode)
40: {
41: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
42: int i,k,temp,*diag = a->diag,*bdlen = a->bdlen;
43: PetscScalar *dtemp,**dv = a->diagv;
46: if (mode == MAT_FLUSH_ASSEMBLY) return(0);
48: /* Sort diagonals into descending order of diagonal number */
49: for (i=0; i<a->nd; i++) {
50: for (k=i+1; k<a->nd; k++) {
51: if (diag[i] < diag[k]) {
52: temp = diag[i];
53: diag[i] = diag[k];
54: diag[k] = temp;
55: temp = bdlen[i];
56: bdlen[i] = bdlen[k];
57: bdlen[k] = temp;
58: dtemp = dv[i];
59: dv[i] = dv[k];
60: dv[k] = dtemp;
61: }
62: }
63: }
65: /* Set location of main diagonal */
66: for (i=0; i<a->nd; i++) {
67: if (!a->diag[i]) {a->mainbd = i; break;}
68: }
69: PetscLogInfo(A,"MatAssemblyEnd_SeqBDiag:Number of diagonals %d, memory used %d, block size %d\n",a->nd,a->maxnz,a->bs);
70: return(0);
71: }
75: int MatSetOption_SeqBDiag(Mat A,MatOption op)
76: {
77: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
80: switch (op) {
81: case MAT_NO_NEW_NONZERO_LOCATIONS:
82: a->nonew = 1;
83: break;
84: case MAT_YES_NEW_NONZERO_LOCATIONS:
85: a->nonew = 0;
86: break;
87: case MAT_NO_NEW_DIAGONALS:
88: a->nonew_diag = 1;
89: break;
90: case MAT_YES_NEW_DIAGONALS:
91: a->nonew_diag = 0;
92: break;
93: case MAT_COLUMN_ORIENTED:
94: a->roworiented = PETSC_FALSE;
95: break;
96: case MAT_ROW_ORIENTED:
97: a->roworiented = PETSC_TRUE;
98: break;
99: case MAT_ROWS_SORTED:
100: case MAT_ROWS_UNSORTED:
101: case MAT_COLUMNS_SORTED:
102: case MAT_COLUMNS_UNSORTED:
103: case MAT_IGNORE_OFF_PROC_ENTRIES:
104: case MAT_NEW_NONZERO_LOCATION_ERR:
105: case MAT_NEW_NONZERO_ALLOCATION_ERR:
106: case MAT_USE_HASH_TABLE:
107: PetscLogInfo(A,"MatSetOption_SeqBDiag:Option ignored\n");
108: break;
109: case MAT_SYMMETRIC:
110: case MAT_STRUCTURALLY_SYMMETRIC:
111: case MAT_NOT_SYMMETRIC:
112: case MAT_NOT_STRUCTURALLY_SYMMETRIC:
113: case MAT_HERMITIAN:
114: case MAT_NOT_HERMITIAN:
115: case MAT_SYMMETRY_ETERNAL:
116: case MAT_NOT_SYMMETRY_ETERNAL:
117: break;
118: default:
119: SETERRQ(PETSC_ERR_SUP,"unknown option");
120: }
121: return(0);
122: }
126: int MatPrintHelp_SeqBDiag(Mat A)
127: {
128: static PetscTruth called = PETSC_FALSE;
129: MPI_Comm comm = A->comm;
130: int ierr;
133: if (called) {return(0);} else called = PETSC_TRUE;
134: (*PetscHelpPrintf)(comm," Options for MATSEQBDIAG and MATMPIBDIAG matrix formats:\n");
135: (*PetscHelpPrintf)(comm," -mat_block_size <block_size>\n");
136: (*PetscHelpPrintf)(comm," -mat_bdiag_diags <d1,d2,d3,...> (diagonal numbers)\n");
137: (*PetscHelpPrintf)(comm," (for example) -mat_bdiag_diags -5,-1,0,1,5\n");
138: return(0);
139: }
143: static int MatGetDiagonal_SeqBDiag_N(Mat A,Vec v)
144: {
145: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
146: int ierr,i,j,n,len,ibase,bs = a->bs,iloc;
147: PetscScalar *x,*dd,zero = 0.0;
150: if (A->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
151: VecSet(&zero,v);
152: VecGetLocalSize(v,&n);
153: if (n != A->m) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
154: if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
155: len = PetscMin(a->mblock,a->nblock);
156: dd = a->diagv[a->mainbd];
157: VecGetArray(v,&x);
158: for (i=0; i<len; i++) {
159: ibase = i*bs*bs; iloc = i*bs;
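/* the (j,j) entry of the i-th bs x bs block lies at offset j*(bs+1) inside that block */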
160: for (j=0; j<bs; j++) x[j + iloc] = dd[ibase + j*(bs+1)];
161: }
162: VecRestoreArray(v,&x);
163: return(0);
164: }
168: static int MatGetDiagonal_SeqBDiag_1(Mat A,Vec v)
169: {
170: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
171: int ierr,i,n,len;
172: PetscScalar *x,*dd,zero = 0.0;
175: VecSet(&zero,v);
176: VecGetLocalSize(v,&n);
177: if (n != A->m) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
178: if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
179: dd = a->diagv[a->mainbd];
180: len = PetscMin(A->m,A->n);
181: VecGetArray(v,&x);
182: for (i=0; i<len; i++) x[i] = dd[i];
183: VecRestoreArray(v,&x);
184: return(0);
185: }
189: int MatZeroEntries_SeqBDiag(Mat A)
190: {
191: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
192: int d,i,len,bs = a->bs;
193: PetscScalar *dv;
196: for (d=0; d<a->nd; d++) {
197: dv = a->diagv[d];
198: if (a->diag[d] > 0) {
199: dv += bs*bs*a->diag[d];
200: }
201: len = a->bdlen[d]*bs*bs;
202: for (i=0; i<len; i++) dv[i] = 0.0;
203: }
204: return(0);
205: }
209: int MatGetBlockSize_SeqBDiag(Mat A,int *bs)
210: {
211: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
214: *bs = a->bs;
215: return(0);
216: }
220: int MatZeroRows_SeqBDiag(Mat A,IS is,const PetscScalar *diag)
221: {
222: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
223: int i,ierr,N,*rows,m = A->m - 1,nz,*col;
224: PetscScalar *dd,*val;
227: ISGetLocalSize(is,&N);
228: ISGetIndices(is,&rows);
229: for (i=0; i<N; i++) {
230: if (rows[i]<0 || rows[i]>m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"row out of range");
231: MatGetRow(A,rows[i],&nz,&col,&val);
232: PetscMemzero(val,nz*sizeof(PetscScalar));
233: MatSetValues(A,1,&rows[i],nz,col,val,INSERT_VALUES);
234: MatRestoreRow(A,rows[i],&nz,&col,&val);
235: }
236: if (diag) {
237: if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal does not exist");
238: dd = a->diagv[a->mainbd];
239: for (i=0; i<N; i++) dd[(rows[i]/a->bs)*a->bs*a->bs + (rows[i]%a->bs)*(a->bs+1)] = *diag; /* (row,row) entry within its block */
240: }
241: ISRestoreIndices(is,&rows);
242: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
243: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
244: return(0);
245: }
249: int MatGetSubMatrix_SeqBDiag(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *submat)
250: {
251: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
252: int nznew,*smap,i,j,ierr,oldcols = A->n;
253: int *irow,*icol,newr,newc,*cwork,*col,nz,bs;
254: PetscScalar *vwork,*val;
255: Mat newmat;
258: if (scall == MAT_REUSE_MATRIX) { /* no support for reuse so simply destroy all */
259: MatDestroy(*submat);
260: }
262: ISGetIndices(isrow,&irow);
263: ISGetIndices(iscol,&icol);
264: ISGetLocalSize(isrow,&newr);
265: ISGetLocalSize(iscol,&newc);
267: PetscMalloc((oldcols+1)*sizeof(int),&smap);
268: PetscMalloc((newc+1)*sizeof(int),&cwork);
269: PetscMalloc((newc+1)*sizeof(PetscScalar),&vwork);
270: PetscMemzero((char*)smap,oldcols*sizeof(int));
271: for (i=0; i<newc; i++) smap[icol[i]] = i+1;
273: /* Create the submatrix; its diagonals are allocated dynamically as values are inserted below */
274: bs = a->bs; /* Default block size remains the same */
275: MatCreate(A->comm,newr,newc,newr,newc,&newmat);
276: MatSetType(newmat,A->type_name);
277: MatSeqBDiagSetPreallocation(newmat,0,bs,PETSC_NULL,PETSC_NULL);
279: /* Fill new matrix */
280: for (i=0; i<newr; i++) {
281: MatGetRow(A,irow[i],&nz,&col,&val);
282: nznew = 0;
283: for (j=0; j<nz; j++) {
284: if (smap[col[j]]) {
285: cwork[nznew] = smap[col[j]] - 1;
286: vwork[nznew++] = val[j];
287: }
288: }
289: MatSetValues(newmat,1,&i,nznew,cwork,vwork,INSERT_VALUES);
290: MatRestoreRow(A,irow[i],&nz,&col,&val);
291: }
292: MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);
293: MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);
295: /* Free work space */
296: PetscFree(smap);
297: PetscFree(cwork);
298: PetscFree(vwork);
299: ISRestoreIndices(isrow,&irow);
300: ISRestoreIndices(iscol,&icol);
301: *submat = newmat;
302: return(0);
303: }
307: int MatGetSubMatrices_SeqBDiag(Mat A,int n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[])
308: {
309: int ierr,i;
312: if (scall == MAT_INITIAL_MATRIX) {
313: PetscMalloc((n+1)*sizeof(Mat),B);
314: }
316: for (i=0; i<n; i++) {
317: MatGetSubMatrix_SeqBDiag(A,irow[i],icol[i],scall,&(*B)[i]);
318: }
319: return(0);
320: }
324: int MatScale_SeqBDiag(const PetscScalar *alpha,Mat inA)
325: {
326: Mat_SeqBDiag *a = (Mat_SeqBDiag*)inA->data;
327: int one = 1,i,len,bs = a->bs;
330: for (i=0; i<a->nd; i++) {
331: len = bs*bs*a->bdlen[i];
332: if (a->diag[i] > 0) {
333: BLscal_(&len,(PetscScalar*)alpha,a->diagv[i] + bs*bs*a->diag[i],&one);
334: } else {
335: BLscal_(&len,(PetscScalar*)alpha,a->diagv[i],&one);
336: }
337: }
338: PetscLogFlops(a->nz);
339: return(0);
340: }
344: int MatDiagonalScale_SeqBDiag(Mat A,Vec ll,Vec rr)
345: {
346: Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
347: PetscScalar *l,*r,*dv;
348: int d,j,len,ierr;
349: int nd = a->nd,bs = a->bs,diag,m,n;
352: if (ll) {
353: VecGetSize(ll,&m);
354: if (m != A->m) SETERRQ(PETSC_ERR_ARG_SIZ,"Left scaling vector wrong length");
355: if (bs == 1) {
356: VecGetArray(ll,&l);
357: for (d=0; d<nd; d++) {
358: dv = a->diagv[d];
359: diag = a->diag[d];
360: len = a->bdlen[d];
361: if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= l[j+diag];
362: else for (j=0; j<len; j++) dv[j] *= l[j];
363: }
364: VecRestoreArray(ll,&l);
365: PetscLogFlops(a->nz);
366: } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
367: }
368: if (rr) {
369: VecGetSize(rr,&n);
370: if (n != A->n) SETERRQ(PETSC_ERR_ARG_SIZ,"Right scaling vector wrong length");
371: if (bs == 1) {
372: VecGetArray(rr,&r);
373: for (d=0; d<nd; d++) {
374: dv = a->diagv[d];
375: diag = a->diag[d];
376: len = a->bdlen[d];
377: if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= r[j];
378: else for (j=0; j<len; j++) dv[j] *= r[j-diag];
379: }
380: VecRestoreArray(rr,&r);
381: PetscLogFlops(a->nz);
382: } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
383: }
384: return(0);
385: }
387: static int MatDuplicate_SeqBDiag(Mat,MatDuplicateOption,Mat *);
391: int MatSetUpPreallocation_SeqBDiag(Mat A)
392: {
393: int ierr;
396: MatSeqBDiagSetPreallocation(A,PETSC_DEFAULT,PETSC_DEFAULT,0,0);
397: return(0);
398: }
400: /* -------------------------------------------------------------------*/
401: static struct _MatOps MatOps_Values = {MatSetValues_SeqBDiag_N,
402: MatGetRow_SeqBDiag,
403: MatRestoreRow_SeqBDiag,
404: MatMult_SeqBDiag_N,
405: /* 4*/ MatMultAdd_SeqBDiag_N,
406: MatMultTranspose_SeqBDiag_N,
407: MatMultTransposeAdd_SeqBDiag_N,
408: MatSolve_SeqBDiag_N,
409: 0,
410: 0,
411: /*10*/ 0,
412: 0,
413: 0,
414: MatRelax_SeqBDiag_N,
415: MatTranspose_SeqBDiag,
416: /*15*/ MatGetInfo_SeqBDiag,
417: 0,
418: MatGetDiagonal_SeqBDiag_N,
419: MatDiagonalScale_SeqBDiag,
420: MatNorm_SeqBDiag,
421: /*20*/ 0,
422: MatAssemblyEnd_SeqBDiag,
423: 0,
424: MatSetOption_SeqBDiag,
425: MatZeroEntries_SeqBDiag,
426: /*25*/ MatZeroRows_SeqBDiag,
427: 0,
428: MatLUFactorNumeric_SeqBDiag_N,
429: 0,
430: 0,
431: /*30*/ MatSetUpPreallocation_SeqBDiag,
432: MatILUFactorSymbolic_SeqBDiag,
433: 0,
434: 0,
435: 0,
436: /*35*/ MatDuplicate_SeqBDiag,
437: 0,
438: 0,
439: MatILUFactor_SeqBDiag,
440: 0,
441: /*40*/ 0,
442: MatGetSubMatrices_SeqBDiag,
443: 0,
444: MatGetValues_SeqBDiag_N,
445: 0,
446: /*45*/ MatPrintHelp_SeqBDiag,
447: MatScale_SeqBDiag,
448: 0,
449: 0,
450: 0,
451: /*50*/ MatGetBlockSize_SeqBDiag,
452: 0,
453: 0,
454: 0,
455: 0,
456: /*55*/ 0,
457: 0,
458: 0,
459: 0,
460: 0,
461: /*60*/ 0,
462: MatDestroy_SeqBDiag,
463: MatView_SeqBDiag,
464: MatGetPetscMaps_Petsc,
465: 0,
466: /*65*/ 0,
467: 0,
468: 0,
469: 0,
470: 0,
471: /*70*/ 0,
472: 0,
473: 0,
474: 0,
475: 0,
476: /*75*/ 0,
477: 0,
478: 0,
479: 0,
480: 0,
481: /*80*/ 0,
482: 0,
483: 0,
484: 0,
485: /*85*/ MatLoad_SeqBDiag
486: };
490: /*@C
491: MatSeqBDiagSetPreallocation - Sets the nonzero structure and, optionally, the arrays that store the block diagonals.
493: Collective on Mat
495: Input Parameters:
496: + B - the matrix
497: . nd - number of block diagonals (optional)
498: . bs - each element of a diagonal is a bs x bs dense matrix
499: . diag - optional array of block diagonal numbers (length nd).
500: For a matrix element A[i,j], where i=row and j=column, the
501: diagonal number is
502: $ diag = i/bs - j/bs (integer division)
503: Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as
504: needed (expensive).
505: - diagv - pointer to actual diagonals (in same order as diag array),
506: if allocated by user. Otherwise, set diagv=PETSC_NULL on input for PETSc
507: to control memory allocation.
509: Options Database Keys:
510: + -mat_block_size <bs> - Sets blocksize
511: - -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers
513: Notes:
514: See the users manual for further details regarding this storage format.
516: Fortran Note:
517: Fortran programmers cannot set diagv; this value is ignored.
519: Level: intermediate
521: .keywords: matrix, block, diagonal, sparse
523: .seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
524: @*/
525: int MatSeqBDiagSetPreallocation(Mat B,int nd,int bs,const int diag[],PetscScalar *diagv[])
526: {
527: int ierr,(*f)(Mat,int,int,const int[],PetscScalar*[]);
530: PetscObjectQueryFunction((PetscObject)B,"MatSeqBDiagSetPreallocation_C",(void (**)(void))&f);
531: if (f) {
532: (*f)(B,nd,bs,diag,diagv);
533: }
534: return(0);
535: }
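/*
   Usage sketch for MatSeqBDiagSetPreallocation() (illustrative only: the helper
   name and sizes below are made up, error checking via ierr/CHKERRQ is omitted,
   and the PETSc 2.x-era six-argument MatCreate() used throughout this file is
   assumed).  It preallocates a 12 x 12 matrix with 2 x 2 blocks and the three
   block diagonals -1, 0, 1, with storage managed by PETSc (diagv = PETSC_NULL).
*/
static int MatSeqBDiagPreallocationSketch(void)
{
  Mat A;
  int diag[3] = {-1,0,1};

  MatCreate(PETSC_COMM_SELF,12,12,12,12,&A);
  MatSetType(A,MATSEQBDIAG);
  MatSeqBDiagSetPreallocation(A,3,2,diag,PETSC_NULL);
  /* ... MatSetValues(), MatAssemblyBegin()/MatAssemblyEnd(), use A ... */
  MatDestroy(A);
  return 0;
}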
537: EXTERN_C_BEGIN
540: int MatSeqBDiagSetPreallocation_SeqBDiag(Mat B,int nd,int bs,int *diag,PetscScalar **diagv)
541: {
542: Mat_SeqBDiag *b;
543: int i,nda,sizetot,ierr, nd2 = 128,idiag[128];
544: PetscTruth flg1;
548: B->preallocated = PETSC_TRUE;
549: if (bs == PETSC_DEFAULT) bs = 1;
550: if (bs == 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Blocksize cannot be zero");
551: if (nd == PETSC_DEFAULT) nd = 0;
552: PetscOptionsGetInt(PETSC_NULL,"-mat_block_size",&bs,PETSC_NULL);
553: PetscOptionsGetIntArray(PETSC_NULL,"-mat_bdiag_diags",idiag,&nd2,&flg1);
554: if (flg1) {
555: diag = idiag;
556: nd = nd2;
557: }
559: if ((B->n%bs) || (B->m%bs)) SETERRQ(PETSC_ERR_ARG_SIZ,"Invalid block size");
560: if (!nd) nda = nd + 1;
561: else nda = nd;
562: b = (Mat_SeqBDiag*)B->data;
564: PetscOptionsHasName(PETSC_NULL,"-mat_no_unroll",&flg1);
565: if (!flg1) {
566: switch (bs) {
567: case 1:
568: B->ops->setvalues = MatSetValues_SeqBDiag_1;
569: B->ops->getvalues = MatGetValues_SeqBDiag_1;
570: B->ops->getdiagonal = MatGetDiagonal_SeqBDiag_1;
571: B->ops->mult = MatMult_SeqBDiag_1;
572: B->ops->multadd = MatMultAdd_SeqBDiag_1;
573: B->ops->multtranspose = MatMultTranspose_SeqBDiag_1;
574: B->ops->multtransposeadd= MatMultTransposeAdd_SeqBDiag_1;
575: B->ops->relax = MatRelax_SeqBDiag_1;
576: B->ops->solve = MatSolve_SeqBDiag_1;
577: B->ops->lufactornumeric = MatLUFactorNumeric_SeqBDiag_1;
578: break;
579: case 2:
580: B->ops->mult = MatMult_SeqBDiag_2;
581: B->ops->multadd = MatMultAdd_SeqBDiag_2;
582: B->ops->solve = MatSolve_SeqBDiag_2;
583: break;
584: case 3:
585: B->ops->mult = MatMult_SeqBDiag_3;
586: B->ops->multadd = MatMultAdd_SeqBDiag_3;
587: B->ops->solve = MatSolve_SeqBDiag_3;
588: break;
589: case 4:
590: B->ops->mult = MatMult_SeqBDiag_4;
591: B->ops->multadd = MatMultAdd_SeqBDiag_4;
592: B->ops->solve = MatSolve_SeqBDiag_4;
593: break;
594: case 5:
595: B->ops->mult = MatMult_SeqBDiag_5;
596: B->ops->multadd = MatMultAdd_SeqBDiag_5;
597: B->ops->solve = MatSolve_SeqBDiag_5;
598: break;
599: }
600: }
602: b->mblock = B->m/bs;
603: b->nblock = B->n/bs;
604: b->nd = nd;
605: b->bs = bs;
606: b->ndim = 0;
607: b->mainbd = -1;
608: b->pivot = 0;
610: PetscMalloc(2*nda*sizeof(int),&b->diag);
611: b->bdlen = b->diag + nda;
612: PetscMalloc((B->n+1)*sizeof(int),&b->colloc);
613: PetscMalloc(nda*sizeof(PetscScalar*),&b->diagv);
614: sizetot = 0;
616: if (diagv) { /* user allocated space */
617: b->user_alloc = PETSC_TRUE;
618: for (i=0; i<nd; i++) b->diagv[i] = diagv[i];
619: } else b->user_alloc = PETSC_FALSE;
621: for (i=0; i<nd; i++) {
622: b->diag[i] = diag[i];
623: if (diag[i] > 0) { /* lower triangular */
624: b->bdlen[i] = PetscMin(b->nblock,b->mblock - diag[i]);
625: } else { /* upper triangular */
626: b->bdlen[i] = PetscMin(b->mblock,b->nblock + diag[i]);
627: }
628: sizetot += b->bdlen[i];
629: }
630: sizetot *= bs*bs;
631: b->maxnz = sizetot;
632: PetscMalloc((B->n+1)*sizeof(PetscScalar),&b->dvalue);
633: PetscLogObjectMemory(B,(nda*(bs+2))*sizeof(int) + bs*nda*sizeof(PetscScalar)
634: + nda*sizeof(PetscScalar*) + sizeof(Mat_SeqBDiag)
635: + sizeof(struct _p_Mat) + sizetot*sizeof(PetscScalar));
637: if (!b->user_alloc) {
638: for (i=0; i<nd; i++) {
639: PetscMalloc(bs*bs*b->bdlen[i]*sizeof(PetscScalar),&b->diagv[i]);
640: PetscMemzero(b->diagv[i],bs*bs*b->bdlen[i]*sizeof(PetscScalar));
641: }
642: b->nonew = 0; b->nonew_diag = 0;
643: } else { /* diagonals are set on input; don't allow dynamic allocation */
644: b->nonew = 1; b->nonew_diag = 1;
645: }
647: /* shift diagv[i] for the positive diagonals so that block row r begins at diagv[i] + bs*bs*r for every diagonal */
648: for (i=0; i<nd; i++) {
649: if (diag[i] > 0) {
650: b->diagv[i] -= bs*bs*diag[i];
651: }
652: }
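/*
   Worked example of the shift above (taking bs == 1 for concreteness): for a
   diagonal with diag[i] == 2 the first stored entry belongs to row 2; after
   subtracting bs*bs*diag[i] = 2 from diagv[i], that entry is reached as
   diagv[i][2], matching the indexing used for the non-positive diagonals.
*/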
654: b->nz = b->maxnz; /* Currently not keeping track of exact count */
655: b->roworiented = PETSC_TRUE;
656: B->info.nz_unneeded = (double)b->maxnz;
657: return(0);
658: }
659: EXTERN_C_END
663: static int MatDuplicate_SeqBDiag(Mat A,MatDuplicateOption cpvalues,Mat *matout)
664: {
665: Mat_SeqBDiag *newmat,*a = (Mat_SeqBDiag*)A->data;
666: int i,ierr,len,diag,bs = a->bs;
667: Mat mat;
670: MatCreate(A->comm,A->m,A->n,A->m,A->n,matout);
671: MatSetType(*matout,A->type_name);
672: MatSeqBDiagSetPreallocation(*matout,a->nd,bs,a->diag,PETSC_NULL);
674: /* Copy contents of diagonals */
675: mat = *matout;
676: newmat = (Mat_SeqBDiag*)mat->data;
677: if (cpvalues == MAT_COPY_VALUES) {
678: for (i=0; i<a->nd; i++) {
679: len = a->bdlen[i] * bs * bs * sizeof(PetscScalar);
680: diag = a->diag[i];
681: if (diag > 0) {
682: PetscMemcpy(newmat->diagv[i]+bs*bs*diag,a->diagv[i]+bs*bs*diag,len);
683: } else {
684: PetscMemcpy(newmat->diagv[i],a->diagv[i],len);
685: }
686: }
687: }
688: MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
689: MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
690: return(0);
691: }
695: int MatLoad_SeqBDiag(PetscViewer viewer,const MatType type,Mat *A)
696: {
697: Mat B;
698: int *scols,i,nz,ierr,fd,header[4],size,nd = 128;
699: int bs,*rowlengths = 0,M,N,*cols,extra_rows,*diag = 0;
700: int idiag[128];
701: PetscScalar *vals,*svals;
702: MPI_Comm comm;
703: PetscTruth flg;
704:
706: PetscObjectGetComm((PetscObject)viewer,&comm);
707: MPI_Comm_size(comm,&size);
708: if (size > 1) SETERRQ(PETSC_ERR_ARG_SIZ,"Viewer must have exactly one processor");
709: PetscViewerBinaryGetDescriptor(viewer,&fd);
710: PetscBinaryRead(fd,header,4,PETSC_INT);
711: if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Not matrix object");
712: M = header[1]; N = header[2]; nz = header[3];
713: if (M != N) SETERRQ(PETSC_ERR_SUP,"Can only load square matrices");
714: if (header[3] < 0) {
715: SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as SeqBDiag");
716: }
718: /*
719: This code adds extra rows to make sure the number of rows is
720: divisible by the blocksize
721: */
722: bs = 1;
723: PetscOptionsGetInt(PETSC_NULL,"-matload_block_size",&bs,PETSC_NULL);
724: extra_rows = bs - M + bs*(M/bs);
725: if (extra_rows == bs) extra_rows = 0;
726: if (extra_rows) {
727: PetscLogInfo(0,"MatLoad_SeqBDiag:Padding loaded matrix to match blocksize\n");
728: }
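/*
   Worked example of the padding arithmetic above: with M = 10 and bs = 4,
   extra_rows = 4 - 10 + 4*(10/4) = 2, so the matrix is padded to 12 rows;
   when M is already a multiple of bs the formula gives extra_rows == bs,
   which the preceding test resets to 0.
*/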
730: /* read row lengths */
731: PetscMalloc((M+extra_rows)*sizeof(int),&rowlengths);
732: PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
733: for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;
735: /* load information about diagonals */
736: PetscOptionsGetIntArray(PETSC_NULL,"-matload_bdiag_diags",idiag,&nd,&flg);
737: if (flg) {
738: diag = idiag;
739: } else nd = 0; /* no diagonals listed; they will be allocated as needed */
741: /* create our matrix */
742: MatCreate(comm,M+extra_rows,M+extra_rows,M+extra_rows,M+extra_rows,A);
743: MatSetType(*A,type);
744: MatSeqBDiagSetPreallocation(*A,nd,bs,diag,PETSC_NULL);
745: B = *A;
747: /* read column indices and nonzeros */
748: PetscMalloc(nz*sizeof(int),&scols);
749: cols = scols;
750: PetscBinaryRead(fd,cols,nz,PETSC_INT);
751: PetscMalloc(nz*sizeof(PetscScalar),&svals);
752: vals = svals;
753: PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);
754: /* insert into matrix */
756: for (i=0; i<M; i++) {
757: MatSetValues(B,1,&i,rowlengths[i],scols,svals,INSERT_VALUES);
758: scols += rowlengths[i]; svals += rowlengths[i];
759: }
760: vals[0] = 1.0;
761: for (i=M; i<M+extra_rows; i++) {
762: MatSetValues(B,1,&i,1,&i,vals,INSERT_VALUES);
763: }
765: PetscFree(cols);
766: PetscFree(vals);
767: PetscFree(rowlengths);
769: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
770: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
771: return(0);
772: }
774: /*MC
775: MATSEQBDIAG - MATSEQBDIAG = "seqbdiag" - A matrix type to be used for sequential block diagonal matrices.
777: Options Database Keys:
778: . -mat_type seqbdiag - sets the matrix type to "seqbdiag" during a call to MatSetFromOptions()
780: Level: beginner
782: .seealso: MatCreateSeqBDiag()
783: M*/
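/*
   Run-time selection sketch (illustrative only: the helper name is made up,
   error checking is omitted, and the 2.x-era six-argument MatCreate() used
   throughout this file is assumed).  With command-line options such as
       -mat_type seqbdiag -mat_block_size 2 -mat_bdiag_diags -2,0,2
   MatSetFromOptions() below would select MATSEQBDIAG, and the block size and
   diagonal list would be picked up during preallocation.
*/
static int MatTypeFromOptionsSketch(int m,int n)
{
  Mat A;

  MatCreate(PETSC_COMM_SELF,m,n,m,n,&A);
  MatSetFromOptions(A);
  /* ... set values, assemble, use A ... */
  MatDestroy(A);
  return 0;
}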
785: EXTERN_C_BEGIN
788: int MatCreate_SeqBDiag(Mat B)
789: {
790: Mat_SeqBDiag *b;
791: int ierr,size;
794: MPI_Comm_size(B->comm,&size);
795: if (size > 1) SETERRQ(PETSC_ERR_ARG_WRONG,"Comm must be of size 1");
797: B->m = B->M = PetscMax(B->m,B->M);
798: B->n = B->N = PetscMax(B->n,B->N);
800: PetscNew(Mat_SeqBDiag,&b);
801: B->data = (void*)b;
802: PetscMemzero(b,sizeof(Mat_SeqBDiag));
803: PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
804: B->factor = 0;
805: B->mapping = 0;
807: PetscMapCreateMPI(B->comm,B->m,B->m,&B->rmap);
808: PetscMapCreateMPI(B->comm,B->n,B->n,&B->cmap);
810: b->ndim = 0;
811: b->mainbd = -1;
812: b->pivot = 0;
814: b->roworiented = PETSC_TRUE;
815: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSeqBDiagSetPreallocation_C",
816: "MatSeqBDiagSetPreallocation_SeqBDiag",
817: MatSeqBDiagSetPreallocation_SeqBDiag);
819: return(0);
820: }
821: EXTERN_C_END
825: /*@C
826: MatCreateSeqBDiag - Creates a sequential block diagonal matrix.
828: Collective on MPI_Comm
830: Input Parameters:
831: + comm - MPI communicator, set to PETSC_COMM_SELF
832: . m - number of rows
833: . n - number of columns
834: . nd - number of block diagonals (optional)
835: . bs - each element of a diagonal is a bs x bs dense matrix
836: . diag - optional array of block diagonal numbers (length nd).
837: For a matrix element A[i,j], where i=row and j=column, the
838: diagonal number is
839: $ diag = i/bs - j/bs (integer division)
840: Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as
841: needed (expensive).
842: - diagv - pointer to actual diagonals (in same order as diag array),
843: if allocated by user. Otherwise, set diagv=PETSC_NULL on input for PETSc
844: to control memory allocation.
846: Output Parameters:
847: . A - the matrix
849: Options Database Keys:
850: + -mat_block_size <bs> - Sets blocksize
851: - -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers
853: Notes:
854: See the users manual for further details regarding this storage format.
856: Fortran Note:
857: Fortran programmers cannot set diagv; this value is ignored.
859: Level: intermediate
861: .keywords: matrix, block, diagonal, sparse
863: .seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
864: @*/
865: int MatCreateSeqBDiag(MPI_Comm comm,int m,int n,int nd,int bs,const int diag[],PetscScalar *diagv[],Mat *A)
866: {
870: MatCreate(comm,m,n,m,n,A);
871: MatSetType(*A,MATSEQBDIAG);
872: MatSeqBDiagSetPreallocation(*A,nd,bs,diag,diagv);
873: return(0);
874: }
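/*
   Usage sketch for MatCreateSeqBDiag() (illustrative only: the helper name is
   made up, error checking is omitted, and the 2.x-era signatures shown above
   are assumed).  With bs = 1 the diagonal number reduces to diag = i - j, so a
   point tridiagonal matrix uses the diagonals -1, 0, 1.
*/
static int TridiagonalBDiagSketch(int n)
{
  Mat         A;
  int         i,nc,diag[3] = {-1,0,1},col[3];
  PetscScalar v[3];

  MatCreateSeqBDiag(PETSC_COMM_SELF,n,n,3,1,diag,PETSC_NULL,&A);
  for (i=0; i<n; i++) {
    nc = 0;
    if (i > 0)   {col[nc] = i-1; v[nc] = -1.0; nc++;}
    col[nc] = i; v[nc] = 2.0; nc++;
    if (i < n-1) {col[nc] = i+1; v[nc] = -1.0; nc++;}
    MatSetValues(A,1,&i,nc,col,v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /* ... use A ... */
  MatDestroy(A);
  return 0;
}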