Actual source code: petscfunc.c

/*$Id: petscfunc.c,v 1.13 2001/09/11 18:37:23 bsmith Exp $*/

#include "petscfunc.h"
#include "ramgfunc.h"
#include "petscksp.h"
/* ------------------------------------------------------------------- */
/*
   KSPMonitorWriteConvHist - Writes the convergence history to an external
   ASCII file.

   Input Parameters:
     ksp   - iterative context
     n     - iteration number
     rnorm - 2-norm (preconditioned) residual value (may be estimated)
     ctx   - optional user-defined monitor context (unused here)

   Sample usage:
     KSPSetMonitor(ksp,KSPMonitorWriteConvHist,PETSC_NULL,PETSC_NULL);

   Note: the tolerance file can be viewed using gnuplot, e.g.
     gnuplot plotpetsctol
*/
int KSPMonitorWriteConvHist(KSP ksp,int n,double rnorm,void* ctx)
{
  char filename[161];
  FILE *ftol;
  /*
  CONVHIST *convhist;
  convhist = (CONVHIST*)(ctx);
  bnrm2    = (*convhist).BNRM2;
  */

  sprintf(filename,"petsctol");

  /* overwrite the file on the first iteration, append on later ones */
  PetscFOpen(MPI_COMM_WORLD,filename,(n == 0) ? "w" : "a",&ftol);
  /* PetscFPrintf(MPI_COMM_WORLD,ftol,"%14.12e \n",rnorm/bnrm2); */
  PetscFPrintf(MPI_COMM_WORLD,ftol,"%14.12e \n",rnorm);
  PetscFClose(MPI_COMM_WORLD,ftol);

  return 0;
}
/* ------------------------------------------------------------------- */
/*
   KSPMonitorAmg - Writes the convergence history to the AMG-PETSc
   interface external ASCII file. This routine differs from the previous
   one in that each residual is prefixed with its iteration index.

   Input Parameters:
     ksp   - iterative context
     n     - iteration number
     rnorm - 2-norm (preconditioned) residual value (may be estimated)
     ctx   - optional user-defined monitor context (unused here)
*/
int KSPMonitorAmg(KSP ksp,int n,double rnorm,void* ctx)
{
  char filename[161];
  FILE *ftol;
  /*
  CONVHIST *convhist;
  convhist = (CONVHIST*)(ctx);
  bnrm2    = (*convhist).BNRM2;
  */

  sprintf(filename,"petsctol");

  /* overwrite the file on the first iteration, append on later ones */
  PetscFOpen(MPI_COMM_WORLD,filename,(n == 0) ? "w" : "a",&ftol);
  /* PetscFPrintf(MPI_COMM_WORLD,ftol,"%14.12e \n",rnorm/bnrm2); */
  PetscFPrintf(MPI_COMM_WORLD,ftol,"%d %14.12e \n",n,rnorm);
  PetscFClose(MPI_COMM_WORLD,ftol);

  return 0;
}
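
/*
   Sample usage (a sketch, not part of the original interface code; the
   ksp handle is assumed to have been created elsewhere):

     KSPSetMonitor(ksp,KSPMonitorAmg,PETSC_NULL,PETSC_NULL);

   As with KSPMonitorWriteConvHist above, the resulting petsctol file can
   be plotted with gnuplot; the leading iteration index makes it a plain
   two-column data file.
*/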
/* ------------------------------------------------------------------- */
/*
   KSPMonitorWriteResVecs - Writes the residual vector at each iteration
   to a separate file. Requires a CONVHIST context (providing the number
   of nodes) to be passed as the monitor context.
*/
int KSPMonitorWriteResVecs(KSP ksp,int n,double rnorm,void* ctx)
{
  PetscScalar *values;
  Vec         t, v, V;
  char        filename[161];
  int         ierr, i, numnodes;
  CONVHIST    *convhist;
  FILE        *fout;

  convhist = (CONVHIST*)(ctx);
  numnodes = convhist->NUMNODES;

  sprintf(filename,"/home/domenico/Antas/Output/residual.%d",n);
  VecCreate(MPI_COMM_WORLD,&t);
  VecSetSizes(t,numnodes,numnodes);
  VecSetType(t,VECSEQ);
  VecDuplicate(t,&v);

  KSPBuildResidual(ksp, t, v, &V);

  /* PetscViewerFileOpenASCII(MPI_COMM_WORLD,filename,&viewer);
     PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
     VecView(V, viewer);
     PetscViewerDestroy(viewer); */
  VecGetArray(V,&values);
  PetscFOpen(MPI_COMM_WORLD,filename,"w",&fout);
  for (i=0;i<numnodes;i++)
    PetscFPrintf(MPI_COMM_WORLD,fout,"%14.12e \n", values[i]);
  PetscFClose(MPI_COMM_WORLD,fout);
  VecRestoreArray(V,&values);

  /* free the work vectors created above */
  VecDestroy(t);
  VecDestroy(v);

  return 0;
}
/* ------------------------------------------------------------------- */
/*
   ConvhistCtxDestroy - Frees a user-defined CONVHIST context.
*/
int ConvhistCtxDestroy(CONVHIST *convhist)
{
  PetscFree(convhist);
  return 0;
}
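
/*
   Sketch of how a CONVHIST context might be created, attached to the
   residual-writing monitor and later destroyed (assumed usage, not taken
   from the original sources; the BNRM2 and NUMNODES members come from
   petscfunc.h, while bnorm and nnodes are made-up example variables):

     CONVHIST *convhist;
     PetscMalloc(sizeof(CONVHIST),&convhist);
     convhist->BNRM2    = bnorm;
     convhist->NUMNODES = nnodes;
     KSPSetMonitor(ksp,KSPMonitorWriteResVecs,(void*)convhist,
                   (int (*)(void*))ConvhistCtxDestroy);

   Passing ConvhistCtxDestroy as the last argument lets PETSc free the
   context when the monitor is removed.
*/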
/* ------------------------------------------------------------------- */
/*
   MyConvTest - Custom convergence test: declares convergence once the
   residual norm relative to BNRM2 (taken from the CONVHIST context)
   drops below the relative tolerance of the KSP.
*/
int MyConvTest(KSP ksp,int n, double rnorm, KSPConvergedReason *reason,
               void* ctx)
{
  double   bnrm2, rtol;
  CONVHIST *convhist = (CONVHIST*) ctx;

  bnrm2 = convhist->BNRM2;
  KSPGetTolerances(ksp, &rtol, PETSC_NULL, PETSC_NULL, PETSC_NULL);

  PetscPrintf(MPI_COMM_WORLD,"[test] %d %g \r",n,rnorm/bnrm2);

  /* report convergence through the reason argument, as KSP expects */
  if (rnorm/bnrm2 < rtol) *reason = KSP_CONVERGED_RTOL;
  else                    *reason = KSP_CONVERGED_ITERATING;
  return 0;
}
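
/*
   Sketch of how MyConvTest might be registered (assumed usage; convhist
   is a CONVHIST context set up as in the sketch above):

     KSPSetConvergenceTest(ksp,MyConvTest,(void*)convhist);

   The test then replaces KSPDefaultConverged and stops the iteration once
   rnorm/BNRM2 drops below the relative tolerance set with KSPSetTolerances.
*/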
/* ------------------------------------------------------------------- */
/*
   ReorderSubmatrices - Computes an RCM ordering for each local submatrix.
   Note that the orderings are computed but not applied to the blocks.
*/
int ReorderSubmatrices(PC pc,int nsub,IS *row,IS *col,Mat *submat,void *dummy)
{
  int             i, ierr;
  IS              isrow,iscol;  /* row and column permutations */
  MatOrderingType rtype = MATORDERING_RCM;

  for (i=0; i<nsub; i++) {
    MatGetOrdering(submat[i], rtype, &isrow, &iscol);
    /* free the index sets so they are not leaked at every rebuild */
    ISDestroy(isrow);
    ISDestroy(iscol);
  }
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   PrintSubMatrices - Prints the global row numbers of each local block.
*/
int PrintSubMatrices(PC pc,int nsub,IS *row,IS *col,Mat *submat,void *dummy)
{
  int i, j, ierr, nloc, *glo_row_ind;

  PetscPrintf(PETSC_COMM_WORLD,"*** Overview of the matrix partitioning *** \n");
  for (i=0; i<nsub; i++) {
    PetscPrintf(PETSC_COMM_WORLD,"\n** Jacobi block %d \n",i);
    ISGetSize(row[i],&nloc);
    ISGetIndices(row[i], &glo_row_ind);
    for (j=0; j< nloc; j++) {
      PetscPrintf(PETSC_COMM_WORLD,"[%d] global row %d\n",j,glo_row_ind[j]);
    }
    ISRestoreIndices(row[i],&glo_row_ind);
  }
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   ViewSubMatrices - Draws the nonzero pattern of each local block.
*/
int ViewSubMatrices(PC pc,int nsub,IS *row,IS *col,Mat *submat,void *dummy)
{
  int         i, ierr;
  PetscViewer viewer;
  PetscDraw   draw;

  for (i=0; i<nsub; i++) {
    /* MatView(submat[i],PETSC_NULL); */
    PetscViewerDrawOpen(MPI_COMM_WORLD,PETSC_NULL, PETSC_NULL,
                        0, 0, 500,500,&viewer);
    PetscViewerDrawGetDraw(viewer, 0, &draw);
    MatView(submat[i], viewer);
    PetscDrawPause(draw);
    PetscViewerDestroy(viewer);
  }
  return 0;
}
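
/*
   Sketch of how these submatrix hooks might be attached to a block Jacobi
   or ASM preconditioner (assumed usage; PCSetModifySubMatrices calls a
   user routine with this signature after the local submatrices have been
   extracted):

     PCSetModifySubMatrices(pc,ViewSubMatrices,PETSC_NULL);

   PrintSubMatrices and ReorderSubmatrices can be registered the same way,
   one at a time.
*/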
/* ------------------------------------------------------------------- */
/*
   MyMatView - Draws the nonzero pattern of a matrix in an X window and
   pauses for five seconds.
*/
int MyMatView(Mat mat,void *dummy)
{
  int         ierr;
  PetscViewer viewer;
  PetscDraw   draw;

  PetscViewerDrawOpen(MPI_COMM_WORLD,PETSC_NULL, PETSC_NULL, 0, 0, 500,500,&viewer);
  PetscViewerDrawGetDraw(viewer, 0, &draw);
  MatView(mat, viewer);
  PetscDrawSetPause(draw, 5);
  PetscDrawPause(draw);
  PetscViewerDestroy(viewer);
  return 0;
}
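
/*
   Sample usage (a sketch; mat is any assembled matrix, the context
   argument is unused):

     MyMatView(mat,PETSC_NULL);

   This opens a 500x500 pixel X window, draws the nonzero pattern and
   pauses for five seconds before the viewer is destroyed.
*/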
/* ------------------------------------------------------------------- */
/*
   PrintMatrix - Writes a matrix to an ASCII file in coordinate format,
   one nonzero entry per line with 1-based row and column indices.
*/
int PrintMatrix(Mat mat, char* path, char* base)
{
  int         ierr, numrows, numcols, numnonzero, I, j, ncols_getrow, *cols_getrow;
  PetscViewer viewer;
  char        filename[80];
  PetscScalar *vals_getrow;
  MatInfo     info;

  /*..Get size and number of nonzeros of matrix..*/
  MatGetSize(mat, &numrows, &numcols);
  MatGetInfo(mat,MAT_LOCAL,&info);
  numnonzero = (int)info.nz_used;

  /*..Set file name and open file for writing..*/
  sprintf(filename, "%s%s", path, base);
  printf(" [PrintMatrix]::Generating file %s ...\n", filename);
  PetscViewerASCIIOpen(MPI_COMM_WORLD,filename,&viewer);

  /*..Print file header..*/
  PetscViewerASCIIPrintf(viewer,"%% \n");
  if (numrows==numcols) /*..square matrix..*/
    PetscViewerASCIIPrintf(viewer,"%% %d %d \n", numrows, numnonzero);
  else /*..rectangular matrix..*/
    PetscViewerASCIIPrintf(viewer,"%% %d %d %d \n", numrows, numcols,
                           numnonzero);

  /*..Print matrix to rowwise file..*/
  for (I=0;I<numrows;I++){
    /*....Get row I of matrix....*/
    MatGetRow(mat,I,&ncols_getrow,&cols_getrow,&vals_getrow);

    /*....Print row I of matrix....*/
    for (j=0;j<ncols_getrow;j++){
#if defined(PETSC_USE_COMPLEX)
      PetscViewerASCIIPrintf( viewer,"%d %d %22.18e %22.18e\n",
                              I+1, cols_getrow[j]+1, real(vals_getrow[j]),
                              imag(vals_getrow[j]));
#else
      PetscViewerASCIIPrintf( viewer,"%d %d %22.18e \n",
                              I+1, cols_getrow[j]+1, vals_getrow[j]);
#endif
    }

    /*....Each MatGetRow must be matched by a MatRestoreRow....*/
    MatRestoreRow(mat,I,&ncols_getrow,&cols_getrow,&vals_getrow);
  }

  /*..Close file..*/
  PetscViewerDestroy(viewer);
  printf(" [PrintMatrix]::Done generating file %s !\n", filename);
  return 0;
}
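
/*
   Sample usage (a sketch; the path and file name are made up for the
   example):

     PrintMatrix(A,"/tmp/","matrix.dat");

   This writes the matrix row by row in coordinate (i, j, value) format
   with 1-based indices, preceded by a short header containing the
   dimensions and the number of nonzeros.
*/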
/* ------------------------------------------------------------------- */
/*
   PrintVector - Writes a vector to an ASCII file as a MATLAB function
   named after the base file name.
*/
int PrintVector(Vec vec, char* path, char* base)
{
  int         ierr, size, i;
  PetscViewer viewer;
  char        filename[80];
  PetscScalar *values;

  sprintf(filename, "%s%s%s", path, base, ".m");
  printf(" [PrintVector]::Generating file %s ...\n", filename);
  VecGetSize(vec, &size);
  /* VecGetArray returns a pointer to the vector's own storage,
     so no separate allocation is needed */
  VecGetArray(vec, &values);
  PetscViewerASCIIOpen(MPI_COMM_WORLD,filename,&viewer);
  PetscViewerASCIIPrintf(viewer,"function [z] = %s()\n",base);
  PetscViewerASCIIPrintf(viewer,"z = [\n");
  for (i=0;i<size;i++){
#if defined(PETSC_USE_COMPLEX)
    PetscViewerASCIIPrintf(viewer, "%22.18e %22.18e \n",
                           real( values[i] ), imag( values[i] ));
#else
    PetscViewerASCIIPrintf(viewer, "%22.18e \n", values[i]);
#endif
  }
  PetscViewerASCIIPrintf(viewer,"]; \n");
  PetscViewerDestroy(viewer);
  VecRestoreArray(vec, &values);
  printf(" [PrintVector]::Done generating file %s !\n", filename);
  return 0;
}
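
/*
   Sample usage (a sketch; the path and base name are made up for the
   example):

     PrintVector(x,"/tmp/","rhs");

   The base name doubles as the MATLAB function name, so the call above
   produces /tmp/rhs.m defining a function rhs() that returns the vector.
*/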