Actual source code: redundant.c
1: /*$Id: redundant.c,v 1.29 2001/04/10 19:36:17 bsmith Exp $*/
2: /*
3: This file defines a "solve the problem redundantly on each processor" preconditioner.
5: */
6: #include "src/ksp/pc/pcimpl.h"
7: #include "petscksp.h"
9: typedef struct {
10: PC pc; /* actual preconditioner used on each processor */
11: Vec x,b; /* sequential work vectors holding a full copy of the parallel vectors */
12: Mat *pmats; /* sequential copy of the preconditioner matrix, gathered onto each processor */
13: VecScatter scatterin,scatterout; /* scatters that move all values to each processor and the local part back */
14: PetscTruth useparallelmat; /* PETSC_TRUE if the parallel preconditioner matrix must be gathered */
15: } PC_Redundant;
19: static int PCView_Redundant(PC pc,PetscViewer viewer)
20: {
21: PC_Redundant *red = (PC_Redundant*)pc->data;
22: int ierr,rank;
23: PetscTruth isascii,isstring;
24: PetscViewer sviewer;
27: MPI_Comm_rank(pc->comm,&rank);
28: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
29: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);
30: if (isascii) {
31: PetscViewerASCIIPrintf(viewer," Redundant solver preconditioner: Actual PC follows\n");
32: PetscViewerGetSingleton(viewer,&sviewer);
33: if (!rank) {
34: PetscViewerASCIIPushTab(viewer);
35: PCView(red->pc,sviewer);
36: PetscViewerASCIIPopTab(viewer);
37: }
38: PetscViewerRestoreSingleton(viewer,&sviewer);
39: } else if (isstring) {
40: PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");
41: PetscViewerGetSingleton(viewer,&sviewer);
42: if (!rank) {
43: PCView(red->pc,sviewer);
44: }
45: PetscViewerRestoreSingleton(viewer,&sviewer);
46: } else {
47: SETERRQ1(1,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
48: }
49: return(0);
50: }
54: static int PCSetUp_Redundant(PC pc)
55: {
56: PC_Redundant *red = (PC_Redundant*)pc->data;
57: int ierr,mstart,mlocal,m,size;
58: IS isl;
59: MatReuse reuse = MAT_INITIAL_MATRIX;
60: MatStructure str = DIFFERENT_NONZERO_PATTERN;
61: MPI_Comm comm;
64: PCSetFromOptions(red->pc);
65: VecGetSize(pc->vec,&m);
66: if (!pc->setupcalled) {
67: VecGetLocalSize(pc->vec,&mlocal);
68: VecCreateSeq(PETSC_COMM_SELF,m,&red->x);
69: VecDuplicate(red->x,&red->b);
70: PCSetVector(red->pc,red->x);
71: if (!red->scatterin) {
73: /*
74: Create the vector scatters that gather the entire vector onto each processor and move the local part back
75: */
76: VecGetOwnershipRange(pc->vec,&mstart,PETSC_NULL);
77: VecScatterCreate(pc->vec,0,red->x,0,&red->scatterin);
78: ISCreateStride(pc->comm,mlocal,mstart,1,&isl);
79: VecScatterCreate(red->x,isl,pc->vec,isl,&red->scatterout);
80: ISDestroy(isl);
81: }
82: }
84: /* if the preconditioner matrix set by the user is sequential then we do not need to gather the parallel matrix */
86: PetscObjectGetComm((PetscObject)pc->pmat,&comm);
87: MPI_Comm_size(comm,&size);
88: if (size == 1) {
89: red->useparallelmat = PETSC_FALSE;
90: }
92: if (red->useparallelmat) {
93: if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
94: /* destroy old matrices */
95: if (red->pmats) {
96: MatDestroyMatrices(1,&red->pmats);
97: }
98: } else if (pc->setupcalled == 1) {
99: reuse = MAT_REUSE_MATRIX;
100: str = SAME_NONZERO_PATTERN;
101: }
102:
103: /*
104: grab the parallel matrix and put it on each processor
105: */
106: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
107: MatGetSubMatrices(pc->pmat,1,&isl,&isl,reuse,&red->pmats);
108: ISDestroy(isl);
110: /* tell sequential PC its operators */
111: PCSetOperators(red->pc,red->pmats[0],red->pmats[0],str);
112: } else {
113: PCSetOperators(red->pc,pc->mat,pc->pmat,pc->flag);
114: }
115: PCSetFromOptions(red->pc);
116: PCSetVector(red->pc,red->b);
117: PCSetUp(red->pc);
118: return(0);
119: }
124: static int PCApply_Redundant(PC pc,Vec x,Vec y)
125: {
126: PC_Redundant *red = (PC_Redundant*)pc->data;
127: int ierr;
130: /* move all values to each processor */
131: VecScatterBegin(x,red->b,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);
132: VecScatterEnd(x,red->b,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);
134: /* apply preconditioner on each processor */
135: PCApply(red->pc,red->b,red->x,PC_LEFT);
137: /* move local part of values into y vector */
138: VecScatterBegin(red->x,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);
139: VecScatterEnd(red->x,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);
140: return(0);
141: }
146: static int PCDestroy_Redundant(PC pc)
147: {
148: PC_Redundant *red = (PC_Redundant*)pc->data;
149: int ierr;
152: if (red->scatterin) {VecScatterDestroy(red->scatterin);}
153: if (red->scatterout) {VecScatterDestroy(red->scatterout);}
154: if (red->x) {VecDestroy(red->x);}
155: if (red->b) {VecDestroy(red->b);}
156: if (red->pmats) {
157: MatDestroyMatrices(1,&red->pmats);
158: }
159: PCDestroy(red->pc);
160: PetscFree(red);
161: return(0);
162: }
166: static int PCSetFromOptions_Redundant(PC pc)
167: {
169: return(0);
170: }
172: EXTERN_C_BEGIN
175: int PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
176: {
177: PC_Redundant *red = (PC_Redundant*)pc->data;
178: int ierr;
181: red->scatterin = in;
182: red->scatterout = out;
183: PetscObjectReference((PetscObject)in);
184: PetscObjectReference((PetscObject)out);
185: return(0);
186: }
187: EXTERN_C_END
191: /*@
192: PCRedundantSetScatter - Sets the scatter used to copy values into the
193: redundant local solve and the scatter to move them back into the global
194: vector.
196: Collective on PC
198: Input Parameters:
199: + pc - the preconditioner context
200: . in - the scatter to move the values in
201: - out - the scatter to move them out
203: Level: advanced
205: .keywords: PC, redundant solve
206: @*/
207: int PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
208: {
209: int ierr,(*f)(PC,VecScatter,VecScatter);
215: PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);
216: if (f) {
217: (*f)(pc,in,out);
218: }
219: return(0);
220: }
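/*
   Editor's sketch (not part of redundant.c): one way a caller could supply its own
   scatters instead of letting PCSetUp_Redundant() build them.  The parallel vector
   xpar, the sequential vector xseq (of full global length), and the PC pc of type
   PCREDUNDANT are assumed to exist already; the calls mirror those made above in
   PCSetUp_Redundant().

       VecScatter tolocal,toglobal;
       IS         isl;
       int        mstart,mlocal,ierr;

       ierr = VecScatterCreate(xpar,0,xseq,0,&tolocal);CHKERRQ(ierr);
       ierr = VecGetOwnershipRange(xpar,&mstart,PETSC_NULL);CHKERRQ(ierr);
       ierr = VecGetLocalSize(xpar,&mlocal);CHKERRQ(ierr);
       ierr = ISCreateStride(PETSC_COMM_WORLD,mlocal,mstart,1,&isl);CHKERRQ(ierr);
       ierr = VecScatterCreate(xseq,isl,xpar,isl,&toglobal);CHKERRQ(ierr);
       ierr = ISDestroy(isl);CHKERRQ(ierr);
       ierr = PCRedundantSetScatter(pc,tolocal,toglobal);CHKERRQ(ierr);
*/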
222: EXTERN_C_BEGIN
225: int PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
226: {
227: PC_Redundant *red = (PC_Redundant*)pc->data;
230: *innerpc = red->pc;
231: return(0);
232: }
233: EXTERN_C_END
237: /*@
238: PCRedundantGetPC - Gets the sequential PC created by the redundant PC.
240: Not Collective
242: Input Parameter:
243: . pc - the preconditioner context
245: Output Parameter:
246: . innerpc - the sequential PC
248: Level: advanced
250: .keywords: PC, redundant solve
251: @*/
252: int PCRedundantGetPC(PC pc,PC *innerpc)
253: {
254: int ierr,(*f)(PC,PC*);
259: PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);
260: if (f) {
261: (*f)(pc,innerpc);
262: }
263: return(0);
264: }
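/*
   Editor's sketch (not part of redundant.c): retrieving the inner sequential PC so
   its type can be changed in code rather than through the -redundant_ options
   prefix.  Assumes pc is an already-created PC of type PCREDUNDANT; the switch to
   PCILU is only illustrative.

       PC  innerpc;
       int ierr;

       ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
       ierr = PCSetType(innerpc,PCILU);CHKERRQ(ierr);
*/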
266: EXTERN_C_BEGIN
269: int PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
270: {
271: PC_Redundant *red = (PC_Redundant*)pc->data;
274: if (mat) *mat = red->pmats[0];
275: if (pmat) *pmat = red->pmats[0];
276: return(0);
277: }
278: EXTERN_C_END
282: /*@
283: PCRedundantGetOperators - Gets the sequential matrix and preconditioner matrix used on each processor
285: Not Collective
287: Input Parameter:
288: . pc - the preconditioner context
290: Output Parameters:
291: + mat - the matrix
292: - pmat - the (possibly different) preconditioner matrix
294: Level: advanced
296: .keywords: PC, redundant solve
297: @*/
298: int PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
299: {
300: int ierr,(*f)(PC,Mat*,Mat*);
306: PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);
307: if (f) {
308: (*f)(pc,mat,pmat);
309: }
310: return(0);
311: }
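/*
   Editor's sketch (not part of redundant.c): inspecting the gathered sequential
   operator.  This is only meaningful after PCSetUp() has run, since red->pmats is
   filled in PCSetUp_Redundant(); pc is an assumed, already set up PCREDUNDANT, and
   PETSC_VIEWER_STDOUT_SELF is assumed to be the per-processor stdout viewer of this
   PETSc version.

       Mat seqmat,seqpmat;
       int ierr;

       ierr = PCRedundantGetOperators(pc,&seqmat,&seqpmat);CHKERRQ(ierr);
       ierr = MatView(seqmat,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
*/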
313: /* -------------------------------------------------------------------------------------*/
314: /*MC
315: PCREDUNDANT - Runs a preconditioner for the entire problem on each processor
318: Options for the redundant (inner) preconditioner can be set with the -redundant_ prefix, e.g., -redundant_pc_xxx
320: Level: intermediate
323: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
324: PCRedundantGetPC(), PCRedundantGetOperators()
326: M*/
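/*
   Editor's sketch (not part of redundant.c): selecting PCREDUNDANT from application
   code.  The parallel matrix A and vector x are assumed to exist; the inner solve
   defaults to LU (see PCCreate_Redundant() below) and can be changed with, e.g.,
   -redundant_pc_type ilu.

       PC  pc;
       int ierr;

       ierr = PCCreate(PETSC_COMM_WORLD,&pc);CHKERRQ(ierr);
       ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
       ierr = PCSetOperators(pc,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
       ierr = PCSetVector(pc,x);CHKERRQ(ierr);
       ierr = PCSetUp(pc);CHKERRQ(ierr);
*/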
328: EXTERN_C_BEGIN
331: int PCCreate_Redundant(PC pc)
332: {
333: int ierr;
334: PC_Redundant *red;
335: char *prefix;
338: PetscNew(PC_Redundant,&red);
339: PetscLogObjectMemory(pc,sizeof(PC_Redundant));
340: PetscMemzero(red,sizeof(PC_Redundant));
341: red->useparallelmat = PETSC_TRUE;
343: /* create the sequential PC that each processor has a copy of */
344: PCCreate(PETSC_COMM_SELF,&red->pc);
345: PCSetType(red->pc,PCLU);
346: PCGetOptionsPrefix(pc,&prefix);
347: PCSetOptionsPrefix(red->pc,prefix);
348: PCAppendOptionsPrefix(red->pc,"redundant_");
350: pc->ops->apply = PCApply_Redundant;
351: pc->ops->applytranspose = 0;
352: pc->ops->setup = PCSetUp_Redundant;
353: pc->ops->destroy = PCDestroy_Redundant;
354: pc->ops->setfromoptions = PCSetFromOptions_Redundant;
355: pc->ops->setuponblocks = 0;
356: pc->ops->view = PCView_Redundant;
357: pc->ops->applyrichardson = 0;
359: pc->data = (void*)red;
361: PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
362: PCRedundantSetScatter_Redundant);
363: PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
364: PCRedundantGetPC_Redundant);
365: PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
366: PCRedundantGetOperators_Redundant);
368: return(0);
369: }
370: EXTERN_C_END