Actual source code: dagtona.c
/*$Id: dagtona.c,v 1.10 2001/03/23 23:25:00 balay Exp $*/

/*
  Tools to help solve the coarse grid problem redundantly.
  Provides two scatter contexts: (1) one that maps from the usual global
  vector to the entire vector on every processor in NATURAL numbering, and
  (2) one that extracts, from the entire vector stored on each processor in
  natural numbering, this processor's piece in GLOBAL numbering.
*/
#include "src/dm/da/daimpl.h"
/*@
   DAGlobalToNaturalAllCreate - Creates a scatter context that maps from the
   global vector to a copy of the entire vector on each processor, in natural
   numbering.

   Collective on DA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DAGlobalToNaturalEnd(), DALocalToGlobal(), DACreate2d(),
          DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DACreateNaturalVector()
@*/
int DAGlobalToNaturalAllCreate(DA da,VecScatter *scatter)
{
  int ierr,N;
  IS  from,to;
  Vec tmplocal,global;
  AO  ao;

  PetscFunctionBegin;
  ierr = DAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context: destination indices are remapped from the
     PETSc ordering to the application (natural) ordering */
  ierr = ISCreateStride(da->comm,da->Nlocal,0,1,&to);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,to);CHKERRQ(ierr);
  ierr = ISCreateStride(da->comm,da->Nlocal,0,1,&from);CHKERRQ(ierr);
  /* N = global size of the vector */
  ierr = MPI_Allreduce(&da->Nlocal,&N,1,MPI_INT,MPI_SUM,da->comm);CHKERRQ(ierr);
  /* dummy vectors (no array storage) that only describe the two layouts */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,N,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(da->comm,da->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,tmplocal,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
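/*
   A minimal usage sketch (not part of the original file): gather the whole
   global vector onto every processor in natural numbering.  Assumes a DA
   named da already exists; the vector names are illustrative.  Note the
   PETSc 2.x calling sequence of VecScatterBegin/End, in which the scatter
   context is the last argument.

    Vec        global,natural;
    VecScatter tona;
    int        ierr,N;

    ierr = DACreateGlobalVector(da,&global);CHKERRQ(ierr);
    ierr = VecGetSize(global,&N);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&natural);CHKERRQ(ierr);
    ierr = DAGlobalToNaturalAllCreate(da,&tona);CHKERRQ(ierr);
    ierr = VecScatterBegin(global,natural,INSERT_VALUES,SCATTER_FORWARD,tona);CHKERRQ(ierr);
    ierr = VecScatterEnd(global,natural,INSERT_VALUES,SCATTER_FORWARD,tona);CHKERRQ(ierr);
    ierr = VecScatterDestroy(tona);CHKERRQ(ierr);
*/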
/*@
   DANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
   of the entire vector on each processor to its local part in the global vector.

   Collective on DA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DAGlobalToNaturalEnd(), DALocalToGlobal(), DACreate2d(),
          DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DACreateNaturalVector()
@*/
int DANaturalAllToGlobalCreate(DA da,VecScatter *scatter)
{
  int ierr,M,m = da->Nlocal,start;
  IS  from,to;
  Vec tmplocal,global;
  AO  ao;

  PetscFunctionBegin;
  ierr = DAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  /* M = global size of the vector */
  ierr = MPI_Allreduce(&m,&M,1,MPI_INT,MPI_SUM,da->comm);CHKERRQ(ierr);
  /* dummy parallel vector (no array storage) that describes the global layout */
  ierr = VecCreateMPIWithArray(da->comm,m,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(global,&start,PETSC_NULL);CHKERRQ(ierr);
  /* source: this processor's entries located within the natural-ordering copy */
  ierr = ISCreateStride(da->comm,m,start,1,&from);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,from);CHKERRQ(ierr);
  /* destination: this processor's owned range of the global vector */
  ierr = ISCreateStride(da->comm,m,start,1,&to);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,M,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecScatterCreate(tmplocal,from,global,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
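/*
   A minimal usage sketch (not part of the original file): push this
   processor's piece of a full natural-numbering copy back into the
   distributed global vector, e.g. after solving the coarse problem
   redundantly.  Assumes the da, global, and natural of the sketch above.

    VecScatter fromna;
    int        ierr;

    ierr = DANaturalAllToGlobalCreate(da,&fromna);CHKERRQ(ierr);
    ierr = VecScatterBegin(natural,global,INSERT_VALUES,SCATTER_FORWARD,fromna);CHKERRQ(ierr);
    ierr = VecScatterEnd(natural,global,INSERT_VALUES,SCATTER_FORWARD,fromna);CHKERRQ(ierr);
    ierr = VecScatterDestroy(fromna);CHKERRQ(ierr);
*/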