Actual source code: party.c
#include "src/mat/impls/adj/mpi/mpiadj.h"

#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif

#include "petscfix.h"

/*
   Currently using Party-1.99
*/
EXTERN_C_BEGIN
#include "party_lib.h"
EXTERN_C_END
typedef struct {
    char redm[15];
    char redo[15];
    int rec;
    int output;
    char global_method[15];     /* global method */
    char local_method[15];      /* local method */
    int nbvtxcoarsed;           /* number of vertices for the coarse graph */
    char *mesg_log;
} MatPartitioning_Party;

#define SIZE_LOG 10000          /* size of buffer for mesg_log */
static int MatPartitioningApply_Party(MatPartitioning part, IS *partitioning)
{
    int ierr = 0, *locals, *parttab = NULL, rank, size;
    Mat mat = part->adj, matMPI, matSeq;
    int nb_locals;
    Mat_MPIAdj *adj = (Mat_MPIAdj *) mat->data;
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
    PetscTruth flg;
#ifdef PETSC_HAVE_UNISTD_H
    int fd_stdout, fd_pipe[2], count;
#endif
    /* check if the matrix is sequential; use MatGetSubMatrices if necessary */
    PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);
    MPI_Comm_size(mat->comm, &size);
    MPI_Comm_rank(part->comm, &rank);

    if (size > 1) {
        int M, N;
        IS isrow, iscol;
        Mat *A;

        if (flg) {
            SETERRQ(0, "Distributed matrix format MPIAdj is not supported for sequential partitioners");
        }
        ierr = PetscPrintf(part->comm,
            "Converting distributed matrix to sequential: this could be a performance loss\n");
        MatGetSize(mat, &M, &N);
        ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
        ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
        MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
        ISDestroy(isrow);
        ISDestroy(iscol);
        matSeq = *A;
    } else {
        matSeq = mat;
    }
    /* the input format is supported only for the MPIADJ type,
       so convert if necessary and keep the result in matMPI */
    if (!flg) {
        MatConvert(matSeq, MATMPIADJ, &matMPI);
    } else {
        matMPI = matSeq;
    }

    adj = (Mat_MPIAdj *) matMPI->data;  /* finally adj contains the adjacency graph */
    {
        /* Party library arguments definition */
        int n = mat->M;                 /* number of vertices in full graph */
        int *edge_p = adj->i;           /* start of edge list for each vertex */
        int *edge = adj->j;             /* edge list data */
        int *vertex_w = NULL;           /* weights for all vertices */
        int *edge_w = NULL;             /* weights for all edges */
        float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
        int p = part->n;                /* number of parts to create */
        int *part_party;                /* part number of each vertex (length n) */
        int cutsize;                    /* number of edges cut */
        char *global = party->global_method; /* global partitioning algorithm */
        char *local = party->local_method;   /* local partitioning algorithm */
        int redl = party->nbvtxcoarsed; /* how many vertices to coarsen down to */
        char *redm = party->redm;
        char *redo = party->redo;
        int rec = party->rec;
        int output = party->output;

        PetscMalloc((mat->M) * sizeof(int), &part_party);
        /* redirect stdout into the buffer party->mesg_log, so the library's
           screen output can be shown later by MatPartitioningView */
#ifdef PETSC_HAVE_UNISTD_H
        fd_stdout = dup(1);
        pipe(fd_pipe);
        close(1);
        dup2(fd_pipe[1], 1);
        PetscMalloc(SIZE_LOG * sizeof(char), &(party->mesg_log));
#endif

        /* library call */
        party_lib_times_start();
        ierr = party_lib(n, vertex_w, x, y, z, edge_p, edge, edge_w,
            p, part_party, &cutsize, redl, redm, redo,
            global, local, rec, output);
        party_lib_times_output(output);
        part_info(n, vertex_w, edge_p, edge, edge_w, p, part_party, output);

#ifdef PETSC_HAVE_UNISTD_H
        /* restore stdout and collect what the library printed */
        fflush(stdout);
        count = read(fd_pipe[0], party->mesg_log, (SIZE_LOG - 1) * sizeof(char));
        if (count < 0) count = 0;
        party->mesg_log[count] = 0;
        close(1);
        dup2(fd_stdout, 1);
        close(fd_stdout);
        close(fd_pipe[0]);
        close(fd_pipe[1]);
#endif
        /* report any error returned by the library call */
        if (ierr) {
            SETERRQ(1, party->mesg_log);
        }

        parttab = part_party;
    }
    /* create the index set with the local part of the partitioning */
    MPI_Comm_rank(part->comm, &rank);
    MPI_Comm_size(part->comm, &size);
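    /* Each process takes a contiguous slice of parttab: the first
       mat->M % size ranks own (mat->M / size) + 1 entries and the rest own
       mat->M / size.  Illustrative example: with mat->M = 10 and size = 3,
       ranks 0, 1, 2 own 4, 3, 3 entries starting at offsets 0, 4, 7. */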
    nb_locals = mat->M / size;
    locals = parttab + rank * nb_locals;
    if (rank < mat->M % size) {
        nb_locals++;
        locals += rank;
    } else {
        locals += mat->M % size;
    }
    ISCreateGeneral(part->comm, nb_locals, locals, partitioning);
    /* destroy temporary objects */
    PetscFree(parttab);
    if (matSeq != mat) {
        MatDestroy(matSeq);
    }
    if (matMPI != mat) {
        MatDestroy(matMPI);
    }

    return 0;
}
int MatPartitioningView_Party(MatPartitioning part, PetscViewer viewer)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
    int rank;
    PetscTruth isascii;

    MPI_Comm_rank(part->comm, &rank);
    PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &isascii);
    if (isascii) {
        if (!rank && party->mesg_log) {
            PetscViewerASCIIPrintf(viewer, "%s\n", party->mesg_log);
        }
    } else {
        SETERRQ1(1, "Viewer type %s not supported for this Party partitioner",
                 ((PetscObject) viewer)->type_name);
    }

    return 0;
}
/*@C
   MatPartitioningPartySetGlobal - Set the method for global partitioning.

   Input Parameters:
+  part - the partitioning context
-  global - may be one of MP_PARTY_OPT, MP_PARTY_LIN, MP_PARTY_SCA,
   MP_PARTY_RAN, MP_PARTY_GBF, MP_PARTY_GCF, MP_PARTY_BUB or MP_PARTY_DEF, or
   alternatively a string describing the method. Two or more methods can be
   combined, e.g. "gbf,gcf". Check the Party Library Users Manual for details.

   Level: advanced

@*/
int MatPartitioningPartySetGlobal(MatPartitioning part, const char *global)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    PetscStrcpy(party->global_method, global);

    return 0;
}
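/* A usage sketch (hypothetical call, following the manual page above):
   two global methods are combined by passing them as one comma-separated
   string, which is copied into the partitioner's context.

       MatPartitioningPartySetGlobal(part, "gbf,gcf");
*/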
/*@C
   MatPartitioningPartySetLocal - Set the method for local partitioning.

   Input Parameters:
+  part - the partitioning context
-  local - one of MP_PARTY_HELPFUL_SETS, MP_PARTY_KERNIGHAN_LIN, or MP_PARTY_NONE.
   Check the Party Library Users Manual for details.

   Level: advanced

@*/
int MatPartitioningPartySetLocal(MatPartitioning part, const char *local)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    PetscStrcpy(party->local_method, local);

    return 0;
}
/*@
   MatPartitioningPartySetCoarseLevel - Set the coarse level.

   Input Parameters:
+  part - the partitioning context
-  level - the coarse level, in the range [0.0,1.0]

   Level: advanced

@*/
int MatPartitioningPartySetCoarseLevel(MatPartitioning part, PetscReal level)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (level < 0.0 || level > 1.0) {
        SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,
                "Party: level of coarsening out of range [0.0,1.0]");
    } else {
        party->nbvtxcoarsed = part->adj->N * level;
    }

    if (party->nbvtxcoarsed < 20) party->nbvtxcoarsed = 20;

    return 0;
}
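/* Illustrative arithmetic for the clamping above: with part->adj->N = 1000
   and level = 0.05 the coarse graph target is 1000 * 0.05 = 50 vertices,
   while level = 0.01 would give 10 and be raised to the minimum of 20. */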
/*@
   MatPartitioningPartySetMatchOptimization - Activate matching optimization
   for graph reduction.

   Input Parameters:
+  part - the partitioning context
-  opt - PETSC_TRUE to activate the optimization

   Level: advanced

@*/
int MatPartitioningPartySetMatchOptimization(MatPartitioning part, PetscTruth opt)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (opt) {
        PetscStrcpy(party->redo, "w3");
    } else {
        PetscStrcpy(party->redo, "");
    }

    return 0;
}
/*@
   MatPartitioningPartySetBipart - Activate or deactivate recursive bisection.

   Input Parameters:
+  part - the partitioning context
-  bp - PETSC_TRUE to activate recursive bisection

   Level: advanced

@*/
int MatPartitioningPartySetBipart(MatPartitioning part, PetscTruth bp)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (bp) {
        party->rec = 1;
    } else {
        party->rec = 0;
    }

    return 0;
}
int MatPartitioningSetFromOptions_Party(MatPartitioning part)
{
    PetscTruth flag, b;
    char value[15];
    PetscReal r;

    PetscOptionsHead("Set Party partitioning options");

    PetscOptionsString("-mat_partitioning_party_global",
        "Global method to use", "MatPartitioningPartySetGlobal", "gcf,gbf",
        value, 15, &flag);
    if (flag) MatPartitioningPartySetGlobal(part, value);

    PetscOptionsString("-mat_partitioning_party_local",
        "Local method to use", "MatPartitioningPartySetLocal", "kl",
        value, 15, &flag);
    if (flag) MatPartitioningPartySetLocal(part, value);

    PetscOptionsReal("-mat_partitioning_party_coarse_level",
        "Coarse level", "MatPartitioningPartySetCoarseLevel", 0, &r, &flag);
    if (flag) MatPartitioningPartySetCoarseLevel(part, r);

    PetscOptionsLogical("-mat_partitioning_party_match_optimization",
        "Matching optimization on/off (boolean)",
        "MatPartitioningPartySetMatchOptimization", PETSC_TRUE, &b, &flag);
    if (flag) MatPartitioningPartySetMatchOptimization(part, b);

    PetscOptionsLogical("-mat_partitioning_party_bipart",
        "Bipartitioning option on/off (boolean)",
        "MatPartitioningPartySetBipart", PETSC_TRUE, &b, &flag);
    if (flag) MatPartitioningPartySetBipart(part, b);

    PetscOptionsTail();
    return 0;
}
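/* Example run-time options matching the handlers above (illustrative values):

       -mat_partitioning_party_global gbf,gcf
       -mat_partitioning_party_local kl
       -mat_partitioning_party_coarse_level 0.1
       -mat_partitioning_party_match_optimization true
       -mat_partitioning_party_bipart true
*/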
int MatPartitioningDestroy_Party(MatPartitioning part)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (party->mesg_log) {
        PetscFree(party->mesg_log);
    }
    PetscFree(party);

    return 0;
}
EXTERN_C_BEGIN
int MatPartitioningCreate_Party(MatPartitioning part)
{
    MatPartitioning_Party *party;

    PetscNew(MatPartitioning_Party, &party);

    /* set default methods and parameters */
    PetscStrcpy(party->global_method, "gcf,gbf");
    PetscStrcpy(party->local_method, "kl");
    PetscStrcpy(party->redm, "lam");
    PetscStrcpy(party->redo, "w3");
    party->nbvtxcoarsed = 200;
    party->rec = 1;
    party->output = 1;
    party->mesg_log = NULL;

    part->ops->apply = MatPartitioningApply_Party;
    part->ops->view = MatPartitioningView_Party;
    part->ops->destroy = MatPartitioningDestroy_Party;
    part->ops->setfromoptions = MatPartitioningSetFromOptions_Party;
    part->data = (void *) party;

    return 0;
}
EXTERN_C_END
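/* A minimal usage sketch for this partitioner (hypothetical driver code;
   it assumes the type was registered under the name "party" and that an
   adjacency matrix "adjmat" already exists):

       MatPartitioning part;
       IS partitioning;

       MatPartitioningCreate(PETSC_COMM_WORLD, &part);
       MatPartitioningSetAdjacency(part, adjmat);
       MatPartitioningSetType(part, "party");
       MatPartitioningSetFromOptions(part);
       MatPartitioningApply(part, &partitioning);
       ISDestroy(partitioning);
       MatPartitioningDestroy(part);
*/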