Actual source code: plexpartition.c
petsc-3.7.1 2016-05-15
1: #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/
3: PetscClassId PETSCPARTITIONER_CLASSID = 0;
5: PetscFunctionList PetscPartitionerList = NULL;
6: PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8: PetscBool ChacoPartitionercite = PETSC_FALSE;
9: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10: " author = {Bruce Hendrickson and Robert Leland},\n"
11: " title = {A multilevel algorithm for partitioning graphs},\n"
12: " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13: " isbn = {0-89791-816-9},\n"
14: " pages = {28},\n"
15: " doi = {http://doi.acm.org/10.1145/224170.224228},\n"
16: " publisher = {ACM Press},\n"
17: " address = {New York},\n"
18: " year = {1995}\n}\n";
20: PetscBool ParMetisPartitionercite = PETSC_FALSE;
21: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22: " author = {George Karypis and Vipin Kumar},\n"
23: " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24: " journal = {Journal of Parallel and Distributed Computing},\n"
25: " volume = {48},\n"
26: " pages = {71--85},\n"
27: " year = {1998}\n}\n";
31: /*@C
32: DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
34: Input Parameters:
35: + dm - The mesh DM
36: - height - Height of the strata from which to construct the graph
38: Output Parameters:
39: + numVertices - Number of vertices in the graph
40: . offsets - Point offsets in the graph
41: - adjacency - Point connectivity in the graph
43: The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
44: DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45: representation on the mesh.
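  The output is a standard CSR encoding: the neighbors of vertex v are
  adjacency[offsets[v]] through adjacency[offsets[v+1]-1]. A minimal usage
  sketch (error checking omitted; the caller frees the returned arrays):
.vb
  PetscInt numVertices, *offsets = NULL, *adjacency = NULL, v, a;

  DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency);
  for (v = 0; v < numVertices; ++v) {
    for (a = offsets[v]; a < offsets[v+1]; ++a) {
      PetscPrintf(PETSC_COMM_SELF, "vertex %D adjacent to global cell %D\n", v, adjacency[a]);
    }
  }
  PetscFree(offsets);
  PetscFree(adjacency);
.ve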
47: Level: developer
49: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50: @*/
51: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52: {
53: PetscInt p, pStart, pEnd, a, adjSize, idx, size, nroots;
54: PetscInt *adj = NULL, *vOffsets = NULL, *graph = NULL;
55: IS cellNumbering;
56: const PetscInt *cellNum;
57: PetscBool useCone, useClosure;
58: PetscSection section;
59: PetscSegBuffer adjBuffer;
60: PetscSF sfPoint;
61: PetscMPIInt rank;
65: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
66: DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
67: DMGetPointSF(dm, &sfPoint);
68: PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);
69: /* Build adjacency graph via a section/segbuffer */
70: PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
71: PetscSectionSetChart(section, pStart, pEnd);
72: PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
73: /* Always use FVM adjacency to create partitioner graph */
74: DMPlexGetAdjacencyUseCone(dm, &useCone);
75: DMPlexGetAdjacencyUseClosure(dm, &useClosure);
76: DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);
77: DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);
78: DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);
79: ISGetIndices(cellNumbering, &cellNum);
80: for (*numVertices = 0, p = pStart; p < pEnd; p++) {
81: /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
82: if (nroots > 0) {if (cellNum[p] < 0) continue;}
83: adjSize = PETSC_DETERMINE;
84: DMPlexGetAdjacency(dm, p, &adjSize, &adj);
85: for (a = 0; a < adjSize; ++a) {
86: const PetscInt point = adj[a];
87: if (point != p && pStart <= point && point < pEnd) {
88: PetscInt *PETSC_RESTRICT pBuf;
89: PetscSectionAddDof(section, p, 1);
90: PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
91: *pBuf = point;
92: }
93: }
94: (*numVertices)++;
95: }
96: DMPlexSetAdjacencyUseCone(dm, useCone);
97: DMPlexSetAdjacencyUseClosure(dm, useClosure);
98: /* Derive CSR graph from section/segbuffer */
99: PetscSectionSetUp(section);
100: PetscSectionGetStorageSize(section, &size);
101: PetscMalloc1(*numVertices+1, &vOffsets);
102: for (idx = 0, p = pStart; p < pEnd; p++) {
103: if (nroots > 0) {if (cellNum[p] < 0) continue;}
104: PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
105: }
106: vOffsets[*numVertices] = size;
107: if (offsets) *offsets = vOffsets;
108: PetscSegBufferExtractAlloc(adjBuffer, &graph);
109: {
110: ISLocalToGlobalMapping ltogCells;
111: PetscInt n, size, *cells_arr;
112: /* In parallel, apply a global cell numbering to the graph */
113: ISRestoreIndices(cellNumbering, &cellNum);
114: ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);
115: ISLocalToGlobalMappingGetSize(ltogCells, &size);
116: ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);
117: /* Convert to positive global cell numbers */
118: for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
119: ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);
120: ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);
121: ISLocalToGlobalMappingDestroy(&ltogCells);
122: ISDestroy(&cellNumbering);
123: }
124: if (adjacency) *adjacency = graph;
125: /* Clean up */
126: PetscSegBufferDestroy(&adjBuffer);
127: PetscSectionDestroy(&section);
128: PetscFree(adj);
129: return(0);
130: }
134: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
135: {
136: const PetscInt maxFaceCases = 30;
137: PetscInt numFaceCases = 0;
138: PetscInt numFaceVertices[30]; /* maxFaceCases; C89 disallows variable-length arrays */
139: PetscInt *off, *adj;
140: PetscInt *neighborCells = NULL;
141: PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
145: /* For parallel partitioning, I think you have to communicate supports */
146: DMGetDimension(dm, &dim);
147: cellDim = dim - cellHeight;
148: DMPlexGetDepth(dm, &depth);
149: DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
150: if (cEnd - cStart == 0) {
151: if (numVertices) *numVertices = 0;
152: if (offsets) *offsets = NULL;
153: if (adjacency) *adjacency = NULL;
154: return(0);
155: }
156: numCells = cEnd - cStart;
157: faceDepth = depth - cellHeight;
158: if (dim == depth) {
159: PetscInt f, fStart, fEnd;
161: PetscCalloc1(numCells+1, &off);
162: /* Count neighboring cells */
163: DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
164: for (f = fStart; f < fEnd; ++f) {
165: const PetscInt *support;
166: PetscInt supportSize;
167: DMPlexGetSupportSize(dm, f, &supportSize);
168: DMPlexGetSupport(dm, f, &support);
169: if (supportSize == 2) {
170: PetscInt numChildren;
172: DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
173: if (!numChildren) {
174: ++off[support[0]-cStart+1];
175: ++off[support[1]-cStart+1];
176: }
177: }
178: }
179: /* Prefix sum */
180: for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
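    /* Example: if the face loop produced neighbor counts off[] = {0, 2, 3, 1}
       for three cells, the scan yields off[] = {0, 2, 5, 6}: cell c's
       neighbors will occupy adj[off[c]] .. adj[off[c+1]-1], and off[numCells]
       is the total adjacency size. */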
181: if (adjacency) {
182: PetscInt *tmp;
184: PetscMalloc1(off[numCells], &adj);
185: PetscMalloc1(numCells+1, &tmp);
186: PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));
187: /* Get neighboring cells */
188: for (f = fStart; f < fEnd; ++f) {
189: const PetscInt *support;
190: PetscInt supportSize;
191: DMPlexGetSupportSize(dm, f, &supportSize);
192: DMPlexGetSupport(dm, f, &support);
193: if (supportSize == 2) {
194: PetscInt numChildren;
196: DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
197: if (!numChildren) {
198: adj[tmp[support[0]-cStart]++] = support[1];
199: adj[tmp[support[1]-cStart]++] = support[0];
200: }
201: }
202: }
203: #if defined(PETSC_USE_DEBUG)
204: for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
205: #endif
206: PetscFree(tmp);
207: }
208: if (numVertices) *numVertices = numCells;
209: if (offsets) *offsets = off;
210: if (adjacency) *adjacency = adj;
211: return(0);
212: }
213: /* Setup face recognition */
214: if (faceDepth == 1) {
215: PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217: for (c = cStart; c < cEnd; ++c) {
218: PetscInt corners;
220: DMPlexGetConeSize(dm, c, &corners);
221: if (!cornersSeen[corners]) {
222: PetscInt nFV;
224: if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
225: cornersSeen[corners] = 1;
227: DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);
229: numFaceVertices[numFaceCases++] = nFV;
230: }
231: }
232: }
233: PetscCalloc1(numCells+1, &off);
234: /* Count neighboring cells */
235: for (cell = cStart; cell < cEnd; ++cell) {
236: PetscInt numNeighbors = PETSC_DETERMINE, n;
238: DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
239: /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
240: for (n = 0; n < numNeighbors; ++n) {
241: PetscInt cellPair[2];
242: PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
243: PetscInt meetSize = 0;
244: const PetscInt *meet = NULL;
246: cellPair[0] = cell; cellPair[1] = neighborCells[n];
247: if (cellPair[0] == cellPair[1]) continue;
248: if (!found) {
249: DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
250: if (meetSize) {
251: PetscInt f;
253: for (f = 0; f < numFaceCases; ++f) {
254: if (numFaceVertices[f] == meetSize) {
255: found = PETSC_TRUE;
256: break;
257: }
258: }
259: }
260: DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
261: }
262: if (found) ++off[cell-cStart+1];
263: }
264: }
265: /* Prefix sum */
266: for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268: if (adjacency) {
269: PetscMalloc1(off[numCells], &adj);
270: /* Get neighboring cells */
271: for (cell = cStart; cell < cEnd; ++cell) {
272: PetscInt numNeighbors = PETSC_DETERMINE, n;
273: PetscInt cellOffset = 0;
275: DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
276: /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
277: for (n = 0; n < numNeighbors; ++n) {
278: PetscInt cellPair[2];
279: PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
280: PetscInt meetSize = 0;
281: const PetscInt *meet = NULL;
283: cellPair[0] = cell; cellPair[1] = neighborCells[n];
284: if (cellPair[0] == cellPair[1]) continue;
285: if (!found) {
286: DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
287: if (meetSize) {
288: PetscInt f;
290: for (f = 0; f < numFaceCases; ++f) {
291: if (numFaceVertices[f] == meetSize) {
292: found = PETSC_TRUE;
293: break;
294: }
295: }
296: }
297: DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
298: }
299: if (found) {
300: adj[off[cell-cStart]+cellOffset] = neighborCells[n];
301: ++cellOffset;
302: }
303: }
304: }
305: }
306: PetscFree(neighborCells);
307: if (numVertices) *numVertices = numCells;
308: if (offsets) *offsets = off;
309: if (adjacency) *adjacency = adj;
310: return(0);
311: }
315: /*@C
316: PetscPartitionerRegister - Adds a new PetscPartitioner implementation
318: Not Collective
320: Input Parameters:
321: + name - The name of a new user-defined creation routine
322: - create_func - The creation routine itself
324: Notes:
325: PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
327: Sample usage:
328: .vb
329: PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
330: .ve
332: Then, your PetscPartitioner type can be chosen with the procedural interface via
333: .vb
334: PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
335: PetscPartitionerSetType(PetscPartitioner, "my_part");
336: .ve
337: or at runtime via the option
338: .vb
339: -petscpartitioner_type my_part
340: .ve
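  A creation routine for the example above might look like the following
  sketch (the MyPetscPartitioner* callbacks are illustrative, not part of
  PETSc; compare PetscPartitionerInitialize_Shell() below):
.vb
  PetscErrorCode MyPetscPartitionerCreate(PetscPartitioner part)
  {
    part->ops->view      = MyPetscPartitionerView;
    part->ops->destroy   = MyPetscPartitionerDestroy;
    part->ops->partition = MyPetscPartitionerPartition;
    return(0);
  }
.ve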
342: Level: advanced
344: .keywords: PetscPartitioner, register
345: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
347: @*/
348: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
349: {
353: PetscFunctionListAdd(&PetscPartitionerList, sname, function);
354: return(0);
355: }
359: /*@C
360: PetscPartitionerSetType - Builds a particular PetscPartitioner
362: Collective on PetscPartitioner
364: Input Parameters:
365: + part - The PetscPartitioner object
366: - name - The kind of partitioner
368: Options Database Key:
369: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
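  For example, the ParMetis implementation can be selected programmatically
  with (a minimal sketch):
.vb
  PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
.ve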
371: Level: intermediate
373: .keywords: PetscPartitioner, set, type
374: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
375: @*/
376: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
377: {
378: PetscErrorCode (*r)(PetscPartitioner);
379: PetscBool match;
384: PetscObjectTypeCompare((PetscObject) part, name, &match);
385: if (match) return(0);
387: PetscPartitionerRegisterAll();
388: PetscFunctionListFind(PetscPartitionerList, name, &r);
389: if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391: if (part->ops->destroy) {
392: (*part->ops->destroy)(part);
393: part->ops->destroy = NULL;
394: }
395: (*r)(part);
396: PetscObjectChangeTypeName((PetscObject) part, name);
397: return(0);
398: }
402: /*@C
403: PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
405: Not Collective
407: Input Parameter:
408: . part - The PetscPartitioner
410: Output Parameter:
411: . name - The PetscPartitioner type name
413: Level: intermediate
415: .keywords: PetscPartitioner, get, type, name
416: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
417: @*/
418: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
419: {
425: PetscPartitionerRegisterAll();
426: *name = ((PetscObject) part)->type_name;
427: return(0);
428: }
432: /*@C
433: PetscPartitionerView - Views a PetscPartitioner
435: Collective on PetscPartitioner
437: Input Parameters:
438: + part - the PetscPartitioner object to view
439: - v - the viewer
441: Level: developer
443: .seealso: PetscPartitionerDestroy()
444: @*/
445: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
446: {
451: if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
452: if (part->ops->view) {(*part->ops->view)(part, v);}
453: return(0);
454: }
458: PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
459: {
460: const char *defaultType;
461: char name[256];
462: PetscBool flg;
467: if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
468: else defaultType = ((PetscObject) part)->type_name;
469: PetscPartitionerRegisterAll();
471: PetscObjectOptionsBegin((PetscObject) part);
472: PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);
473: if (flg) {
474: PetscPartitionerSetType(part, name);
475: } else if (!((PetscObject) part)->type_name) {
476: PetscPartitionerSetType(part, defaultType);
477: }
478: PetscOptionsEnd();
479: return(0);
480: }
484: /*@
485: PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
487: Collective on PetscPartitioner
489: Input Parameter:
490: . part - the PetscPartitioner object to set options for
492: Level: developer
494: .seealso: PetscPartitionerView()
495: @*/
496: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
497: {
502: PetscPartitionerSetTypeFromOptions_Internal(part);
504: PetscObjectOptionsBegin((PetscObject) part);
505: if (part->ops->setfromoptions) {(*part->ops->setfromoptions)(part);}
506: /* process any options handlers added with PetscObjectAddOptionsHandler() */
507: PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
508: PetscOptionsEnd();
509: PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
510: return(0);
511: }
515: /*@C
516: PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
518: Collective on PetscPartitioner
520: Input Parameter:
521: . part - the PetscPartitioner object to setup
523: Level: developer
525: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
526: @*/
527: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
528: {
533: if (part->ops->setup) {(*part->ops->setup)(part);}
534: return(0);
535: }
539: /*@
540: PetscPartitionerDestroy - Destroys a PetscPartitioner object
542: Collective on PetscPartitioner
544: Input Parameter:
545: . part - the PetscPartitioner object to destroy
547: Level: developer
549: .seealso: PetscPartitionerView()
550: @*/
551: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
552: {
556: if (!*part) return(0);
559: if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
560: ((PetscObject) (*part))->refct = 0;
562: if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
563: PetscHeaderDestroy(part);
564: return(0);
565: }
569: /*@
570: PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
572: Collective on MPI_Comm
574: Input Parameter:
575: . comm - The communicator for the PetscPartitioner object
577: Output Parameter:
578: . part - The PetscPartitioner object
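  Sample usage, selecting the type via -petscpartitioner_type (a minimal
  sketch; error checking omitted):
.vb
  PetscPartitioner part;

  PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
  PetscPartitionerSetFromOptions(part);
  PetscPartitionerDestroy(&part);
.ve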
580: Level: beginner
582: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
583: @*/
584: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
585: {
586: PetscPartitioner p;
587: PetscErrorCode ierr;
591: *part = NULL;
592: PetscFVInitializePackage();
594: PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
596: *part = p;
597: return(0);
598: }
602: /*@
603: PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
605: Collective on DM
607: Input Parameters:
608: + part - The PetscPartitioner
609: - dm - The mesh DM
611: Output Parameters:
612: + partSection - The PetscSection giving the division of points by partition
613: - partition - The list of points by partition
615: Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
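  Sample usage, assuming dm is a DMPlex (a minimal sketch; error checking
  omitted, and the resulting partSection/partition would then drive the point
  migration):
.vb
  PetscPartitioner part;
  PetscSection     partSection;
  IS               partition;

  DMPlexGetPartitioner(dm, &part);
  PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
  PetscPartitionerPartition(part, dm, partSection, &partition);
  ISDestroy(&partition);
  PetscSectionDestroy(&partSection);
.ve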
617: Level: developer
619: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
620: @*/
621: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
622: {
623: PetscMPIInt size;
631: MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
632: if (size == 1) {
633: PetscInt *points;
634: PetscInt cStart, cEnd, c;
636: DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
637: PetscSectionSetChart(partSection, 0, size);
638: PetscSectionSetDof(partSection, 0, cEnd-cStart);
639: PetscSectionSetUp(partSection);
640: PetscMalloc1(cEnd-cStart, &points);
641: for (c = cStart; c < cEnd; ++c) points[c] = c;
642: ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
643: return(0);
644: }
645: if (part->height == 0) {
646: PetscInt numVertices;
647: PetscInt *start = NULL;
648: PetscInt *adjacency = NULL;
650: DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);
651: if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
652: (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
653: PetscFree(start);
654: PetscFree(adjacency);
655: } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
656: return(0);
657: }
661: PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
662: {
663: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
664: PetscErrorCode ierr;
667: PetscSectionDestroy(&p->section);
668: ISDestroy(&p->partition);
669: PetscFree(p);
670: return(0);
671: }
675: PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
676: {
677: PetscViewerFormat format;
678: PetscErrorCode ierr;
681: PetscViewerGetFormat(viewer, &format);
682: PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");
683: return(0);
684: }
688: PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
689: {
690: PetscBool iascii;
696: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
697: if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
698: return(0);
699: }
703: PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
704: {
705: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
706: PetscInt np;
707: PetscErrorCode ierr;
710: PetscSectionGetChart(p->section, NULL, &np);
711: if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
712: ISGetLocalSize(p->partition, &np);
713: if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
714: PetscSectionCopy(p->section, partSection);
715: *partition = p->partition;
716: PetscObjectReference((PetscObject) p->partition);
717: return(0);
718: }
722: PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
723: {
725: part->ops->view = PetscPartitionerView_Shell;
726: part->ops->destroy = PetscPartitionerDestroy_Shell;
727: part->ops->partition = PetscPartitionerPartition_Shell;
728: return(0);
729: }
731: /*MC
732: PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
734: Level: intermediate
736: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
737: M*/
741: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
742: {
743: PetscPartitioner_Shell *p;
744: PetscErrorCode ierr;
748: PetscNewLog(part, &p);
749: part->data = p;
751: PetscPartitionerInitialize_Shell(part);
752: return(0);
753: }
757: /*@C
758: PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
760: Collective on PetscPartitioner
762: Input Parameters:
763: + part - The PetscPartitioner
764: . numProcs - The number of partitions
765: . sizes - array of size numProcs (or NULL) providing the number of points in each partition
766: - points - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the points in each partition, listed partition by partition
768: Level: developer
770: Notes:
772: It is safe to free the sizes and points arrays after use in this routine.
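  For example, a fixed two-way split of four cells can be prescribed as below,
  where sizes gives two points per partition and points lists the points
  partition by partition (a minimal sketch; the arrays are copied, so stack
  storage is fine):
.vb
  const PetscInt sizes[2]  = {2, 2};
  const PetscInt points[4] = {0, 1, 2, 3};

  PetscPartitionerShellSetPartition(part, 2, sizes, points);
.ve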
774: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
775: @*/
776: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
777: {
778: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
779: PetscInt proc, numPoints;
780: PetscErrorCode ierr;
786: PetscSectionDestroy(&p->section);
787: ISDestroy(&p->partition);
788: PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
789: PetscSectionSetChart(p->section, 0, numProcs);
790: if (sizes) {
791: for (proc = 0; proc < numProcs; ++proc) {
792: PetscSectionSetDof(p->section, proc, sizes[proc]);
793: }
794: }
795: PetscSectionSetUp(p->section);
796: PetscSectionGetStorageSize(p->section, &numPoints);
797: ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
798: return(0);
799: }
803: PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
804: {
805: PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
806: PetscErrorCode ierr;
809: PetscFree(p);
810: return(0);
811: }
815: PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
816: {
817: PetscViewerFormat format;
818: PetscErrorCode ierr;
821: PetscViewerGetFormat(viewer, &format);
822: PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");
823: return(0);
824: }
828: PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
829: {
830: PetscBool iascii;
836: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
837: if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
838: return(0);
839: }
843: PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
844: {
845: MPI_Comm comm;
846: PetscInt np;
847: PetscMPIInt size;
851: comm = PetscObjectComm((PetscObject)dm);
852: MPI_Comm_size(comm,&size);
853: PetscSectionSetChart(partSection, 0, nparts);
854: ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
855: if (size == 1) {
856: for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
857: }
858: else {
859: PetscMPIInt rank;
860: PetscInt nvGlobal, *offsets, myFirst, myLast;
862: PetscMalloc1(size+1,&offsets);
863: offsets[0] = 0;
864: MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
865: for (np = 2; np <= size; np++) {
866: offsets[np] += offsets[np-1];
867: }
868: nvGlobal = offsets[size];
869: MPI_Comm_rank(comm,&rank);
870: myFirst = offsets[rank];
871: myLast = offsets[rank + 1] - 1;
872: PetscFree(offsets);
873: if (numVertices) {
874: PetscInt firstPart = 0, firstLargePart = 0;
875: PetscInt lastPart = 0, lastLargePart = 0;
876: PetscInt rem = nvGlobal % nparts;
877: PetscInt pSmall = nvGlobal/nparts;
878: PetscInt pBig = nvGlobal/nparts + 1;
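      /* Example: nvGlobal = 10, nparts = 3 gives rem = 1, pSmall = 3, pBig = 4:
         the first rem parts receive pBig vertices each and the remaining parts
         receive pSmall, i.e. partition sizes {4, 3, 3}. */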
881: if (rem) {
882: firstLargePart = myFirst / pBig;
883: lastLargePart = myLast / pBig;
885: if (firstLargePart < rem) {
886: firstPart = firstLargePart;
887: }
888: else {
889: firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
890: }
891: if (lastLargePart < rem) {
892: lastPart = lastLargePart;
893: }
894: else {
895: lastPart = rem + (myLast - (rem * pBig)) / pSmall;
896: }
897: }
898: else {
899: firstPart = myFirst / (nvGlobal/nparts);
900: lastPart = myLast / (nvGlobal/nparts);
901: }
903: for (np = firstPart; np <= lastPart; np++) {
904: PetscInt PartStart = np * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
905: PetscInt PartEnd = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
907: PartStart = PetscMax(PartStart,myFirst);
908: PartEnd = PetscMin(PartEnd,myLast+1);
909: PetscSectionSetDof(partSection,np,PartEnd-PartStart);
910: }
911: }
912: }
913: PetscSectionSetUp(partSection);
914: return(0);
915: }
919: PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
920: {
922: part->ops->view = PetscPartitionerView_Simple;
923: part->ops->destroy = PetscPartitionerDestroy_Simple;
924: part->ops->partition = PetscPartitionerPartition_Simple;
925: return(0);
926: }
928: /*MC
929: PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
931: Level: intermediate
933: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
934: M*/
938: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
939: {
940: PetscPartitioner_Simple *p;
941: PetscErrorCode ierr;
945: PetscNewLog(part, &p);
946: part->data = p;
948: PetscPartitionerInitialize_Simple(part);
949: return(0);
950: }
954: PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
955: {
956: PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
957: PetscErrorCode ierr;
960: PetscFree(p);
961: return(0);
962: }
966: PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
967: {
968: PetscViewerFormat format;
969: PetscErrorCode ierr;
972: PetscViewerGetFormat(viewer, &format);
973: PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");
974: return(0);
975: }
979: PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
980: {
981: PetscBool iascii;
987: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
988: if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
989: return(0);
990: }
994: PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
995: {
996: PetscInt np;
1000: PetscSectionSetChart(partSection, 0, nparts);
1001: ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1002: PetscSectionSetDof(partSection,0,numVertices);
1003: for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1004: PetscSectionSetUp(partSection);
1005: return(0);
1006: }
1010: PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1011: {
1013: part->ops->view = PetscPartitionerView_Gather;
1014: part->ops->destroy = PetscPartitionerDestroy_Gather;
1015: part->ops->partition = PetscPartitionerPartition_Gather;
1016: return(0);
1017: }
1019: /*MC
1020: PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1022: Level: intermediate
1024: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1025: M*/
1029: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1030: {
1031: PetscPartitioner_Gather *p;
1032: PetscErrorCode ierr;
1036: PetscNewLog(part, &p);
1037: part->data = p;
1039: PetscPartitionerInitialize_Gather(part);
1040: return(0);
1041: }
1046: PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1047: {
1048: PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1049: PetscErrorCode ierr;
1052: PetscFree(p);
1053: return(0);
1054: }
1058: PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1059: {
1060: PetscViewerFormat format;
1061: PetscErrorCode ierr;
1064: PetscViewerGetFormat(viewer, &format);
1065: PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");
1066: return(0);
1067: }
1071: PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1072: {
1073: PetscBool iascii;
1079: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1080: if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1081: return(0);
1082: }
1084: #if defined(PETSC_HAVE_CHACO)
1085: #if defined(PETSC_HAVE_UNISTD_H)
1086: #include <unistd.h>
1087: #endif
1088: /* Chaco does not have an include file */
1089: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1090: float *ewgts, float *x, float *y, float *z, char *outassignname,
1091: char *outfilename, short *assignment, int architecture, int ndims_tot,
1092: int mesh_dims[3], double *goal, int global_method, int local_method,
1093: int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1095: extern int FREE_GRAPH;
1096: #endif
1100: PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1101: {
1102: #if defined(PETSC_HAVE_CHACO)
1103: enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1104: MPI_Comm comm;
1105: int nvtxs = numVertices; /* number of vertices in full graph */
1106: int *vwgts = NULL; /* weights for all vertices */
1107: float *ewgts = NULL; /* weights for all edges */
1108: float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1109: char *outassignname = NULL; /* name of assignment output file */
1110: char *outfilename = NULL; /* output file name */
1111: int architecture = 1; /* 0 => hypercube, d => d-dimensional mesh */
1112: int ndims_tot = 0; /* total number of cube dimensions to divide */
1113: int mesh_dims[3]; /* dimensions of mesh of processors */
1114: double *goal = NULL; /* desired set sizes for each set */
1115: int global_method = 1; /* global partitioning algorithm */
1116: int local_method = 1; /* local partitioning algorithm */
1117: int rqi_flag = 0; /* should I use RQI/Symmlq eigensolver? */
1118: int vmax = 200; /* how many vertices to coarsen down to? */
1119: int ndims = 1; /* number of eigenvectors (2^d sets) */
1120: double eigtol = 0.001; /* tolerance on eigenvectors */
1121: long seed = 123636512; /* for random graph mutations */
1122: short int *assignment; /* Output partition */
1123: int fd_stdout, fd_pipe[2];
1124: PetscInt *points;
1125: int i, v, p;
1129: PetscObjectGetComm((PetscObject)dm,&comm);
1130: if (!numVertices) {
1131: PetscSectionSetChart(partSection, 0, nparts);
1132: PetscSectionSetUp(partSection);
1133: ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1134: return(0);
1135: }
1136: FREE_GRAPH = 0; /* Do not let Chaco free my memory */
1137: for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];
1139: if (global_method == INERTIAL_METHOD) {
1140: /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1141: SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1142: }
1143: mesh_dims[0] = nparts;
1144: mesh_dims[1] = 1;
1145: mesh_dims[2] = 1;
1146: PetscMalloc1(nvtxs, &assignment);
1147: /* Chaco outputs to stdout. We redirect this to a buffer. */
1148: /* TODO: check error codes for UNIX calls */
1149: #if defined(PETSC_HAVE_UNISTD_H)
1150: {
1151: int piperet;
1152: piperet = pipe(fd_pipe);
1153: if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1154: fd_stdout = dup(1);
1155: close(1);
1156: dup2(fd_pipe[1], 1);
1157: }
1158: #endif
1159: ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1160: assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1161: vmax, ndims, eigtol, seed);
1162: #if defined(PETSC_HAVE_UNISTD_H)
1163: {
1164: char msgLog[10000];
1165: int count;
1167: fflush(stdout);
1168: count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1169: if (count < 0) count = 0;
1170: msgLog[count] = 0;
1171: close(1);
1172: dup2(fd_stdout, 1);
1173: close(fd_stdout);
1174: close(fd_pipe[0]);
1175: close(fd_pipe[1]);
1176: if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1177: }
1178: #endif
1179: /* Convert to PetscSection+IS */
1180: PetscSectionSetChart(partSection, 0, nparts);
1181: for (v = 0; v < nvtxs; ++v) {
1182: PetscSectionAddDof(partSection, assignment[v], 1);
1183: }
1184: PetscSectionSetUp(partSection);
1185: PetscMalloc1(nvtxs, &points);
1186: for (p = 0, i = 0; p < nparts; ++p) {
1187: for (v = 0; v < nvtxs; ++v) {
1188: if (assignment[v] == p) points[i++] = v;
1189: }
1190: }
1191: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1192: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1193: if (global_method == INERTIAL_METHOD) {
1194: /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1195: }
1196: PetscFree(assignment);
1197: for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1198: return(0);
1199: #else
1200: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1201: #endif
1202: }
1206: PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1207: {
1209: part->ops->view = PetscPartitionerView_Chaco;
1210: part->ops->destroy = PetscPartitionerDestroy_Chaco;
1211: part->ops->partition = PetscPartitionerPartition_Chaco;
1212: return(0);
1213: }
1215: /*MC
1216: PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1218: Level: intermediate
1220: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1221: M*/
1225: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1226: {
1227: PetscPartitioner_Chaco *p;
1228: PetscErrorCode ierr;
1232: PetscNewLog(part, &p);
1233: part->data = p;
1235: PetscPartitionerInitialize_Chaco(part);
1236: PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1237: return(0);
1238: }
1242: PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1243: {
1244: PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1245: PetscErrorCode ierr;
1248: PetscFree(p);
1249: return(0);
1250: }
1254: PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1255: {
1256: PetscViewerFormat format;
1257: PetscErrorCode ierr;
1260: PetscViewerGetFormat(viewer, &format);
1261: PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");
1262: return(0);
1263: }
1267: PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1268: {
1269: PetscBool iascii;
1275: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1276: if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1277: return(0);
1278: }
1280: #if defined(PETSC_HAVE_PARMETIS)
1281: #include <parmetis.h>
1282: #endif
1286: PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1287: {
1288: #if defined(PETSC_HAVE_PARMETIS)
1289: MPI_Comm comm;
1290: PetscInt nvtxs = numVertices; /* The number of vertices in full graph */
1291: PetscInt *vtxdist; /* Distribution of vertices across processes */
1292: PetscInt *xadj = start; /* Start of edge list for each vertex */
1293: PetscInt *adjncy = adjacency; /* Edge lists for all vertices */
1294: PetscInt *vwgt = NULL; /* Vertex weights */
1295: PetscInt *adjwgt = NULL; /* Edge weights */
1296: PetscInt wgtflag = 0; /* Indicates which weights are present */
1297: PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */
1298: PetscInt ncon = 1; /* The number of weights per vertex */
1299: PetscReal *tpwgts; /* The fraction of vertex weights assigned to each partition */
1300: PetscReal *ubvec; /* The balance intolerance for vertex weights */
1301: PetscInt options[5]; /* Options */
1302: /* Outputs */
1303: PetscInt edgeCut; /* The number of edges cut by the partition */
1304: PetscInt *assignment, *points;
1305: PetscMPIInt rank, p, v, i;
1309: PetscObjectGetComm((PetscObject) part, &comm);
1310: MPI_Comm_rank(comm, &rank);
1311: options[0] = 0; /* Use all defaults */
1312: /* Calculate vertex distribution */
1313: PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);
1314: vtxdist[0] = 0;
1315: MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1316: for (p = 2; p <= nparts; ++p) {
1317: vtxdist[p] += vtxdist[p-1];
1318: }
1319: /* Calculate weights */
1320: for (p = 0; p < nparts; ++p) {
1321: tpwgts[p] = 1.0/nparts;
1322: }
1323: ubvec[0] = 1.05;
1325: if (nparts == 1) {
1326: PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1327: } else {
1328: if (vtxdist[1] == vtxdist[nparts]) {
1329: if (!rank) {
1330: PetscStackPush("METIS_PartGraphKway");
1331: ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1332: PetscStackPop;
1333: if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1334: }
1335: } else {
1336: PetscStackPush("ParMETIS_V3_PartKway");
1337: ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1338: PetscStackPop;
1339: if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1340: }
1341: }
1342: /* Convert to PetscSection+IS */
1343: PetscSectionSetChart(partSection, 0, nparts);
1344: for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1345: PetscSectionSetUp(partSection);
1346: PetscMalloc1(nvtxs, &points);
1347: for (p = 0, i = 0; p < nparts; ++p) {
1348: for (v = 0; v < nvtxs; ++v) {
1349: if (assignment[v] == p) points[i++] = v;
1350: }
1351: }
1352: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1353: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1354: PetscFree4(vtxdist,tpwgts,ubvec,assignment);
1355: return(0);
1356: #else
1357: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1358: #endif
1359: }
1363: PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1364: {
1366: part->ops->view = PetscPartitionerView_ParMetis;
1367: part->ops->destroy = PetscPartitionerDestroy_ParMetis;
1368: part->ops->partition = PetscPartitionerPartition_ParMetis;
1369: return(0);
1370: }
1372: /*MC
1373: PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1375: Level: intermediate
1377: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1378: M*/
1382: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1383: {
1384: PetscPartitioner_ParMetis *p;
1385: PetscErrorCode ierr;
1389: PetscNewLog(part, &p);
1390: part->data = p;
1392: PetscPartitionerInitialize_ParMetis(part);
1393: PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1394: return(0);
1395: }
1399: /*@
1400: DMPlexGetPartitioner - Get the mesh partitioner
1402: Not collective
1404: Input Parameter:
1405: . dm - The DM
1407: Output Parameter:
1408: . part - The PetscPartitioner
1410: Level: developer
1412: Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
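  A common pattern is to fetch the partitioner and configure it from the
  options database (a minimal sketch; error checking omitted):
.vb
  PetscPartitioner part;

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetFromOptions(part);
.ve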
1414: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1415: @*/
1416: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1417: {
1418: DM_Plex *mesh = (DM_Plex *) dm->data;
1423: *part = mesh->partitioner;
1424: return(0);
1425: }
1429: /*@
1430: DMPlexSetPartitioner - Set the mesh partitioner
1432: Logically Collective on dm and part
1434: Input Parameters:
1435: + dm - The DM
1436: - part - The partitioner
1438: Level: developer
1440: Note: Any existing PetscPartitioner will be destroyed.
1442: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1443: @*/
1444: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1445: {
1446: DM_Plex *mesh = (DM_Plex *) dm->data;
1452: PetscObjectReference((PetscObject)part);
1453: PetscPartitionerDestroy(&mesh->partitioner);
1454: mesh->partitioner = part;
1455: return(0);
1456: }
1460: static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1461: {
1465: if (up) {
1466: PetscInt parent;
1468: DMPlexGetTreeParent(dm,point,&parent,NULL);
1469: if (parent != point) {
1470: PetscInt closureSize, *closure = NULL, i;
1472: DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1473: for (i = 0; i < closureSize; i++) {
1474: PetscInt cpoint = closure[2*i];
1476: DMLabelSetValue(label,cpoint,rank);
1477: DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);
1478: }
1479: DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1480: }
1481: }
1482: if (down) {
1483: PetscInt numChildren;
1484: const PetscInt *children;
1486: DMPlexGetTreeChildren(dm,point,&numChildren,&children);
1487: if (numChildren) {
1488: PetscInt i;
1490: for (i = 0; i < numChildren; i++) {
1491: PetscInt cpoint = children[i];
1493: DMLabelSetValue(label,cpoint,rank);
1494: DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);
1495: }
1496: }
1497: }
1498: return(0);
1499: }
1503: /*@
1504: DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1506: Input Parameters:
1507: + dm - The DM
1508: - label - DMLabel assigning ranks to remote roots
1510: Level: developer
1512: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1513: @*/
1514: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1515: {
1516: IS rankIS, pointIS;
1517: const PetscInt *ranks, *points;
1518: PetscInt numRanks, numPoints, r, p, c, closureSize;
1519: PetscInt *closure = NULL;
1520: PetscErrorCode ierr;
1523: DMLabelGetValueIS(label, &rankIS);
1524: ISGetLocalSize(rankIS, &numRanks);
1525: ISGetIndices(rankIS, &ranks);
1526: for (r = 0; r < numRanks; ++r) {
1527: const PetscInt rank = ranks[r];
1529: DMLabelGetStratumIS(label, rank, &pointIS);
1530: ISGetLocalSize(pointIS, &numPoints);
1531: ISGetIndices(pointIS, &points);
1532: for (p = 0; p < numPoints; ++p) {
1533: DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
1534: for (c = 0; c < closureSize*2; c += 2) {
1535: DMLabelSetValue(label, closure[c], rank);
1536: DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);
1537: }
1538: }
1539: ISRestoreIndices(pointIS, &points);
1540: ISDestroy(&pointIS);
1541: }
1542: if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);}
1543: ISRestoreIndices(rankIS, &ranks);
1544: ISDestroy(&rankIS);
1545: return(0);
1546: }
1550: /*@
1551: DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1553: Input Parameters:
1554: + dm - The DM
1555: - label - DMLabel assigning ranks to remote roots
1557: Level: developer
1559: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1560: @*/
1561: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1562: {
1563: IS rankIS, pointIS;
1564: const PetscInt *ranks, *points;
1565: PetscInt numRanks, numPoints, r, p, a, adjSize;
1566: PetscInt *adj = NULL;
1567: PetscErrorCode ierr;
1570: DMLabelGetValueIS(label, &rankIS);
1571: ISGetLocalSize(rankIS, &numRanks);
1572: ISGetIndices(rankIS, &ranks);
1573: for (r = 0; r < numRanks; ++r) {
1574: const PetscInt rank = ranks[r];
1576: DMLabelGetStratumIS(label, rank, &pointIS);
1577: ISGetLocalSize(pointIS, &numPoints);
1578: ISGetIndices(pointIS, &points);
1579: for (p = 0; p < numPoints; ++p) {
1580: adjSize = PETSC_DETERMINE;
1581: DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
1582: for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
1583: }
1584: ISRestoreIndices(pointIS, &points);
1585: ISDestroy(&pointIS);
1586: }
1587: ISRestoreIndices(rankIS, &ranks);
1588: ISDestroy(&rankIS);
1589: PetscFree(adj);
1590: return(0);
1591: }
1595: /*@
1596: DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1598: Input Parameters:
1599: + dm - The DM
1600: - label - DMLabel assigning ranks to remote roots
1602: Level: developer
1604: Note: This is required when generating multi-level overlaps to capture
1605: overlap points from non-neighbouring partitions.
1607: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1608: @*/
1609: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1610: {
1611: MPI_Comm comm;
1612: PetscMPIInt rank;
1613: PetscSF sfPoint;
1614: DMLabel lblRoots, lblLeaves;
1615: IS rankIS, pointIS;
1616: const PetscInt *ranks;
1617: PetscInt numRanks, r;
1618: PetscErrorCode ierr;
1621: PetscObjectGetComm((PetscObject) dm, &comm);
1622: MPI_Comm_rank(comm, &rank);
1623: DMGetPointSF(dm, &sfPoint);
1624: /* Pull point contributions from remote leaves into local roots */
1625: DMLabelGather(label, sfPoint, &lblLeaves);
1626: DMLabelGetValueIS(lblLeaves, &rankIS);
1627: ISGetLocalSize(rankIS, &numRanks);
1628: ISGetIndices(rankIS, &ranks);
1629: for (r = 0; r < numRanks; ++r) {
1630: const PetscInt remoteRank = ranks[r];
1631: if (remoteRank == rank) continue;
1632: DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
1633: DMLabelInsertIS(label, pointIS, remoteRank);
1634: ISDestroy(&pointIS);
1635: }
1636: ISRestoreIndices(rankIS, &ranks);
1637: ISDestroy(&rankIS);
1638: DMLabelDestroy(&lblLeaves);
1639: /* Push point contributions from roots into remote leaves */
1640: DMLabelDistribute(label, sfPoint, &lblRoots);
1641: DMLabelGetValueIS(lblRoots, &rankIS);
1642: ISGetLocalSize(rankIS, &numRanks);
1643: ISGetIndices(rankIS, &ranks);
1644: for (r = 0; r < numRanks; ++r) {
1645: const PetscInt remoteRank = ranks[r];
1646: if (remoteRank == rank) continue;
1647: DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
1648: DMLabelInsertIS(label, pointIS, remoteRank);
1649: ISDestroy(&pointIS);
1650: }
1651: ISRestoreIndices(rankIS, &ranks);
1652: ISDestroy(&rankIS);
1653: DMLabelDestroy(&lblRoots);
1654: return(0);
1655: }
1659: /*@
1660: DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1662: Input Parameters:
1663: + dm - The DM
1664: . rootLabel - DMLabel assigning ranks to local roots
1665: - processSF - A star forest mapping into the local index on each remote rank
1667: Output Parameter:
1668: . leafLabel - DMLabel assigning ranks to remote roots
1670: Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1671: resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1673: Level: developer
1675: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1676: @*/
1677: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
1678: {
1679: MPI_Comm comm;
1680: PetscMPIInt rank, numProcs;
1681: PetscInt p, n, numNeighbors, size, l, nleaves;
1682: PetscSF sfPoint;
1683: PetscSFNode *rootPoints, *leafPoints;
1684: PetscSection rootSection, leafSection;
1685: const PetscSFNode *remote;
1686: const PetscInt *local, *neighbors;
1687: IS valueIS;
1688: PetscErrorCode ierr;
1691: PetscObjectGetComm((PetscObject) dm, &comm);
1692: MPI_Comm_rank(comm, &rank);
1693: MPI_Comm_size(comm, &numProcs);
1694: DMGetPointSF(dm, &sfPoint);
1696: /* Convert to (point, rank) and use actual owners */
1697: PetscSectionCreate(comm, &rootSection);
1698: PetscSectionSetChart(rootSection, 0, numProcs);
1699: DMLabelGetValueIS(rootLabel, &valueIS);
1700: ISGetLocalSize(valueIS, &numNeighbors);
1701: ISGetIndices(valueIS, &neighbors);
1702: for (n = 0; n < numNeighbors; ++n) {
1703: PetscInt numPoints;
1705: DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
1706: PetscSectionAddDof(rootSection, neighbors[n], numPoints);
1707: }
1708: PetscSectionSetUp(rootSection);
1709: PetscSectionGetStorageSize(rootSection, &size);
1710: PetscMalloc1(size, &rootPoints);
1711: PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
1712: for (n = 0; n < numNeighbors; ++n) {
1713: IS pointIS;
1714: const PetscInt *points;
1715: PetscInt off, numPoints, p;
1717: PetscSectionGetOffset(rootSection, neighbors[n], &off);
1718: DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
1719: ISGetLocalSize(pointIS, &numPoints);
1720: ISGetIndices(pointIS, &points);
1721: for (p = 0; p < numPoints; ++p) {
1722: if (local) {PetscFindInt(points[p], nleaves, local, &l);}
1723: else {l = -1;}
1724: if (l >= 0) {rootPoints[off+p] = remote[l];}
1725: else {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
1726: }
1727: ISRestoreIndices(pointIS, &points);
1728: ISDestroy(&pointIS);
1729: }
1730: ISRestoreIndices(valueIS, &neighbors);
1731: ISDestroy(&valueIS);
1732: /* Communicate overlap */
1733: PetscSectionCreate(comm, &leafSection);
1734: DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
1735: /* Filter remote contributions (ovLeafPoints) into the overlapSF */
1736: PetscSectionGetStorageSize(leafSection, &size);
1737: for (p = 0; p < size; p++) {
1738: DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
1739: }
1740: PetscFree(rootPoints);
1741: PetscSectionDestroy(&rootSection);
1742: PetscFree(leafPoints);
1743: PetscSectionDestroy(&leafSection);
1744: return(0);
1745: }
1749: /*@
1750: DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1752: Input Parameters:
1753: + dm - The DM
1754: - label - DMLabel assigning ranks to remote roots
1756: Output Parameter:
1757: . sf - The star forest communication context encapsulating the defined mapping
1759: Note: The incoming label is a receiver mapping of remote points to their parent rank.
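  A typical migration sequence inverts a local root label and then builds the
  SF (a sketch following the routines in this file; here rootLabel, leafLabel,
  processSF, and migrationSF are assumed to have been created or obtained
  beforehand, and error checking is omitted):
.vb
  DMPlexPartitionLabelInvert(dm, rootLabel, processSF, leafLabel);
  DMPlexPartitionLabelCreateSF(dm, leafLabel, &migrationSF);
.ve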
1761: Level: developer
1763: .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1764: @*/
1765: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1766: {
1767: PetscMPIInt rank, numProcs;
1768: PetscInt n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1769: PetscSFNode *remotePoints;
1770: IS remoteRootIS;
1771: const PetscInt *remoteRoots;
1775: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
1776: MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
1778: for (numRemote = 0, n = 0; n < numProcs; ++n) {
1779: DMLabelGetStratumSize(label, n, &numPoints);
1780: numRemote += numPoints;
1781: }
1782: PetscMalloc1(numRemote, &remotePoints);
1783: /* Put owned points first */
1784: DMLabelGetStratumSize(label, rank, &numPoints);
1785: if (numPoints > 0) {
1786: DMLabelGetStratumIS(label, rank, &remoteRootIS);
1787: ISGetIndices(remoteRootIS, &remoteRoots);
1788: for (p = 0; p < numPoints; p++) {
1789: remotePoints[idx].index = remoteRoots[p];
1790: remotePoints[idx].rank = rank;
1791: idx++;
1792: }
1793: ISRestoreIndices(remoteRootIS, &remoteRoots);
1794: ISDestroy(&remoteRootIS);
1795: }
1796: /* Now add remote points */
1797: for (n = 0; n < numProcs; ++n) {
1798: DMLabelGetStratumSize(label, n, &numPoints);
1799: if (numPoints <= 0 || n == rank) continue;
1800: DMLabelGetStratumIS(label, n, &remoteRootIS);
1801: ISGetIndices(remoteRootIS, &remoteRoots);
1802: for (p = 0; p < numPoints; p++) {
1803: remotePoints[idx].index = remoteRoots[p];
1804: remotePoints[idx].rank = n;
1805: idx++;
1806: }
1807: ISRestoreIndices(remoteRootIS, &remoteRoots);
1808: ISDestroy(&remoteRootIS);
1809: }
1810: PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
1811: DMPlexGetChart(dm, &pStart, &pEnd);
1812: PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
1813: return(0);
1814: }