Actual source code: partsimple.c

#include <petscvec.h>
#include <petsc/private/partitionerimpl.h>

typedef struct {
  PetscBool useGrid;        /* Flag to use a grid layout */
  PetscInt  gridDim;        /* The grid dimension */
  PetscInt  nodeGrid[3];    /* Dimension of node grid */
  PetscInt  processGrid[3]; /* Dimension of local process grid on each node */
} PetscPartitioner_Simple;

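/* useGrid is turned on as soon as either grid option is given; gridDim records how many
   grid dimensions were specified and stays negative until then */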
static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscFree(part->data);
  return 0;
}

static PetscErrorCode PetscPartitionerView_Simple_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  /* Nothing is printed for the simple partitioner */
  return 0;
}

static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) PetscPartitionerView_Simple_ASCII(part, viewer);
  return 0;
}

static PetscErrorCode PetscPartitionerSetFromOptions_Simple(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
  PetscInt                 num, i;
  PetscBool                flg;

  for (i = 0; i < 3; ++i) p->processGrid[i] = p->nodeGrid[i] = 1;
  PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Simple Options");
  num  = 3;
  PetscOptionsIntArray("-petscpartitioner_simple_node_grid", "Number of nodes in each dimension", "", p->nodeGrid, &num, &flg);
  if (flg) {p->useGrid = PETSC_TRUE; p->gridDim = num;}
  num  = 3;
  PetscOptionsIntArray("-petscpartitioner_simple_process_grid", "Number of local processes in each dimension for a given node", "", p->processGrid, &num, &flg);
  if (flg) {
    p->useGrid = PETSC_TRUE;
    if (p->gridDim < 0) p->gridDim = num;
  }
  PetscOptionsTail();
  return 0;
}

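/* Grid variant: cells are assumed to be numbered lexicographically on a structured grid
   with cells[d] = nodes[d]*procs[d]*pcells[d] cells in dimension d. Each position of the
   node x process grid receives the contiguous pcells[0] x pcells[1] x pcells[2] block of
   cells sitting at that position. */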
static PetscErrorCode PetscPartitionerPartition_Simple_Grid(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
  const PetscInt          *nodes = p->nodeGrid;
  const PetscInt          *procs = p->processGrid;
  PetscInt                *cellproc, *offsets, cells[3] = {1, 1, 1}, pcells[3] = {1, 1, 1};
  PetscInt                 Np    = 1, Nr, np, nk, nj, ni, pk, pj, pi, ck, cj, ci, i;
  MPI_Comm                 comm;
  PetscMPIInt              size;

  if (vertSection)   PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights when using grid partition\n");
  if (targetSection) PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores partition weights when using grid partition\n");
  PetscObjectGetComm((PetscObject) part, &comm);
  MPI_Comm_size(comm, &size);
  /* Check grid: the node/process grid must supply exactly one partition per process */
  for (i = 0; i < 3; ++i) Np *= nodes[i]*procs[i];
  if (Np != nparts) SETERRQ(comm, PETSC_ERR_ARG_INCOMP, "The node/process grid size does not match the number of partitions");
  if (Np != size)   SETERRQ(comm, PETSC_ERR_ARG_INCOMP, "The node/process grid size does not match the communicator size");
  for (i = 0; i < p->gridDim; ++i) cells[i] = nodes[i]*procs[i];
  /* Double the grid in every dimension until it holds (about) numVertices cells */
  Nr = numVertices / nparts;
  while (Nr > 1) {
    for (i = 0; i < p->gridDim; ++i) {
      cells[i] *= 2;
      Nr       /= 2;
    }
  }
  for (i = 0; i < p->gridDim; ++i) {
    pcells[i] = cells[i] / (nodes[i]*procs[i]);
  }
  /* Compute sizes */
  for (np = 0; np < nparts; ++np) PetscSectionSetDof(partSection, np, numVertices/nparts);
  PetscSectionSetUp(partSection);
  PetscCalloc1(nparts, &offsets);
  for (np = 0; np < nparts; ++np) PetscSectionGetOffset(partSection, np, &offsets[np]);
  if (!numVertices) pcells[0] = pcells[1] = pcells[2] = 0;
  /* Compute partition */
  PetscMalloc1(numVertices, &cellproc);
  for (nk = 0; nk < nodes[2]; ++nk) {
    for (nj = 0; nj < nodes[1]; ++nj) {
      for (ni = 0; ni < nodes[0]; ++ni) {
        const PetscInt nid = (nk*nodes[1] + nj)*nodes[0] + ni;

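        /* pid below is the lexicographic rank of process (pi, pj, pk) on node nid,
           i.e. the partition that receives the cells enumerated in the innermost loops */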
        for (pk = 0; pk < procs[2]; ++pk) {
          for (pj = 0; pj < procs[1]; ++pj) {
            for (pi = 0; pi < procs[0]; ++pi) {
              const PetscInt pid = ((nid*procs[2] + pk)*procs[1] + pj)*procs[0] + pi;

              /* Assume that cells are originally numbered lexicographically:
                 cid = (z*cells[1] + y)*cells[0] + x with global indices
                 x = (ni*procs[0] + pi)*pcells[0] + ci, and likewise for y and z */
              for (ck = 0; ck < pcells[2]; ++ck) {
                for (cj = 0; cj < pcells[1]; ++cj) {
                  for (ci = 0; ci < pcells[0]; ++ci) {
                    const PetscInt cid = (((nk*procs[2] + pk)*pcells[2] + ck)*cells[1] + ((nj*procs[1] + pj)*pcells[1] + cj))*cells[0] + (ni*procs[0] + pi)*pcells[0] + ci;

                    cellproc[offsets[pid]++] = cid;
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  PetscFree(offsets);
  ISCreateGeneral(PETSC_COMM_SELF, numVertices, cellproc, PETSC_OWN_POINTER, partition);
  return 0;
}

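/* Default variant: keep the existing global numbering and cut it into nparts contiguous
   chunks, either (nearly) equal in size or sized proportionally to the weights found in
   targetSection when one is given. */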
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
  MPI_Comm                 comm;
  PetscInt                 np, *tpwgts = NULL, sumw = 0, numVerticesGlobal = 0;
  PetscMPIInt              size;

  if (p->useGrid) {
    PetscPartitionerPartition_Simple_Grid(part, nparts, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);
    return 0;
  }
  if (vertSection) PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights\n");
  PetscObjectGetComm((PetscObject) part, &comm);
  MPI_Comm_size(comm, &size);
  if (targetSection) {
    MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);
    PetscCalloc1(nparts, &tpwgts);
    for (np = 0; np < nparts; ++np) {
      PetscSectionGetDof(targetSection, np, &tpwgts[np]);
      sumw += tpwgts[np];
    }
    if (sumw) {
      PetscInt m, mp;

      /* Scale the target weights to vertex counts, then give any integer-rounding slack to the largest part */
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np]*numVerticesGlobal)/sumw;
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) { m = tpwgts[np]; mp = np; }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
    /* If the weights sum to zero (no vertices or all-zero targets), fall back to an equal split;
       PetscFree() also nulls the pointer */
    if (!sumw) PetscFree(tpwgts);
  }

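  /* Local vertices keep their order: the IS is the identity, and the partition is given
     entirely by how many consecutive vertices partSection assigns to each part */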
  ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) {
        PetscSectionSetDof(partSection, np, tpwgts[np]);
      }
    } else {
      for (np = 0; np < nparts; ++np) {
        PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));
      }
    }
  } else {
    if (tpwgts) {
      Vec          v;
      PetscScalar *array;
      PetscInt     st, j;
      PetscMPIInt  rank;

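      /* Encode the target layout in a global vector: entry i of v is the part that should
         own global vertex i. Each rank then reads back its local block of v and counts
         how many of its vertices land in each part. */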
      VecCreate(comm, &v);
      VecSetSizes(v, numVertices, numVerticesGlobal);
      VecSetType(v, VECSTANDARD);
      MPI_Comm_rank(comm, &rank);
      for (np = 0, st = 0; np < nparts; ++np) {
        /* Rank np fills in its part; the last rank covers any parts beyond the communicator size */
        if (rank == np || (rank == size-1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) {
            VecSetValue(v, st+j, (PetscScalar) np, INSERT_VALUES);
          }
        }
        st += tpwgts[np];
      }
      VecAssemblyBegin(v);
      VecAssemblyEnd(v);
      VecGetArray(v, &array);
      for (j = 0; j < numVertices; ++j) {
        PetscSectionAddDof(partSection, (PetscInt) PetscRealPart(array[j]), 1);
      }
      VecRestoreArray(v, &array);
      VecDestroy(&v);
    } else {
      PetscMPIInt rank;
      PetscInt    nvGlobal, *offsets, myFirst, myLast;

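      /* Gather the local vertex counts and prefix-sum them to get this rank's global range
         [myFirst, myLast]. Parts are contiguous blocks of nvGlobal/nparts vertices, with the
         first nvGlobal % nparts parts holding one extra vertex. */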
      PetscMalloc1(size+1, &offsets);
      offsets[0] = 0;
      MPI_Allgather(&numVertices, 1, MPIU_INT, &offsets[1], 1, MPIU_INT, comm);
      for (np = 2; np <= size; np++) {
        offsets[np] += offsets[np-1];
      }
      nvGlobal = offsets[size];
      MPI_Comm_rank(comm, &rank);
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      PetscFree(offsets);
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal/nparts;
        PetscInt pBig = nvGlobal/nparts + 1;

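        /* The pBig-sized parts come first (there are rem of them); find the parts containing
           myFirst and myLast */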
        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast  / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal/nparts);
          lastPart  = myLast  / (nvGlobal/nparts);
        }

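        /* Record the size of the overlap between [myFirst, myLast] and each part this rank touches */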
        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts, np);
          PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts, np+1);

          PartStart = PetscMax(PartStart, myFirst);
          PartEnd   = PetscMin(PartEnd, myLast+1);
          PetscSectionSetDof(partSection, np, PartEnd-PartStart);
        }
      }
    }
  }
  PetscFree(tpwgts);
  return 0;
}

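/* None of the partition routines above read start[] or adjacency[], so noGraph is set below,
   signalling that no adjacency information is needed */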
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  part->noGraph             = PETSC_TRUE;
  part->ops->view           = PetscPartitionerView_Simple;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Simple;
  part->ops->destroy        = PetscPartitionerDestroy_Simple;
  part->ops->partition      = PetscPartitionerPartition_Simple;
  return 0;
}

/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that assigns each process a contiguous block of cells

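  Options Database Keys:
+ -petscpartitioner_simple_node_grid <n0,n1,n2>    - Number of nodes in each dimension
- -petscpartitioner_simple_process_grid <p0,p1,p2> - Number of local processes in each dimension for a given node
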
  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;

  PetscNewLog(part, &p);
  p->gridDim = -1; /* negative means no grid has been requested */
  part->data = p;

  PetscPartitionerInitialize_Simple(part);
  return 0;
}