Actual source code: ex1.c

  1: static char help[] = "Tests various DMPlex routines to construct, refine and distribute a mesh.\n\n";

  3: #include <petscdmplex.h>
  4: #include <petscdmplextransform.h>
  5: #include <petscsf.h>

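     /* Log stages used to time the main phases of the example: mesh load, distribution, refinement, and overlap creation */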
  7: enum {
  8:   STAGE_LOAD,
  9:   STAGE_DISTRIBUTE,
 10:   STAGE_REFINE,
 11:   STAGE_OVERLAP
 12: };

 14: typedef struct {
 15:   PetscLogEvent createMeshEvent;
 16:   PetscLogStage stages[4];
 17:   /* Domain and mesh definition */
 18:   PetscInt  dim;     /* The topological mesh dimension */
 19:   PetscInt  overlap; /* The cell overlap to use during partitioning */
 20:   PetscBool testp4est[2];
 21:   PetscBool redistribute;
 22:   PetscBool final_ref;         /* Run refinement at the end */
 23:   PetscBool final_diagnostics; /* Run diagnostics on the final mesh */
 24: } AppCtx;

 26: PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 27: {
 28:   PetscFunctionBegin;
 29:   options->dim               = 2;
 30:   options->overlap           = 0;
 31:   options->testp4est[0]      = PETSC_FALSE;
 32:   options->testp4est[1]      = PETSC_FALSE;
 33:   options->redistribute      = PETSC_FALSE;
 34:   options->final_ref         = PETSC_FALSE;
 35:   options->final_diagnostics = PETSC_TRUE;

 37:   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
 38:   PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, NULL, 1, 3));
 39:   PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex1.c", options->overlap, &options->overlap, NULL, 0));
 40:   PetscCall(PetscOptionsBool("-test_p4est_seq", "Test p4est with sequential base DM", "ex1.c", options->testp4est[0], &options->testp4est[0], NULL));
 41:   PetscCall(PetscOptionsBool("-test_p4est_par", "Test p4est with parallel base DM", "ex1.c", options->testp4est[1], &options->testp4est[1], NULL));
 42:   PetscCall(PetscOptionsBool("-test_redistribute", "Test redistribution", "ex1.c", options->redistribute, &options->redistribute, NULL));
 43:   PetscCall(PetscOptionsBool("-final_ref", "Run uniform refinement on the final mesh", "ex1.c", options->final_ref, &options->final_ref, NULL));
 44:   PetscCall(PetscOptionsBool("-final_diagnostics", "Run diagnostics on the final mesh", "ex1.c", options->final_diagnostics, &options->final_diagnostics, NULL));
 45:   PetscOptionsEnd();

 47:   PetscCall(PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent));
 48:   PetscCall(PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]));
 49:   PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
 50:   PetscCall(PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]));
 51:   PetscCall(PetscLogStageRegister("MeshOverlap", &options->stages[STAGE_OVERLAP]));
 52:   PetscFunctionReturn(PETSC_SUCCESS);
 53: }

 55: PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
 56: {
 57:   PetscInt    dim           = user->dim;
 58:   PetscBool   testp4est_seq = user->testp4est[0];
 59:   PetscBool   testp4est_par = user->testp4est[1];
 60:   PetscMPIInt rank, size;

 62:   PetscFunctionBegin;
 63:   PetscCall(PetscLogEventBegin(user->createMeshEvent, 0, 0, 0, 0));
 64:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
 65:   PetscCallMPI(MPI_Comm_size(comm, &size));
 66:   PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
 67:   PetscCall(DMCreate(comm, dm));
 68:   PetscCall(DMSetType(*dm, DMPLEX));
 69:   PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
 70:   PetscCall(DMSetFromOptions(*dm));
 71:   PetscCall(DMLocalizeCoordinates(*dm));

 73:   PetscCall(DMViewFromOptions(*dm, NULL, "-init_dm_view"));
 74:   PetscCall(DMGetDimension(*dm, &dim));

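       /* Optionally (-test_p4est_seq) refine the serial DMPlex to box cells, convert it to a
          p4est/p8est forest configured through the conv_seq_1_ options, and convert it back to DMPlex */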
 76:   if (testp4est_seq) {
 77:     PetscCheck(PetscDefined(HAVE_P4EST), PETSC_COMM_WORLD, PETSC_ERR_SUP, "Reconfigure PETSc with --download-p4est");
 78:     DM dmConv = NULL;

 80:     PetscCall(DMPlexCheck(*dm));
 81:     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
 82:     PetscCall(DMPlexSetTransformType(*dm, DMPLEXREFINETOBOX));
 83:     PetscCall(DMRefine(*dm, PETSC_COMM_WORLD, &dmConv));
 84:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
 85:     if (dmConv) {
 86:       PetscCall(DMDestroy(dm));
 87:       *dm = dmConv;
 88:     }
 89:     PetscCall(DMViewFromOptions(*dm, NULL, "-initref_dm_view"));
 90:     PetscCall(DMPlexCheck(*dm));

 92:     /* For topologically periodic meshes, we first localize coordinates
 93:        and then remove any information related to the
 94:        automatic computation of localized vertices.
 95:        This way, refinement operations and conversions to p4est
 96:        will preserve the shape of the domain in physical space */
 97:     PetscCall(DMSetPeriodicity(*dm, NULL, NULL, NULL));

 99:     PetscCall(DMConvert(*dm, dim == 2 ? DMP4EST : DMP8EST, &dmConv));
100:     if (dmConv) {
101:       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dmConv, "conv_seq_1_"));
102:       PetscCall(DMSetFromOptions(dmConv));
103:       PetscCall(DMDestroy(dm));
104:       *dm = dmConv;
105:     }
106:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "conv_seq_1_"));
107:     PetscCall(DMSetUp(*dm));
108:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
109:     PetscCall(DMConvert(*dm, DMPLEX, &dmConv));
110:     if (dmConv) {
111:       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dmConv, "conv_seq_2_"));
112:       PetscCall(DMPlexDistributeSetDefault(dmConv, PETSC_FALSE));
113:       PetscCall(DMSetFromOptions(dmConv));
114:       PetscCall(DMDestroy(dm));
115:       *dm = dmConv;
116:     }
117:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "conv_seq_2_"));
118:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
119:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
120:   }

122:   PetscCall(PetscLogStagePop());
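       /* Distribute the mesh; distribution is controlled by options with the dist_ prefix
          (e.g. -dist_dm_distribute -petscpartitioner_type simple, as in the tests below) */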
123:   if (!testp4est_seq) {
124:     PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
125:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_dist_view"));
126:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "dist_"));
127:     PetscCall(DMSetFromOptions(*dm));
128:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
129:     PetscCall(PetscLogStagePop());
130:     PetscCall(DMViewFromOptions(*dm, NULL, "-distributed_dm_view"));
131:   }
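       /* Refine the mesh; refinement is controlled by options with the ref_ prefix (e.g. -ref_dm_refine) */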
132:   PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
133:   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "ref_"));
134:   PetscCall(DMSetFromOptions(*dm));
135:   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
136:   PetscCall(PetscLogStagePop());

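       /* Same p4est round trip as above (-test_p4est_par), run after the distribution and refinement stages
          and configured through the conv_par_1_/conv_par_2_ options */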
138:   if (testp4est_par) {
139:     PetscCheck(PetscDefined(HAVE_P4EST), PETSC_COMM_WORLD, PETSC_ERR_SUP, "Reconfigure PETSc with --download-p4est");
140:     DM dmConv = NULL;

142:     PetscCall(DMPlexCheck(*dm));
143:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_tobox_view"));
144:     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
145:     PetscCall(DMPlexSetTransformType(*dm, DMPLEXREFINETOBOX));
146:     PetscCall(DMRefine(*dm, PETSC_COMM_WORLD, &dmConv));
147:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
148:     if (dmConv) {
149:       PetscCall(DMDestroy(dm));
150:       *dm = dmConv;
151:     }
152:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_tobox_view"));
153:     PetscCall(DMPlexCheck(*dm));

155:     PetscCall(DMConvert(*dm, dim == 2 ? DMP4EST : DMP8EST, &dmConv));
156:     if (dmConv) {
157:       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dmConv, "conv_par_1_"));
158:       PetscCall(DMSetFromOptions(dmConv));
159:       PetscCall(DMDestroy(dm));
160:       *dm = dmConv;
161:     }
162:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "conv_par_1_"));
163:     PetscCall(DMSetUp(*dm));
164:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
165:     PetscCall(DMConvert(*dm, DMPLEX, &dmConv));
166:     if (dmConv) {
167:       PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dmConv, "conv_par_2_"));
168:       PetscCall(DMPlexDistributeSetDefault(dmConv, PETSC_FALSE));
169:       PetscCall(DMSetFromOptions(dmConv));
170:       PetscCall(DMDestroy(dm));
171:       *dm = dmConv;
172:     }
173:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "conv_par_2_"));
174:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
175:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, NULL));
176:   }

 178:   /* Test redistribution of an already distributed mesh */
179:   if (user->redistribute) {
180:     DM       distributedMesh;
181:     PetscSF  sf;
182:     PetscInt nranks;

184:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_redist_view"));
185:     PetscCall(DMPlexDistribute(*dm, 0, NULL, &distributedMesh));
186:     if (distributedMesh) {
187:       PetscCall(DMGetPointSF(distributedMesh, &sf));
188:       PetscCall(PetscSFSetUp(sf));
189:       PetscCall(DMGetNeighbors(distributedMesh, &nranks, NULL));
190:       PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &nranks, 1, MPIU_INT, MPI_MIN, PetscObjectComm((PetscObject)*dm)));
191:       PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)*dm)), "Minimum number of neighbors: %" PetscInt_FMT "\n", nranks));
192:       PetscCall(DMDestroy(dm));
193:       *dm = distributedMesh;
194:     }
195:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_post_redist_view"));
196:   }

198:   if (user->overlap) {
199:     DM overlapMesh = NULL;

 201:     /* Add the overlap to the refined mesh */
202:     PetscCall(PetscLogStagePush(user->stages[STAGE_OVERLAP]));
203:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_overlap_view"));
204:     PetscCall(DMPlexDistributeOverlap(*dm, user->overlap, NULL, &overlapMesh));
205:     if (overlapMesh) {
206:       PetscInt overlap;
207:       PetscCall(DMPlexGetOverlap(overlapMesh, &overlap));
208:       PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "Overlap: %" PetscInt_FMT "\n", overlap));
209:       PetscCall(DMDestroy(dm));
210:       *dm = overlapMesh;
211:     }
212:     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_post_overlap_view"));
213:     PetscCall(PetscLogStagePop());
214:   }
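       /* Optionally (-final_ref) apply one more round of uniform refinement to the final mesh */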
215:   if (user->final_ref) {
216:     DM refinedMesh = NULL;

218:     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
219:     PetscCall(DMRefine(*dm, comm, &refinedMesh));
220:     if (refinedMesh) {
221:       PetscCall(DMDestroy(dm));
222:       *dm = refinedMesh;
223:     }
224:   }

226:   PetscCall(PetscObjectSetName((PetscObject)*dm, "Generated Mesh"));
227:   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
228:   if (user->final_diagnostics) PetscCall(DMPlexCheck(*dm));
229:   PetscCall(PetscLogEventEnd(user->createMeshEvent, 0, 0, 0, 0));
230:   PetscFunctionReturn(PETSC_SUCCESS);
231: }

233: int main(int argc, char **argv)
234: {
235:   DM     dm;
236:   AppCtx user;

238:   PetscFunctionBeginUser;
239:   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
240:   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
241:   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
242:   PetscCall(DMDestroy(&dm));
243:   PetscCall(PetscFinalize());
244:   return 0;
245: }
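     /* Example invocation, mirroring test 4 below (assumes a PETSc build with Triangle):
          mpiexec -n 2 ./ex1 -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info_detail
     */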

247: /*TEST

249:   # CTetGen 0-1
250:   test:
251:     suffix: 0
252:     requires: ctetgen
253:     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_view ascii::ascii_info_detail -info :~sys
254:   test:
255:     suffix: 1
256:     requires: ctetgen
257:     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail -info :~sys

 259:   # 2D LaTeX and ASCII output 2-9
260:   test:
261:     suffix: 2
262:     requires: triangle
263:     args: -dm_plex_interpolate 0 -dm_view ascii::ascii_latex
264:   test:
265:     suffix: 3
266:     requires: triangle
267:     args: -ref_dm_refine 1 -dm_view ascii::ascii_info_detail
268:   test:
269:     suffix: 4
270:     requires: triangle
271:     nsize: 2
272:     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info_detail
273:   test:
274:     suffix: 5
275:     requires: triangle
276:     nsize: 2
277:     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex
278:   test:
279:     suffix: 6
280:     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_view ascii::ascii_info_detail
281:   test:
282:     suffix: 7
283:     args: -dm_coord_space 0 -dm_plex_simplex 0 -ref_dm_refine 1 -dm_view ascii::ascii_info_detail
284:   test:
285:     suffix: 8
286:     nsize: 2
287:     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex
288:   test:
289:     suffix: box_2d_latex_xper
290:     nsize: 1
291:     args: -dm_plex_simplex 0 -dm_plex_box_faces 5,5 -dm_plex_box_bd periodic,none \
292:           -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex -dm_plex_view_edges 0

294:   # 1D ASCII output
295:   testset:
296:     args: -dm_coord_space 0 -dm_plex_dim 1 -dm_view ascii::ascii_info_detail -dm_plex_check_all
297:     test:
298:       suffix: 1d_0
299:       args:
300:     test:
301:       suffix: 1d_1
302:       args: -ref_dm_refine 2
303:     test:
304:       suffix: 1d_2
305:       args: -dm_plex_box_faces 5 -dm_plex_box_bd periodic

307:   # Parallel refinement tests with overlap
308:   test:
309:     suffix: refine_overlap_1d
310:     nsize: 2
 311:     args: -dm_plex_dim 1 -dim 1 -dm_plex_box_faces 4 -ref_dm_refine 1 -overlap {{0 1 2}separate output} -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info
312:   test:
313:     suffix: refine_overlap_2d
314:     requires: triangle
315:     nsize: {{2 8}separate output}
316:     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -overlap {{0 1 2}separate output} -dm_view ascii::ascii_info

318:   # Parallel extrusion tests
319:   test:
320:     suffix: 1d_extruded
321:     args: -dm_plex_dim 1 -dm_plex_box_faces 5 -dm_extrude 3 -dm_plex_check_all -dm_view draw

323:   test:
324:     # This test needs a non-tensor prism so we can make a coordinate space
325:     suffix: spheresurface_extruded
 326:     nsize: 4
327:     args: -dm_plex_shape sphere -dm_extrude 3 -dm_plex_transform_extrude_use_tensor 0 \
328:           -dist_dm_distribute -petscpartitioner_type simple \
329:           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical

331:   test:
332:     # This test needs a non-tensor prism so we can make a coordinate space
333:     suffix: spheresurface_extruded_symmetric
 334:     nsize: 4
335:     args: -dm_plex_shape sphere -dm_extrude 3 -dm_plex_transform_extrude_use_tensor 0 -dm_plex_transform_extrude_symmetric \
336:           -dist_dm_distribute -petscpartitioner_type simple \
337:           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical

339:   test:
340:     # Test with a tensor prism which cannot have a coordinate space
341:     suffix: spheresurface_extruded_nocoord
 342:     nsize: 4
343:     args: -dm_coord_space 0 -dm_plex_shape sphere -dm_extrude 3 \
344:           -dist_dm_distribute -petscpartitioner_type simple \
345:           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical

347:   # Parallel simple partitioner tests
348:   test:
349:     suffix: part_simple_0
350:     requires: triangle
351:     nsize: 2
352:     args: -dm_coord_space 0 -dm_plex_interpolate 0 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail
353:   test:
354:     suffix: part_simple_1
355:     requires: triangle
356:     nsize: 8
357:     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail

359:   # Parallel partitioner tests
360:   test:
361:     suffix: part_parmetis_0
362:     requires: parmetis
363:     nsize: 2
364:     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type parmetis -dm_view -petscpartitioner_view -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
365:   test:
366:     suffix: part_ptscotch_0
367:     requires: ptscotch
368:     nsize: 2
369:     args: -dm_plex_simplex 0 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_strategy quality -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
370:   test:
371:     suffix: part_ptscotch_1
372:     requires: ptscotch
373:     nsize: 8
374:     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_imbalance 0.1

376:   # CGNS reader tests 10-11 (need to find smaller test meshes)
377:   test:
378:     suffix: cgns_0
379:     requires: cgns
380:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/tut21.cgns -dm_view

382:   # ExodusII reader tests
383:   testset:
384:     args: -dm_plex_boundary_label boundary -dm_plex_check_all -dm_view
385:     test:
386:       suffix: exo_0
387:       requires: exodusii
388:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/sevenside-quad.exo
389:     test:
390:       suffix: exo_1
391:       requires: exodusii
392:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/sevenside-quad-15.exo
393:     test:
394:       suffix: exo_2
395:       requires: exodusii
396:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/squaremotor-30.exo
397:     test:
398:       suffix: exo_3
399:       requires: exodusii
400:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
401:     test:
402:       suffix: exo_4
403:       requires: exodusii
404:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/simpleblock-100.exo
405:     test:
406:       suffix: exo_1d_0
407:       requires: exodusii
408:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/1d-2elems.e

410:   # Gmsh mesh reader tests
411:   testset:
412:     args: -dm_coord_space 0 -dm_view

414:     test:
415:       suffix: gmsh_0
416:       requires: !single
417:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
418:     test:
419:       suffix: gmsh_1
420:       requires: !single
421:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh
422:     test:
423:       suffix: gmsh_2
424:       requires: !single
425:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh
426:     test:
427:       suffix: gmsh_3
428:       nsize: 3
429:       requires: !single
430:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -dist_dm_distribute -petscpartitioner_type simple
431:     test:
432:       suffix: gmsh_4
433:       nsize: 3
434:       requires: !single
435:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dist_dm_distribute -petscpartitioner_type simple
436:     test:
437:       suffix: gmsh_5
438:       requires: !single
439:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_quad.msh
 440:     # TODO: it seems the mesh is not a valid Gmsh file (inverted cell)
441:     test:
442:       suffix: gmsh_6
443:       requires: !single
444:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -final_diagnostics 0
445:     test:
446:       suffix: gmsh_7
447:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all
448:     test:
449:       suffix: gmsh_8
450:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh -dm_view ::ascii_info_detail -dm_plex_check_all
451:   testset:
452:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all
453:     test:
454:       suffix: gmsh_9
455:     test:
456:       suffix: gmsh_9_periodic_0
457:       args: -dm_plex_gmsh_periodic 0
458:   testset:
459:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all
460:     test:
461:       suffix: gmsh_10
462:     test:
463:       suffix: gmsh_10_periodic_0
464:       args: -dm_plex_gmsh_periodic 0
465:   testset:
466:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all -ref_dm_refine 1
467:     test:
468:       suffix: gmsh_11
469:     test:
470:       suffix: gmsh_11_periodic_0
471:       args: -dm_plex_gmsh_periodic 0
 472:   # TODO: it seems the mesh is not a valid Gmsh file (inverted cell)
473:   test:
474:     suffix: gmsh_12
475:     nsize: 4
476:     requires: !single mpiio
477:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -viewer_binary_mpiio -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0
478:   test:
479:     suffix: gmsh_13_hybs2t
480:     nsize: 4
481:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dist_dm_distribute -petscpartitioner_type simple -dm_view -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all
482:   test:
483:     suffix: gmsh_14_ext
484:     requires: !single
485:     args: -dm_coord_space 0 -dm_extrude 2 -dm_plex_transform_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all
486:   test:
487:     suffix: gmsh_14_ext_s2t
488:     requires: !single
489:     args: -dm_coord_space 0 -dm_extrude 2 -dm_plex_transform_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
490:   test:
491:     suffix: gmsh_15_hyb3d
492:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all
493:   test:
494:     suffix: gmsh_15_hyb3d_vtk
495:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view vtk: -dm_plex_gmsh_hybrid -dm_plex_check_all
496:   test:
497:     suffix: gmsh_15_hyb3d_s2t
498:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
499:   test:
500:     suffix: gmsh_16_spheresurface
 501:     nsize: 4
502:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
503:   test:
504:     suffix: gmsh_16_spheresurface_s2t
 505:     nsize: 4
506:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
507:   test:
508:     suffix: gmsh_16_spheresurface_extruded
 509:     nsize: 4
510:     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
511:   test:
512:     suffix: gmsh_16_spheresurface_extruded_s2t
 513:     nsize: 4
514:     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
515:   test:
516:     suffix: gmsh_17_hyb3d_interp_ascii
517:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.msh -dm_view -dm_plex_check_all
518:   test:
519:     suffix: exodus_17_hyb3d_interp_ascii
520:     requires: exodusii
521:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.exo -dm_view -dm_plex_check_all

523:   # Legacy Gmsh v22/v40 ascii/binary reader tests
524:   testset:
525:     output_file: output/ex1_gmsh_3d_legacy.out
526:     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all
527:     test:
528:       suffix: gmsh_3d_ascii_v22
529:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh2
530:     test:
531:       suffix: gmsh_3d_ascii_v40
532:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh4
533:     test:
534:       suffix: gmsh_3d_binary_v22
535:       # Could not remake binary to remove extra face labeling
536:       output_file: output/ex1_gmsh_3d_legacy_v22_bin.out
537:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh2
538:     test:
539:       suffix: gmsh_3d_binary_v40
540:       requires: long64
541:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh4

543:   # Gmsh v41 ascii/binary reader tests
544:   testset: # 32-bit mesh, sequential
545:     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
546:     output_file: output/ex1_gmsh_3d_32.out
547:     test:
548:       suffix: gmsh_3d_ascii_v41_32
549:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
550:     test:
551:       suffix: gmsh_3d_binary_v41_32
552:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
553:     test:
554:       suffix: gmsh_3d_binary_v41_32_mpiio
555:       requires: defined(PETSC_HAVE_MPIIO)
556:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
557:   test:
558:     suffix: gmsh_quad_8node
559:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-qua-8node.msh \
560:           -dm_view -dm_plex_check_all -dm_plex_gmsh_mark_vertices
561:   test:
562:     suffix: gmsh_hex_20node
563:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-hex-20node.msh \
564:           -dm_view -dm_plex_check_all -dm_plex_gmsh_mark_vertices
565:   testset:  # 32-bit mesh, parallel
566:     args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
567:     nsize: 2
568:     output_file: output/ex1_gmsh_3d_32_np2.out
569:     test:
570:       suffix: gmsh_3d_ascii_v41_32_np2
571:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
572:     test:
573:       suffix: gmsh_3d_binary_v41_32_np2
574:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
575:     test:
576:       suffix: gmsh_3d_binary_v41_32_np2_mpiio
577:       requires: defined(PETSC_HAVE_MPIIO)
578:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
579:   testset: # 64-bit mesh, sequential
580:     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
581:     output_file: output/ex1_gmsh_3d_64.out
582:     test:
583:       suffix: gmsh_3d_ascii_v41_64
584:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
585:     test:
586:       suffix: gmsh_3d_binary_v41_64
587:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
588:     test:
589:       suffix: gmsh_3d_binary_v41_64_mpiio
590:       requires: defined(PETSC_HAVE_MPIIO)
591:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
592:   testset:  # 64-bit mesh, parallel
593:     args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
594:     nsize: 2
595:     output_file: output/ex1_gmsh_3d_64_np2.out
596:     test:
597:       suffix: gmsh_3d_ascii_v41_64_np2
598:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
599:     test:
600:       suffix: gmsh_3d_binary_v41_64_np2
601:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
602:     test:
603:       suffix: gmsh_3d_binary_v41_64_np2_mpiio
604:       requires: defined(PETSC_HAVE_MPIIO)
605:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio

607:   # Fluent mesh reader tests
608:   # TODO: Geometry checks fail
609:   test:
610:     suffix: fluent_0
611:     requires: !complex
612:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dm_view -final_diagnostics 0
613:   test:
614:     suffix: fluent_1
615:     nsize: 3
616:     requires: !complex
617:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0
618:   test:
619:     suffix: fluent_2
620:     requires: !complex
621:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets_ascii.cas -dm_view -final_diagnostics 0
622:   test:
623:     suffix: fluent_3
624:     requires: !complex
 625:     TODO: Fails on non-Linux: fseek(), fileno()? https://gitlab.com/petsc/petsc/merge_requests/2206#note_238166382
626:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -dm_view -final_diagnostics 0

628:   # Test shape quality
629:   test:
630:     suffix: test_shape
631:     requires: ctetgen
632:     args: -dm_plex_dim 3 -dim 3 -dm_refine_hierarchy 3 -dm_plex_check_all -dm_plex_check_cell_shape

634:   # Test simplex to tensor conversion
635:   test:
636:     suffix: s2t2
637:     requires: triangle
638:     args: -dm_coord_space 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail

640:   test:
641:     suffix: s2t3
642:     requires: ctetgen
643:     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail

645:   # Test cylinder
646:   testset:
647:     args: -dm_plex_shape cylinder -dm_plex_check_all -dm_view
648:     test:
649:       suffix: cylinder
650:       args: -ref_dm_refine 1
651:     test:
652:       suffix: cylinder_per
653:       args: -dm_plex_cylinder_bd periodic -ref_dm_refine 1 -ref_dm_refine_remap 0
654:     test:
655:       suffix: cylinder_wedge
656:       args: -dm_coord_space 0 -dm_plex_interpolate 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk:
657:     test:
658:       suffix: cylinder_wedge_int
659:       output_file: output/ex1_cylinder_wedge.out
660:       args: -dm_coord_space 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk:

662:   test:
663:     suffix: box_2d
664:     args: -dm_plex_simplex 0 -ref_dm_refine 2 -dm_plex_check_all -dm_view

666:   test:
667:     suffix: box_2d_per
668:     args: -dm_plex_simplex 0 -ref_dm_refine 2 -dm_plex_check_all -dm_view

670:   test:
671:     suffix: box_2d_per_unint
 672:     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_interpolate 0 -dm_plex_box_faces 3,3 -dm_plex_check_all -dm_view ::ascii_info_detail

674:   test:
675:     suffix: box_3d
676:     args: -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -ref_dm_refine 3 -dm_plex_check_all -dm_view

678:   test:
679:     requires: triangle
680:     suffix: box_wedge
681:     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk: -dm_plex_check_all

683:   testset:
684:     requires: triangle
685:     args: -dm_coord_space 0 -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_plex_box_faces 2,3,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
686:     test:
687:       suffix: box_wedge_s2t
688:     test:
689:       nsize: 3
690:       args: -dist_dm_distribute -petscpartitioner_type simple
691:       suffix: box_wedge_s2t_parallel

693:   # Test GLVis output
694:   testset:
695:     args: -dm_coord_space 0 -dm_plex_interpolate 0
696:     test:
697:       suffix: glvis_2d_tet
698:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
699:     test:
700:       suffix: glvis_2d_tet_per
701:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
702:     test:
703:       suffix: glvis_3d_tet
704:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
705:   testset:
706:     args: -dm_coord_space 0
707:     test:
708:       suffix: glvis_2d_tet_per_mfem
709:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -dm_view glvis:
710:     test:
711:       suffix: glvis_2d_quad
712:       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_view glvis:
713:     test:
714:       suffix: glvis_2d_quad_per
715:       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
716:     test:
717:       suffix: glvis_2d_quad_per_shift
718:       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_plex_box_lower -1,-1 -dm_plex_box_upper 1,1 -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
719:     test:
720:       suffix: glvis_2d_quad_per_mfem
721:       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
722:     test:
723:       suffix: glvis_3d_tet_per
724:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
725:     test:
726:       suffix: glvis_3d_tet_per_mfem
727:       TODO: broken
728:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -viewer_glvis_dm_plex_enable_mfem -dm_view glvis:
729:     test:
730:       suffix: glvis_3d_hex
731:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_view glvis:
732:     test:
733:       suffix: glvis_3d_hex_per
734:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
735:     test:
736:       suffix: glvis_3d_hex_per_mfem
737:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
738:     test:
739:       suffix: glvis_2d_hyb
740:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
741:     test:
742:       suffix: glvis_3d_hyb
743:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
744:     test:
745:       suffix: glvis_3d_hyb_s2t
746:       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all

748:   # Test P4EST
749:   testset:
750:     requires: p4est
751:     args: -dm_coord_space 0 -dm_view -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 1
752:     test:
753:       suffix: p4est_periodic
754:       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
755:     test:
756:       suffix: p4est_periodic_3d
757:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
758:     test:
759:       suffix: p4est_gmsh_periodic
760:       args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
761:     test:
762:       suffix: p4est_gmsh_surface
763:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
764:     test:
765:       suffix: p4est_gmsh_surface_parallel
766:       nsize: 2
767:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -petscpartitioner_type simple -dm_view ::load_balance
768:     test:
769:       suffix: p4est_hyb_2d
770:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
771:     test:
772:       suffix: p4est_hyb_3d
773:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
774:     test:
775:       requires: ctetgen
776:       suffix: p4est_s2t_bugfaces_3d
777:       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 0 -dm_plex_dim 3 -dm_plex_box_faces 1,1
778:     test:
779:       suffix: p4est_bug_overlapsf
780:       nsize: 3
781:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple
782:     test:
783:       suffix: p4est_redistribute
784:       nsize: 3
785:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_view ::load_balance
786:     test:
787:       suffix: p4est_gmsh_s2t_3d
788:       args: -conv_seq_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
789:     test:
790:       suffix: p4est_gmsh_s2t_3d_hash
791:       args: -conv_seq_1_dm_forest_initial_refinement 1 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
792:     test:
793:       requires: long_runtime
794:       suffix: p4est_gmsh_periodic_3d
795:       args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh

797:   testset:
798:     requires: p4est
799:     nsize: 6
800:     args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 0 -dist_dm_distribute
801:     test:
802:       TODO: interface cones do not conform
803:       suffix: p4est_par_periodic
804:       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
805:     test:
806:       TODO: interface cones do not conform
807:       suffix: p4est_par_periodic_3d
808:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,periodic -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
809:     test:
810:       TODO: interface cones do not conform
811:       suffix: p4est_par_gmsh_periodic
812:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
813:     test:
814:       suffix: p4est_par_gmsh_surface
815:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
816:     test:
817:       suffix: p4est_par_gmsh_s2t_3d
818:       args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
819:     test:
820:       TODO: interface cones do not conform
821:       suffix: p4est_par_gmsh_s2t_3d_hash
822:       args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
823:     test:
824:       requires: long_runtime
825:       suffix: p4est_par_gmsh_periodic_3d
826:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh

828:   testset:
829:     requires: p4est
830:     nsize: 6
831:     args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 1 -dist_dm_distribute -petscpartitioner_type simple
832:     test:
833:       suffix: p4est_par_ovl_periodic
834:       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
 835:     # TODO: Mesh cell 201 is inverted, vol = 0 (FVM volume; is it correct? -> diagnostics disabled)
836:     test:
837:       suffix: p4est_par_ovl_periodic_3d
838:       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -final_diagnostics 0
839:     test:
840:       suffix: p4est_par_ovl_gmsh_periodic
841:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
842:     test:
843:       suffix: p4est_par_ovl_gmsh_surface
844:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
845:     test:
846:       suffix: p4est_par_ovl_gmsh_s2t_3d
847:       args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
848:     test:
849:       suffix: p4est_par_ovl_gmsh_s2t_3d_hash
850:       args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
851:     test:
852:       requires: long_runtime
853:       suffix: p4est_par_ovl_gmsh_periodic_3d
854:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
855:     test:
856:       suffix: p4est_par_ovl_hyb_2d
857:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
858:     test:
859:       suffix: p4est_par_ovl_hyb_3d
860:       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh

862:   test:
863:     TODO: broken
864:     requires: p4est
865:     nsize: 2
866:     suffix: p4est_bug_labels_noovl
867:     args: -test_p4est_seq -dm_plex_check_all -dm_forest_minimum_refinement 0 -dm_forest_partition_overlap 1 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_forest_initial_refinement 0 -dm_forest_maximum_refinement 2 -dm_p4est_refine_pattern hash -dist_dm_distribute -petscpartitioner_type simple -dm_forest_print_label_error

869:   test:
870:     requires: p4est
871:     nsize: 2
872:     suffix: p4est_bug_distribute_overlap
873:     args: -dm_coord_space 0 -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 0 -conv_seq_1_dm_forest_partition_overlap 0 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -overlap 1 -dm_view ::load_balance
874:     args: -dm_post_overlap_view

876:   test:
877:     suffix: ref_alfeld2d_0
878:     requires: triangle
879:     args: -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_alfeld -final_diagnostics
880:   test:
881:     suffix: ref_alfeld3d_0
882:     requires: ctetgen
883:     args: -dm_plex_dim 3 -dm_plex_box_faces 5,1,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_alfeld -final_diagnostics

 885:   # Boundary layer refinement tests
886:   test:
887:     suffix: ref_bl_1
888:     args: -dm_plex_dim 1 -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 2 -final_diagnostics -ref_dm_plex_transform_bl_splits 3
889:   test:
890:     suffix: ref_bl_2_tri
891:     requires: triangle
892:     args: -dm_coord_space 0 -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 3 -final_diagnostics -ref_dm_plex_transform_bl_splits 4
893:   test:
894:     suffix: ref_bl_3_quad
895:     args: -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 3 -final_diagnostics -ref_dm_plex_transform_bl_splits 4
896:   test:
897:     suffix: ref_bl_spheresurface_extruded
 898:     nsize: 4
899:     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -ref_dm_plex_transform_bl_splits 2
900:   test:
901:     suffix: ref_bl_3d_hyb
 902:     nsize: 4
903:     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -ref_dm_plex_transform_bl_splits 4 -ref_dm_plex_transform_bl_height_factor 3.1

905:   testset:
906:     args: -dm_plex_shape sphere -dm_plex_check_all -dm_view
907:     test:
908:       suffix: sphere_0
909:       args:
910:     test:
911:       suffix: sphere_1
912:       args: -ref_dm_refine 2
913:     test:
914:       suffix: sphere_2
915:       args: -dm_plex_simplex 0
916:     test:
917:       suffix: sphere_3
918:       args: -dm_plex_simplex 0 -ref_dm_refine 2
919:     test:
920:       suffix: sphere_4
921:       args: -dm_plex_dim 1 -ref_dm_refine 2

923:   testset:
924:     args: -dm_plex_shape ball -dm_plex_check_all -dm_view

926:     test:
927:       suffix: ball_0
928:       requires: ctetgen
929:       args: -dm_plex_dim 3

931:     test:
932:       suffix: ball_1
933:       requires: ctetgen
934:       args: -dm_plex_dim 3 -bd_dm_refine 2

936:     test:
937:       suffix: ball_2
938:       requires: triangle
939:       args: -dm_plex_dim 2 -bd_dm_refine 2

941:   test:
942:     suffix: schwarz_p_extrude
943:     args: -dm_plex_shape schwarz_p -dm_plex_tps_extent 1,1,1 -dm_plex_tps_layers 1 -dm_plex_tps_thickness .2 -dm_view

945:   test:
946:     suffix: pyr_mixed_0
947:     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/pyr_tet.msh -dm_plex_check_all -dm_view

949:   test:
950:     suffix: hypercubic_0
951:     args: -dm_plex_dim 2 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3 -dm_plex_check_all -dm_view

953:   test:
954:     suffix: hypercubic_1
955:     args: -dm_plex_dim 3 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3,3 -dm_plex_check_all -dm_view

957:   test:
958:     suffix: hypercubic_2
959:     args: -dm_plex_dim 4 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3,3,3 -dm_view \
960:           -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_faces -dm_plex_check_pointsf -final_diagnostics 0

962:   test:
963:     suffix: hypercubic_3
964:     args: -dm_plex_dim 5 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3,3,3,3 -dm_view \
965:           -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_faces -dm_plex_check_pointsf -final_diagnostics 0

967:   test:
968:     suffix: hypercubic_4
969:     args: -dm_plex_dim 6 -dm_plex_shape hypercubic -dm_plex_box_faces 3,3,3,3,3,3 -dm_view \
970:           -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_faces -dm_plex_check_pointsf -final_diagnostics 0
971: TEST*/