Actual source code: ex11.c
static char help[] = "Tests MatMeshToCellGraph()\n\n";
/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscviewer.h - viewers
*/
#include <petscmat.h>
int main(int argc, char **args)
{
  Mat             mesh, dual;
  PetscInt        Nvertices = 6; /* total number of vertices */
  PetscInt        ncells    = 2; /* number of cells on this process */
  PetscInt       *ii, *jj;
  PetscMPIInt     size, rank;
  MatPartitioning part;
  IS              is;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(MPI_COMM_WORLD, &size));
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example is for exactly two processes");
  PetscCallMPI(MPI_Comm_rank(MPI_COMM_WORLD, &rank));
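  /* Describe the local piece of the mesh in CSR form: each process owns
     ncells = 2 cells, each given by three global vertex numbers. ii[] holds
     the row offsets into jj[], and jj[] lists the vertices of each cell. */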
  PetscCall(PetscMalloc1(3, &ii));
  PetscCall(PetscMalloc1(6, &jj));
  ii[0] = 0;
  ii[1] = 3;
  ii[2] = 6;
  if (rank == 0) {
    jj[0] = 0;
    jj[1] = 1;
    jj[2] = 2;
    jj[3] = 1;
    jj[4] = 2;
    jj[5] = 3;
  } else {
    jj[0] = 1;
    jj[1] = 4;
    jj[2] = 5;
    jj[3] = 1;
    jj[4] = 3;
    jj[5] = 5;
  }
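  /* Create a parallel adjacency (MATMPIADJ) matrix representing the mesh.
     MatCreateMPIAdj() takes ownership of ii and jj, so they are freed when
     the matrix is destroyed rather than with PetscFree(). */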
  PetscCall(MatCreateMPIAdj(MPI_COMM_WORLD, ncells, Nvertices, ii, jj, NULL, &mesh));
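  /* Convert the mesh to its dual graph: cells become vertices of the new
     graph, and two cells are neighbors when they share at least 2 mesh
     vertices. This call uses the ParMETIS package. */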
  PetscCall(MatMeshToCellGraph(mesh, 2, &dual));
  PetscCall(MatView(dual, PETSC_VIEWER_STDOUT_WORLD));
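  /* Partition the dual (cell-to-cell) graph; the resulting index set lists,
     for each locally owned cell, the process it is assigned to. */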
  PetscCall(MatPartitioningCreate(MPI_COMM_WORLD, &part));
  PetscCall(MatPartitioningSetAdjacency(part, dual));
  PetscCall(MatPartitioningSetFromOptions(part));
  PetscCall(MatPartitioningApply(part, &is));
  PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&is));
  PetscCall(MatPartitioningDestroy(&part));
  PetscCall(MatDestroy(&mesh));
  PetscCall(MatDestroy(&dual));
  PetscCall(PetscFinalize());
  return 0;
}
/*TEST

   build:
      requires: parmetis

   test:
      nsize: 2
      requires: parmetis

TEST*/
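As the TEST block indicates, this example requires a PETSc build with ParMETIS and must be run on exactly two MPI processes, for example with an invocation along the lines of mpiexec -n 2 ./ex11 (the executable name and launcher shown here are typical, not part of the source).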