Actual source code: ex3.c

static const char help[] = "Test freeing of MPI types in PetscSF\n\n";

#include <petscvec.h>
#include <petscsf.h>
#include <petscviewer.h>

int main(int argc, char **argv)
{
  PetscSF      sf;
  Vec          A, Aout;
  PetscScalar *bufA;
  PetscScalar *bufAout;
  PetscMPIInt  rank, size;
  PetscInt     nroots, nleaves;
  PetscInt     i;
  PetscInt    *ilocal;
  PetscSFNode *iremote;
  PetscBool    test_dupped_type;
  MPI_Datatype contig;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "PetscSF type freeing options", "none");
  test_dupped_type = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test_dupped_type", "Test dupped input type", "", test_dupped_type, &test_dupped_type, NULL));
  PetscOptionsEnd();

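  /* Create the star forest and let the options database (-sf_type, etc.) choose its implementation */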
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetFromOptions(sf));

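  /* Build a minimal graph: one leaf per process, every leaf pointing at root 0 on rank 0 */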
  nleaves = 1;
  nroots  = 1;
  PetscCall(PetscMalloc1(nleaves, &ilocal));

  for (i = 0; i < nleaves; i++) ilocal[i] = i;

  PetscCall(PetscMalloc1(nleaves, &iremote));
  iremote[0].rank  = 0;
  iremote[0].index = 0;
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFView(sf, PETSC_VIEWER_STDOUT_WORLD));
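
  /* Create a vector with 4 local entries; the derived MPI type below covers all 4 at once */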
  PetscCall(VecCreate(PETSC_COMM_WORLD, &A));
  PetscCall(VecSetSizes(A, 4, PETSC_DETERMINE));
  PetscCall(VecSetFromOptions(A));
  PetscCall(VecSetUp(A));

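  /* Duplicate A for the result and fill A with the values 0,1,2,3 */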
  PetscCall(VecDuplicate(A, &Aout));
  PetscCall(VecGetArray(A, &bufA));
  for (i = 0; i < 4; i++) bufA[i] = (PetscScalar)i;
  PetscCall(VecRestoreArray(A, &bufA));

  PetscCall(VecGetArrayRead(A, (const PetscScalar **)&bufA));
  PetscCall(VecGetArray(Aout, &bufAout));

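  /* Define a contiguous MPI datatype of 4 scalars, so a single SF unit moves a whole local block */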
  PetscCallMPI(MPI_Type_contiguous(4, MPIU_SCALAR, &contig));
  PetscCallMPI(MPI_Type_commit(&contig));

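  /* With -test_dupped_type, hand PetscSF a duplicate of the committed type instead of the original */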
  if (test_dupped_type) {
    MPI_Datatype tmp;
    PetscCallMPI(MPI_Type_dup(contig, &tmp));
    PetscCallMPI(MPI_Type_free(&contig));
    contig = tmp;
  }
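
  /* Broadcast root data to the leaves many times; if PetscSF leaked the internal MPI types it
     derives from 'contig', this loop would exhaust MPI's datatype resources */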
  for (i = 0; i < 10000; i++) {
    PetscCall(PetscSFBcastBegin(sf, contig, bufA, bufAout, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, contig, bufA, bufAout, MPI_REPLACE));
  }
  PetscCall(VecRestoreArrayRead(A, (const PetscScalar **)&bufA));
  PetscCall(VecRestoreArray(Aout, &bufAout));

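  /* Each process's Aout should now hold the broadcast values 0,1,2,3 */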
  PetscCall(VecView(Aout, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&A));
  PetscCall(VecDestroy(&Aout));
  PetscCall(PetscSFDestroy(&sf));
  PetscCallMPI(MPI_Type_free(&contig));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      suffix: basic
      args: -sf_type basic

   test:
      suffix: basic_dupped
      args: -test_dupped_type -sf_type basic

   test:
      suffix: window
      filter: grep -v "type" | grep -v "sort"
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate dynamic}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: window_dupped
      filter: grep -v "type" | grep -v "sort"
      args: -test_dupped_type -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate dynamic}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: window_shared
      output_file: output/ex3_window.out
      filter: grep -v "type" | grep -v "sort"
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI_NUMVERSION)

   test:
      suffix: window_dupped_shared
      output_file: output/ex3_window_dupped.out
      filter: grep -v "type" | grep -v "sort"
      args: -test_dupped_type -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared
      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI_NUMVERSION)

TEST*/