Actual source code: pdvec.c
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>
6: #include <petsc/private/viewerhdf5impl.h>
7: #include <petsc/private/glvisviewerimpl.h>
8: #include <petsc/private/glvisvecimpl.h>
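/*
  VecDestroy_MPI - releases the storage owned by an MPI vector: the allocated
  array, the optional local representation and its scatter, and both the
  scalar and block stashes, before freeing the Vec_MPI data structure itself.
*/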
10: PetscErrorCode VecDestroy_MPI(Vec v)
11: {
12: Vec_MPI *x = (Vec_MPI*)v->data;
14: #if defined(PETSC_USE_LOG)
15: PetscLogObjectState((PetscObject)v,"Length=%" PetscInt_FMT,v->map->N);
16: #endif
17: if (!x) return 0;
18: PetscFree(x->array_allocated);
20: /* Destroy local representation of vector if it exists */
21: if (x->localrep) {
22: VecDestroy(&x->localrep);
23: VecScatterDestroy(&x->localupdate);
24: }
25: VecAssemblyReset_MPI(v);
27: /* Destroy the stashes: note the order so that the tags are freed properly */
28: VecStashDestroy_Private(&v->bstash);
29: VecStashDestroy_Private(&v->stash);
30: PetscFree(v->data);
31: return 0;
32: }
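/*
  VecView_MPI_ASCII - ASCII viewing of a parallel vector. Rank 0 prints its own
  entries and then receives each remaining rank's local values in turn (the
  receive buffer is sized by an MPI_Reduce of the local lengths), so the output
  appears in global order; the other ranks simply send their local array. The
  viewer format selects MATLAB, SYMMODU, deprecated VTK, PCICE, GLVis,
  load-balance, or plain output.

  A minimal usage sketch (hypothetical driver code, not part of this file,
  assuming x is an assembled parallel Vec):

    PetscViewer viewer = PETSC_VIEWER_STDOUT_WORLD;
    PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
    VecView(x,viewer);                  // dispatches here for MPI vectors
    PetscViewerPopFormat(viewer);
*/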
34: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
35: {
36: PetscInt i,work = xin->map->n,cnt,len,nLen;
37: PetscMPIInt j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
38: MPI_Status status;
39: PetscScalar *values;
40: const PetscScalar *xarray;
41: const char *name;
42: PetscViewerFormat format;
44: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
45: PetscViewerGetFormat(viewer,&format);
46: if (format == PETSC_VIEWER_LOAD_BALANCE) {
47: PetscInt nmax = 0,nmin = xin->map->n,navg;
48: for (i=0; i<(PetscInt)size; i++) {
49: nmax = PetscMax(nmax,xin->map->range[i+1] - xin->map->range[i]);
50: nmin = PetscMin(nmin,xin->map->range[i+1] - xin->map->range[i]);
51: }
52: navg = xin->map->N/size;
53: PetscViewerASCIIPrintf(viewer," Load Balance - Local vector size Min %" PetscInt_FMT " avg %" PetscInt_FMT " max %" PetscInt_FMT "\n",nmin,navg,nmax);
54: return 0;
55: }
57: VecGetArrayRead(xin,&xarray);
58: /* determine the maximum message length that will arrive */
59: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
60: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
61: if (format == PETSC_VIEWER_ASCII_GLVIS) { rank = 0, len = 0; } /* no parallel distributed write support from GLVis */
62: if (rank == 0) {
63: PetscMalloc1(len,&values);
64: /*
65: MATLAB format and ASCII format are very similar except
66: MATLAB uses %18.16e format while ASCII uses %g
67: */
68: if (format == PETSC_VIEWER_ASCII_MATLAB) {
69: PetscObjectGetName((PetscObject)xin,&name);
70: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
71: for (i=0; i<xin->map->n; i++) {
72: #if defined(PETSC_USE_COMPLEX)
73: if (PetscImaginaryPart(xarray[i]) > 0.0) {
74: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
75: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
76: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
77: } else {
78: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
79: }
80: #else
81: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
82: #endif
83: }
84: /* receive and print messages */
85: for (j=1; j<size; j++) {
86: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
87: MPI_Get_count(&status,MPIU_SCALAR,&n);
88: for (i=0; i<n; i++) {
89: #if defined(PETSC_USE_COMPLEX)
90: if (PetscImaginaryPart(values[i]) > 0.0) {
91: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
92: } else if (PetscImaginaryPart(values[i]) < 0.0) {
93: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
94: } else {
95: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
96: }
97: #else
98: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
99: #endif
100: }
101: }
102: PetscViewerASCIIPrintf(viewer,"];\n");
104: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
105: for (i=0; i<xin->map->n; i++) {
106: #if defined(PETSC_USE_COMPLEX)
107: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
108: #else
109: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
110: #endif
111: }
112: /* receive and print messages */
113: for (j=1; j<size; j++) {
114: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
115: MPI_Get_count(&status,MPIU_SCALAR,&n);
116: for (i=0; i<n; i++) {
117: #if defined(PETSC_USE_COMPLEX)
118: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
119: #else
120: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
121: #endif
122: }
123: }
124: } else if (format == PETSC_VIEWER_ASCII_VTK_DEPRECATED || format == PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED) {
125: /*
126: state 0: No header has been output
127: state 1: Only POINT_DATA has been output
128: state 2: Only CELL_DATA has been output
129: state 3: Output both, POINT_DATA last
130: state 4: Output both, CELL_DATA last
131: */
132: static PetscInt stateId = -1;
133: int outputState = 0;
134: int doOutput = 0;
135: PetscBool hasState;
136: PetscInt bs, b;
138: if (stateId < 0) {
139: PetscObjectComposedDataRegister(&stateId);
140: }
141: PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
142: if (!hasState) outputState = 0;
144: PetscObjectGetName((PetscObject)xin,&name);
145: VecGetLocalSize(xin, &nLen);
146: PetscMPIIntCast(nLen,&n);
147: VecGetBlockSize(xin, &bs);
148: if (format == PETSC_VIEWER_ASCII_VTK_DEPRECATED) {
149: if (outputState == 0) {
150: outputState = 1;
151: doOutput = 1;
152: } else if (outputState == 1) doOutput = 0;
153: else if (outputState == 2) {
154: outputState = 3;
155: doOutput = 1;
156: } else if (outputState == 3) doOutput = 0;
159: if (doOutput) {
160: PetscViewerASCIIPrintf(viewer, "POINT_DATA %" PetscInt_FMT "\n", xin->map->N/bs);
161: }
162: } else {
163: if (outputState == 0) {
164: outputState = 2;
165: doOutput = 1;
166: } else if (outputState == 1) {
167: outputState = 4;
168: doOutput = 1;
169: } else if (outputState == 2) doOutput = 0;
171: else if (outputState == 4) doOutput = 0;
173: if (doOutput) {
174: PetscViewerASCIIPrintf(viewer, "CELL_DATA %" PetscInt_FMT "\n", xin->map->N/bs);
175: }
176: }
177: PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
178: if (name) {
179: if (bs == 3) {
180: PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
181: } else {
182: PetscViewerASCIIPrintf(viewer, "SCALARS %s double %" PetscInt_FMT "\n", name, bs);
183: }
184: } else {
185: PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %" PetscInt_FMT "\n", bs);
186: }
187: if (bs != 3) {
188: PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
189: }
190: for (i=0; i<n/bs; i++) {
191: for (b=0; b<bs; b++) {
192: if (b > 0) {
193: PetscViewerASCIIPrintf(viewer," ");
194: }
195: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
196: }
197: PetscViewerASCIIPrintf(viewer,"\n");
198: }
199: for (j=1; j<size; j++) {
200: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
201: MPI_Get_count(&status,MPIU_SCALAR,&n);
202: for (i=0; i<n/bs; i++) {
203: for (b=0; b<bs; b++) {
204: if (b > 0) {
205: PetscViewerASCIIPrintf(viewer," ");
206: }
207: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
208: }
209: PetscViewerASCIIPrintf(viewer,"\n");
210: }
211: }
212: } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS_DEPRECATED) {
213: PetscInt bs, b;
215: VecGetLocalSize(xin, &nLen);
216: PetscMPIIntCast(nLen,&n);
217: VecGetBlockSize(xin, &bs);
220: for (i=0; i<n/bs; i++) {
221: for (b=0; b<bs; b++) {
222: if (b > 0) {
223: PetscViewerASCIIPrintf(viewer," ");
224: }
225: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
226: }
227: for (b=bs; b<3; b++) {
228: PetscViewerASCIIPrintf(viewer," 0.0");
229: }
230: PetscViewerASCIIPrintf(viewer,"\n");
231: }
232: for (j=1; j<size; j++) {
233: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
234: MPI_Get_count(&status,MPIU_SCALAR,&n);
235: for (i=0; i<n/bs; i++) {
236: for (b=0; b<bs; b++) {
237: if (b > 0) {
238: PetscViewerASCIIPrintf(viewer," ");
239: }
240: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
241: }
242: for (b=bs; b<3; b++) {
243: PetscViewerASCIIPrintf(viewer," 0.0");
244: }
245: PetscViewerASCIIPrintf(viewer,"\n");
246: }
247: }
248: } else if (format == PETSC_VIEWER_ASCII_PCICE) {
249: PetscInt bs, b, vertexCount = 1;
251: VecGetLocalSize(xin, &nLen);
252: PetscMPIIntCast(nLen,&n);
253: VecGetBlockSize(xin, &bs);
256: PetscViewerASCIIPrintf(viewer,"%" PetscInt_FMT "\n", xin->map->N/bs);
257: for (i=0; i<n/bs; i++) {
258: PetscViewerASCIIPrintf(viewer,"%7" PetscInt_FMT " ", vertexCount++);
259: for (b=0; b<bs; b++) {
260: if (b > 0) {
261: PetscViewerASCIIPrintf(viewer," ");
262: }
263: #if !defined(PETSC_USE_COMPLEX)
264: PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
265: #endif
266: }
267: PetscViewerASCIIPrintf(viewer,"\n");
268: }
269: for (j=1; j<size; j++) {
270: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
271: MPI_Get_count(&status,MPIU_SCALAR,&n);
272: for (i=0; i<n/bs; i++) {
273: PetscViewerASCIIPrintf(viewer,"%7" PetscInt_FMT " ", vertexCount++);
274: for (b=0; b<bs; b++) {
275: if (b > 0) {
276: PetscViewerASCIIPrintf(viewer," ");
277: }
278: #if !defined(PETSC_USE_COMPLEX)
279: PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
280: #endif
281: }
282: PetscViewerASCIIPrintf(viewer,"\n");
283: }
284: }
285: } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
286: /* GLVis ASCII visualization/dump: this function mimics mfem::GridFunction::Save() */
287: const PetscScalar *array;
288: PetscInt i,n,vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
289: PetscContainer glvis_container;
290: PetscViewerGLVisVecInfo glvis_vec_info;
291: PetscViewerGLVisInfo glvis_info;
293: /* mfem::FiniteElementSpace::Save() */
294: VecGetBlockSize(xin,&vdim);
295: PetscViewerASCIIPrintf(viewer,"FiniteElementSpace\n");
296: PetscObjectQuery((PetscObject)xin,"_glvis_info_container",(PetscObject*)&glvis_container);
298: PetscContainerGetPointer(glvis_container,(void**)&glvis_vec_info);
299: PetscViewerASCIIPrintf(viewer,"%s\n",glvis_vec_info->fec_type);
300: PetscViewerASCIIPrintf(viewer,"VDim: %" PetscInt_FMT "\n",vdim);
301: PetscViewerASCIIPrintf(viewer,"Ordering: %" PetscInt_FMT "\n",ordering);
302: PetscViewerASCIIPrintf(viewer,"\n");
303: /* mfem::Vector::Print() */
304: PetscObjectQuery((PetscObject)viewer,"_glvis_info_container",(PetscObject*)&glvis_container);
306: PetscContainerGetPointer(glvis_container,(void**)&glvis_info);
307: if (glvis_info->enabled) {
308: VecGetLocalSize(xin,&n);
309: VecGetArrayRead(xin,&array);
310: for (i=0;i<n;i++) {
311: PetscViewerASCIIPrintf(viewer,glvis_info->fmt,(double)PetscRealPart(array[i]));
312: PetscViewerASCIIPrintf(viewer,"\n");
313: }
314: VecRestoreArrayRead(xin,&array);
315: }
316: } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
317: /* No info */
318: } else {
319: if (format != PETSC_VIEWER_ASCII_COMMON) PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);
320: cnt = 0;
321: for (i=0; i<xin->map->n; i++) {
322: if (format == PETSC_VIEWER_ASCII_INDEX) {
323: PetscViewerASCIIPrintf(viewer,"%" PetscInt_FMT ": ",cnt++);
324: }
325: #if defined(PETSC_USE_COMPLEX)
326: if (PetscImaginaryPart(xarray[i]) > 0.0) {
327: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
328: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
329: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
330: } else {
331: PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
332: }
333: #else
334: PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
335: #endif
336: }
337: /* receive and print messages */
338: for (j=1; j<size; j++) {
339: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
340: MPI_Get_count(&status,MPIU_SCALAR,&n);
341: if (format != PETSC_VIEWER_ASCII_COMMON) {
342: PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
343: }
344: for (i=0; i<n; i++) {
345: if (format == PETSC_VIEWER_ASCII_INDEX) {
346: PetscViewerASCIIPrintf(viewer,"%" PetscInt_FMT ": ",cnt++);
347: }
348: #if defined(PETSC_USE_COMPLEX)
349: if (PetscImaginaryPart(values[i]) > 0.0) {
350: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
351: } else if (PetscImaginaryPart(values[i]) < 0.0) {
352: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
353: } else {
354: PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
355: }
356: #else
357: PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
358: #endif
359: }
360: }
361: }
362: PetscFree(values);
363: } else {
364: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
365: /* Rank 0 is not trying to receive anything, so don't send anything */
366: } else {
367: if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK_DEPRECATED || format == PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED) {
368: /* this may be a collective operation so make sure everyone calls it */
369: PetscObjectGetName((PetscObject)xin,&name);
370: }
371: MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
372: }
373: }
374: PetscViewerFlush(viewer);
375: VecRestoreArrayRead(xin,&xarray);
376: return 0;
377: }
379: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
380: {
381: return VecView_Binary(xin,viewer);
382: }
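/*
  VecView_MPI_Draw_LG - line-graph drawing of a parallel vector. The real parts
  of all entries are gathered onto rank 0 with MPI_Gatherv (counts and
  displacements taken from the ownership ranges) and plotted against the global
  index as a single PetscDrawLG curve.
*/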
384: #include <petscdraw.h>
385: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
386: {
387: PetscDraw draw;
388: PetscBool isnull;
389: PetscDrawLG lg;
390: PetscMPIInt i,size,rank,n,N,*lens = NULL,*disp = NULL;
391: PetscReal *values, *xx = NULL,*yy = NULL;
392: const PetscScalar *xarray;
393: int colors[] = {PETSC_DRAW_RED};
395: PetscViewerDrawGetDraw(viewer,0,&draw);
396: PetscDrawIsNull(draw,&isnull);
397: if (isnull) return 0;
398: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
399: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
400: PetscMPIIntCast(xin->map->n,&n);
401: PetscMPIIntCast(xin->map->N,&N);
403: VecGetArrayRead(xin,&xarray);
404: #if defined(PETSC_USE_COMPLEX)
405: PetscMalloc1(n+1,&values);
406: for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
407: #else
408: values = (PetscReal*)xarray;
409: #endif
410: if (rank == 0) {
411: PetscMalloc2(N,&xx,N,&yy);
412: for (i=0; i<N; i++) xx[i] = (PetscReal)i;
413: PetscMalloc2(size,&lens,size,&disp);
414: for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
415: for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
416: }
417: MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
418: PetscFree2(lens,disp);
419: #if defined(PETSC_USE_COMPLEX)
420: PetscFree(values);
421: #endif
422: VecRestoreArrayRead(xin,&xarray);
424: PetscViewerDrawGetDrawLG(viewer,0,&lg);
425: PetscDrawLGReset(lg);
426: PetscDrawLGSetDimension(lg,1);
427: PetscDrawLGSetColors(lg,colors);
428: if (rank == 0) {
429: PetscDrawLGAddPoints(lg,N,&xx,&yy);
430: PetscFree2(xx,yy);
431: }
432: PetscDrawLGDraw(lg);
433: PetscDrawLGSave(lg);
434: return 0;
435: }
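/*
  VecView_MPI_Draw - draws the vector as a piecewise-linear function of the
  global index without gathering it: each rank draws its own segments, and the
  last local value is sent to the right neighbor so the curve stays connected
  across process boundaries. The axis limits come from VecMin/VecMax.
*/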
437: PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
438: {
439: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
440: PetscInt i,start,end;
441: MPI_Status status;
442: PetscReal min,max,tmp = 0.0;
443: PetscDraw draw;
444: PetscBool isnull;
445: PetscDrawAxis axis;
446: const PetscScalar *xarray;
449: PetscViewerDrawGetDraw(viewer,0,&draw);
450: PetscDrawIsNull(draw,&isnull);
451: if (isnull) return 0;
452: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
453: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
455: VecMin(xin,NULL,&min);
456: VecMax(xin,NULL,&max);
457: if (min == max) {
458: min -= 1.e-5;
459: max += 1.e-5;
460: }
462: PetscDrawCheckResizedWindow(draw);
463: PetscDrawClear(draw);
465: PetscDrawAxisCreate(draw,&axis);
466: PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
467: PetscDrawAxisDraw(axis);
468: PetscDrawAxisDestroy(&axis);
470: /* draw local part of vector */
471: VecGetArrayRead(xin,&xarray);
472: VecGetOwnershipRange(xin,&start,&end);
473: if (rank < size-1) { /* send value to right */
474: MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
475: }
476: if (rank) { /* receive value from left */
477: MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
478: }
479: PetscDrawCollectiveBegin(draw);
480: if (rank) {
481: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
482: }
483: for (i=1; i<xin->map->n; i++) {
484: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
485: }
486: PetscDrawCollectiveEnd(draw);
487: VecRestoreArrayRead(xin,&xarray);
489: PetscDrawFlush(draw);
490: PetscDrawPause(draw);
491: PetscDrawSave(draw);
492: return 0;
493: }
495: #if defined(PETSC_HAVE_MATLAB_ENGINE)
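/*
  VecView_MPI_Matlab - gathers the entire vector onto rank 0 with MPI_Gatherv,
  using the ownership ranges as counts and displacements, and hands it to the
  MATLAB viewer under the vector's object name.
*/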
496: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
497: {
498: PetscMPIInt rank,size,*lens;
499: PetscInt i,N = xin->map->N;
500: const PetscScalar *xarray;
501: PetscScalar *xx;
503: VecGetArrayRead(xin,&xarray);
504: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
505: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
506: if (rank == 0) {
507: PetscMalloc1(N,&xx);
508: PetscMalloc1(size,&lens);
509: for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];
511: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
512: PetscFree(lens);
514: PetscObjectName((PetscObject)xin);
515: PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);
517: PetscFree(xx);
518: } else {
519: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
520: }
521: VecRestoreArrayRead(xin,&xarray);
522: return 0;
523: }
524: #endif
526: #if defined(PETSC_HAVE_ADIOS)
527: #include <adios.h>
528: #include <adios_read.h>
529: #include <petsc/private/vieweradiosimpl.h>
530: #include <petsc/private/viewerimpl.h>
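/*
  VecView_MPI_ADIOS - defines an ADIOS variable whose local size, global size,
  and offset describe this rank's portion of the vector, then writes the local
  array through the viewer's ADIOS handle.
*/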
532: PetscErrorCode VecView_MPI_ADIOS(Vec xin, PetscViewer viewer)
533: {
534: PetscViewer_ADIOS *adios = (PetscViewer_ADIOS*)viewer->data;
535: const char *vecname;
536: int64_t id;
537: PetscInt n,N,rstart;
538: const PetscScalar *array;
539: char nglobalname[16],nlocalname[16],coffset[16];
541: PetscObjectGetName((PetscObject) xin, &vecname);
543: VecGetLocalSize(xin,&n);
544: VecGetSize(xin,&N);
545: VecGetOwnershipRange(xin,&rstart,NULL);
547: sprintf(nlocalname,"%d",(int)n);
548: sprintf(nglobalname,"%d",(int)N);
549: sprintf(coffset,"%d",(int)rstart);
550: id = adios_define_var(Petsc_adios_group,vecname,"",adios_double,nlocalname,nglobalname,coffset);
551: VecGetArrayRead(xin,&array);
552: adios_write_byid(adios->adios_handle,id,array);
553: VecRestoreArrayRead(xin,&array);
554: return 0;
555: }
556: #endif
558: #if defined(PETSC_HAVE_HDF5)
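/*
  VecView_MPI_HDF5 - parallel HDF5 output. The dataset's dataspace is built
  from up to four dimensions: an unlimited time dimension (when timestepping),
  the global length divided by the block size, the block size itself (when
  bs > 1 or a 2d base dimension is requested), and a trailing dimension of 2
  for complex scalars. Chunk dimensions are clamped so a chunk stays well below
  the HDF5 4GB limit. Each rank then selects and writes its own hyperslab,
  offset by its ownership range.
*/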
559: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
560: {
561: PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5*) viewer->data;
562: /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
563: hid_t filespace; /* file dataspace identifier */
564: hid_t chunkspace; /* chunk dataset property identifier */
565: hid_t dset_id; /* dataset identifier */
566: hid_t memspace; /* memory dataspace identifier */
567: hid_t file_id;
568: hid_t group;
569: hid_t memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
570: hid_t filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
571: PetscInt bs = PetscAbs(xin->map->bs);
572: hsize_t dim;
573: hsize_t maxDims[4], dims[4], chunkDims[4], count[4], offset[4];
574: PetscBool timestepping, dim2, spoutput;
575: PetscInt timestep=PETSC_MIN_INT, low;
576: hsize_t chunksize;
577: const PetscScalar *x;
578: const char *vecname;
580: PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
581: PetscViewerHDF5IsTimestepping(viewer, &timestepping);
582: if (timestepping) {
583: PetscViewerHDF5GetTimestep(viewer, &timestep);
584: }
585: PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
586: PetscViewerHDF5GetSPOutput(viewer,&spoutput);
588: /* Create the dataspace for the dataset.
589: *
590: * dims - holds the current dimensions of the dataset
591: *
592: * maxDims - holds the maximum dimensions of the dataset (unlimited
593: * for the number of time steps with the current dimensions for the
594: * other dimensions; so only additional time steps can be added).
595: *
596: * chunkDims - holds the size of a single time step (required to
597: * permit extending dataset).
598: */
599: dim = 0;
600: chunksize = 1;
601: if (timestep >= 0) {
602: dims[dim] = timestep+1;
603: maxDims[dim] = H5S_UNLIMITED;
604: chunkDims[dim] = 1;
605: ++dim;
606: }
607: PetscHDF5IntCast(xin->map->N/bs,dims + dim);
609: maxDims[dim] = dims[dim];
610: chunkDims[dim] = PetscMax(1, dims[dim]);
611: chunksize *= chunkDims[dim];
612: ++dim;
613: if (bs > 1 || dim2) {
614: dims[dim] = bs;
615: maxDims[dim] = dims[dim];
616: chunkDims[dim] = PetscMax(1, dims[dim]);
617: chunksize *= chunkDims[dim];
618: ++dim;
619: }
620: #if defined(PETSC_USE_COMPLEX)
621: dims[dim] = 2;
622: maxDims[dim] = dims[dim];
623: chunkDims[dim] = PetscMax(1, dims[dim]);
624: chunksize *= chunkDims[dim];
625: /* hdf5 chunks must be less than 4GB */
626: if (chunksize > PETSC_HDF5_MAX_CHUNKSIZE/64) {
627: if (bs > 1 || dim2) {
628: if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128))) {
629: chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128));
630: } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128))) {
631: chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128));
632: }
633: } else {
634: chunkDims[dim-1] = PETSC_HDF5_MAX_CHUNKSIZE/128;
635: }
636: }
637: ++dim;
638: #else
639: /* hdf5 chunks must be less than 4GB */
640: if (chunksize > PETSC_HDF5_MAX_CHUNKSIZE/64) {
641: if (bs > 1 || dim2) {
642: if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64))) {
643: chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64));
644: } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64))) {
645: chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64));
646: }
647: } else {
648: chunkDims[dim-1] = PETSC_HDF5_MAX_CHUNKSIZE/64;
649: }
650: }
651: #endif
653: PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));
655: #if defined(PETSC_USE_REAL_SINGLE)
656: memscalartype = H5T_NATIVE_FLOAT;
657: filescalartype = H5T_NATIVE_FLOAT;
658: #elif defined(PETSC_USE_REAL___FLOAT128)
659: #error "HDF5 output with 128 bit floats not supported."
660: #elif defined(PETSC_USE_REAL___FP16)
661: #error "HDF5 output with 16 bit floats not supported."
662: #else
663: memscalartype = H5T_NATIVE_DOUBLE;
664: if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
665: else filescalartype = H5T_NATIVE_DOUBLE;
666: #endif
668: /* Create the dataset with default properties and close filespace */
669: PetscObjectGetName((PetscObject) xin, &vecname);
670: if (H5Lexists(group, vecname, H5P_DEFAULT) < 1) {
671: /* Create chunk */
672: PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
673: PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));
675: PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
676: PetscStackCallHDF5(H5Pclose,(chunkspace));
677: } else {
678: PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
679: PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
680: }
681: PetscStackCallHDF5(H5Sclose,(filespace));
683: /* Each process defines a dataset and writes it to the hyperslab in the file */
684: dim = 0;
685: if (timestep >= 0) {
686: count[dim] = 1;
687: ++dim;
688: }
689: PetscHDF5IntCast(xin->map->n/bs,count + dim);
690: ++dim;
691: if (bs > 1 || dim2) {
692: count[dim] = bs;
693: ++dim;
694: }
695: #if defined(PETSC_USE_COMPLEX)
696: count[dim] = 2;
697: ++dim;
698: #endif
699: if (xin->map->n > 0 || H5_VERSION_GE(1,10,0)) {
700: PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
701: } else {
702: /* Can't create dataspace with zero for any dimension, so create null dataspace. */
703: PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
704: }
706: /* Select hyperslab in the file */
707: VecGetOwnershipRange(xin, &low, NULL);
708: dim = 0;
709: if (timestep >= 0) {
710: offset[dim] = timestep;
711: ++dim;
712: }
713: PetscHDF5IntCast(low/bs,offset + dim);
714: ++dim;
715: if (bs > 1 || dim2) {
716: offset[dim] = 0;
717: ++dim;
718: }
719: #if defined(PETSC_USE_COMPLEX)
720: offset[dim] = 0;
721: ++dim;
722: #endif
723: if (xin->map->n > 0 || H5_VERSION_GE(1,10,0)) {
724: PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
725: PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
726: } else {
727: /* Create null filespace to match null memspace. */
728: PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
729: }
731: VecGetArrayRead(xin, &x);
732: PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
733: PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
734: VecRestoreArrayRead(xin, &x);
736: /* Close/release resources */
737: PetscStackCallHDF5(H5Gclose,(group));
738: PetscStackCallHDF5(H5Sclose,(filespace));
739: PetscStackCallHDF5(H5Sclose,(memspace));
740: PetscStackCallHDF5(H5Dclose,(dset_id));
742: #if defined(PETSC_USE_COMPLEX)
743: {
744: PetscBool tru = PETSC_TRUE;
745: PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru);
746: }
747: #endif
748: if (timestepping) {
749: PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"timestepping",PETSC_BOOL,&timestepping);
750: }
751: PetscInfo(xin,"Wrote Vec object with name %s\n",vecname);
752: return 0;
753: }
754: #endif
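/*
  VecView_MPI - viewer dispatch for parallel vectors: compares the viewer's
  type and forwards to the ASCII, binary, draw, Mathematica, HDF5, ADIOS,
  MATLAB, or GLVis implementation above.

  A hedged usage sketch (hypothetical caller code, not part of this file,
  assuming PETSc was configured with HDF5):

    PetscViewer h5;
    PetscViewerHDF5Open(PETSC_COMM_WORLD,"x.h5",FILE_MODE_WRITE,&h5);
    VecView(x,h5);              // reaches VecView_MPI_HDF5() via this dispatcher
    PetscViewerDestroy(&h5);
*/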
756: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
757: {
758: PetscBool iascii,isbinary,isdraw;
759: #if defined(PETSC_HAVE_MATHEMATICA)
760: PetscBool ismathematica;
761: #endif
762: #if defined(PETSC_HAVE_HDF5)
763: PetscBool ishdf5;
764: #endif
765: #if defined(PETSC_HAVE_MATLAB_ENGINE)
766: PetscBool ismatlab;
767: #endif
768: #if defined(PETSC_HAVE_ADIOS)
769: PetscBool isadios;
770: #endif
771: PetscBool isglvis;
773: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
774: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
775: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
776: #if defined(PETSC_HAVE_MATHEMATICA)
777: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
778: #endif
779: #if defined(PETSC_HAVE_HDF5)
780: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
781: #endif
782: #if defined(PETSC_HAVE_MATLAB_ENGINE)
783: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
784: #endif
785: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
786: #if defined(PETSC_HAVE_ADIOS)
787: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS,&isadios);
788: #endif
789: if (iascii) {
790: VecView_MPI_ASCII(xin,viewer);
791: } else if (isbinary) {
792: VecView_MPI_Binary(xin,viewer);
793: } else if (isdraw) {
794: PetscViewerFormat format;
795: PetscViewerGetFormat(viewer,&format);
796: if (format == PETSC_VIEWER_DRAW_LG) {
797: VecView_MPI_Draw_LG(xin,viewer);
798: } else {
799: VecView_MPI_Draw(xin,viewer);
800: }
801: #if defined(PETSC_HAVE_MATHEMATICA)
802: } else if (ismathematica) {
803: PetscViewerMathematicaPutVector(viewer,xin);
804: #endif
805: #if defined(PETSC_HAVE_HDF5)
806: } else if (ishdf5) {
807: VecView_MPI_HDF5(xin,viewer);
808: #endif
809: #if defined(PETSC_HAVE_ADIOS)
810: } else if (isadios) {
811: VecView_MPI_ADIOS(xin,viewer);
812: #endif
813: #if defined(PETSC_HAVE_MATLAB_ENGINE)
814: } else if (ismatlab) {
815: VecView_MPI_Matlab(xin,viewer);
816: #endif
817: } else if (isglvis) {
818: VecView_GLVis(xin,viewer);
819: }
820: return 0;
821: }
823: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
824: {
825: *N = xin->map->N;
826: return 0;
827: }
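/*
  VecGetValues_MPI - reads values by global index. Only indices owned by the
  calling rank can be retrieved (the code subtracts this rank's ownership
  start); negative indices are skipped when ignorenegidx is set.
*/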
829: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
830: {
831: const PetscScalar *xx;
832: PetscInt i,tmp,start = xin->map->range[xin->stash.rank];
834: VecGetArrayRead(xin,&xx);
835: for (i=0; i<ni; i++) {
836: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
837: tmp = ix[i] - start;
839: y[i] = xx[tmp];
840: }
841: VecRestoreArrayRead(xin,&xx);
842: return 0;
843: }
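/*
  VecSetValues_MPI - inserts or adds values by global index. Entries owned by
  this rank are written directly into the local array; off-process entries are
  placed in the stash (unless donotstash is set) and communicated later by
  VecAssemblyBegin/End.

  A minimal usage sketch (hypothetical caller, assuming x is a parallel Vec of
  global length at least 2):

    PetscInt    idx[2] = {0,1};
    PetscScalar val[2] = {1.0,2.0};
    VecSetValues(x,2,idx,val,INSERT_VALUES);   // may stash off-process entries
    VecAssemblyBegin(x);                       // starts the stash scatter
    VecAssemblyEnd(x);                         // applies the stashed entries
*/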
845: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
846: {
847: PetscMPIInt rank = xin->stash.rank;
848: PetscInt *owners = xin->map->range,start = owners[rank];
849: PetscInt end = owners[rank+1],i,row;
850: PetscScalar *xx;
852: if (PetscDefined(USE_DEBUG)) {
855: }
856: VecGetArray(xin,&xx);
857: xin->stash.insertmode = addv;
859: if (addv == INSERT_VALUES) {
860: for (i=0; i<ni; i++) {
861: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
863: if ((row = ix[i]) >= start && row < end) {
864: xx[row-start] = y[i];
865: } else if (!xin->stash.donotstash) {
867: VecStashValue_Private(&xin->stash,row,y[i]);
868: }
869: }
870: } else {
871: for (i=0; i<ni; i++) {
872: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
874: if ((row = ix[i]) >= start && row < end) {
875: xx[row-start] += y[i];
876: } else if (!xin->stash.donotstash) {
878: VecStashValue_Private(&xin->stash,row,y[i]);
879: }
880: }
881: }
882: VecRestoreArray(xin,&xx);
883: return 0;
884: }
886: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
887: {
888: PetscMPIInt rank = xin->stash.rank;
889: PetscInt *owners = xin->map->range,start = owners[rank];
890: PetscInt end = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
891: PetscScalar *xx,*y = (PetscScalar*)yin;
893: VecGetArray(xin,&xx);
894: if (PetscDefined(USE_DEBUG)) {
897: }
898: xin->stash.insertmode = addv;
900: if (addv == INSERT_VALUES) {
901: for (i=0; i<ni; i++) {
902: if ((row = bs*ix[i]) >= start && row < end) {
903: for (j=0; j<bs; j++) xx[row-start+j] = y[j];
904: } else if (!xin->stash.donotstash) {
905: if (ix[i] < 0) { y += bs; continue; }
907: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
908: }
909: y += bs;
910: }
911: } else {
912: for (i=0; i<ni; i++) {
913: if ((row = bs*ix[i]) >= start && row < end) {
914: for (j=0; j<bs; j++) xx[row-start+j] += y[j];
915: } else if (!xin->stash.donotstash) {
916: if (ix[i] < 0) { y += bs; continue; }
918: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
919: }
920: y += bs;
921: }
922: }
923: VecRestoreArray(xin,&xx);
924: return 0;
925: }
927: /*
928: Since nsends or nreceives may be zero we add 1 in certain mallocs
929: to make sure we never malloc an empty one.
930: */
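/*
  VecAssemblyBegin_MPI - agrees on a single InsertMode across all ranks (an
  MPI_BOR reduction of the per-rank insert modes), builds the block ownership
  ranges on first use, and starts scattering both the scalar and block stashes
  to the ranks that own the stashed entries.
*/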
931: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
932: {
933: PetscInt *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
934: PetscMPIInt size;
935: InsertMode addv;
936: MPI_Comm comm;
938: PetscObjectGetComm((PetscObject)xin,&comm);
939: if (xin->stash.donotstash) return 0;
941: MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
943: xin->stash.insertmode = addv; /* in case this processor had no cache */
944: xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */
946: VecGetBlockSize(xin,&bs);
947: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
948: if (!xin->bstash.bowners && xin->map->bs != -1) {
949: PetscMalloc1(size+1,&bowners);
950: for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
951: xin->bstash.bowners = bowners;
952: } else bowners = xin->bstash.bowners;
954: VecStashScatterBegin_Private(&xin->stash,owners);
955: VecStashScatterBegin_Private(&xin->bstash,bowners);
956: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
957: PetscInfo(xin,"Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs);
958: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
959: PetscInfo(xin,"Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs);
960: return 0;
961: }
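/*
  VecAssemblyEnd_MPI - drains the messages produced by VecAssemblyBegin_MPI:
  received scalar-stash and block-stash entries are added to or copied into the
  local array according to the insert mode, then the insert mode is reset.
*/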
963: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
964: {
965: PetscInt base,i,j,*row,flg,bs;
966: PetscMPIInt n;
967: PetscScalar *val,*vv,*array,*xarray;
969: if (!vec->stash.donotstash) {
970: VecGetArray(vec,&xarray);
971: VecGetBlockSize(vec,&bs);
972: base = vec->map->range[vec->stash.rank];
974: /* Process the stash */
975: while (1) {
976: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
977: if (!flg) break;
978: if (vec->stash.insertmode == ADD_VALUES) {
979: for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
980: } else if (vec->stash.insertmode == INSERT_VALUES) {
981: for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
982: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
983: }
984: VecStashScatterEnd_Private(&vec->stash);
986: /* now process the block-stash */
987: while (1) {
988: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
989: if (!flg) break;
990: for (i=0; i<n; i++) {
991: array = xarray+row[i]*bs-base;
992: vv = val+i*bs;
993: if (vec->stash.insertmode == ADD_VALUES) {
994: for (j=0; j<bs; j++) array[j] += vv[j];
995: } else if (vec->stash.insertmode == INSERT_VALUES) {
996: for (j=0; j<bs; j++) array[j] = vv[j];
997: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
998: }
999: }
1000: VecStashScatterEnd_Private(&vec->bstash);
1001: VecRestoreArray(vec,&xarray);
1002: }
1003: vec->stash.insertmode = NOT_SET_VALUES;
1004: return 0;
1005: }