Actual source code: veckok.kokkos.cxx

  1: /*
  2:    Implements the sequential Kokkos vectors.
  3: */
  4: #include <petsc_kokkos.hpp>
  5: #include <petscvec_kokkos.hpp>

  7: #include <petsc/private/sfimpl.h>
  8: #include <petsc/private/petscimpl.h>
  9: #include <petscmath.h>
 10: #include <petscviewer.h>
 11: #include <KokkosBlas.hpp>
 12: #include <Kokkos_Functional.hpp>

 14: #include <petscerror.h>
 15: #include <../src/vec/vec/impls/dvecimpl.h>
 16: #include <../src/vec/vec/impls/seq/kokkos/veckokkosimpl.hpp>

 18: // Sync a Kokkos::DualView to the memory space given by MemorySpace, using the given execution space exec.
 19: // If MemorySpace is HostSpace, fence the exec so that the data on host is immediately available.
 20: template <class MemorySpace, typename Type>
 21: static PetscErrorCode KokkosDualViewSync(Kokkos::DualView<Type *> &v_dual, const Kokkos::DefaultExecutionSpace &exec)
 22: {
 23:   size_t bytes = v_dual.extent(0) * sizeof(Type);

 25:   PetscFunctionBegin;
 26:   PetscCall(PetscLogGpuTimeBegin());
 27:   if (std::is_same_v<MemorySpace, Kokkos::HostSpace>) {
 28:     if (v_dual.need_sync_host()) {
 29:       PetscCallCXX(v_dual.sync_host(exec));
 30:       PetscCallCXX(exec.fence()); // make sure one can access the host copy immediately
 31:       PetscCall(PetscLogGpuToCpu(bytes));
 32:     }
 33:   } else {
 34:     if (v_dual.need_sync_device()) {
 35:       PetscCallCXX(v_dual.sync_device(exec));
 36:       PetscCall(PetscLogCpuToGpu(bytes));
 37:     }
 38:   }
 39:   PetscCall(PetscLogGpuTimeEnd());
 40:   PetscFunctionReturn(PETSC_SUCCESS);
 41: }
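// Illustrative sketch (added; not part of the original file): the DualView protocol this
// helper supports. A caller syncs the space it wants to read, then marks the space it
// wrote; v_dual and exec below stand for the members used elsewhere in this file.
//
//   PetscCall(KokkosDualViewSync<Kokkos::DefaultExecutionSpace::memory_space>(v_dual, exec)); // device copy is now valid
//   auto d = v_dual.view_device();
//   /* ... launch kernels that write d ... */
//   v_dual.modify_device(); // host copy is now stale; a later sync_host() will copy back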

 43: template <class MemorySpace>
 44: static PetscErrorCode VecGetKokkosView_Private(Vec v, PetscScalarKokkosViewType<MemorySpace> *kv, PetscBool overwrite)
 45: {
 46:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

 48:   PetscFunctionBegin;
 49:   VecErrorIfNotKokkos(v);
 50:   if (!overwrite) { /* If overwrite=true, no need to sync the space, since caller will overwrite the data */
 51:     PetscCall(KokkosDualViewSync<MemorySpace>(veckok->v_dual, PetscGetKokkosExecutionSpace()));
 52:   }
 53:   *kv = veckok->v_dual.view<MemorySpace>();
 54:   PetscFunctionReturn(PETSC_SUCCESS);
 55: }

 57: template <class MemorySpace>
 58: static PetscErrorCode VecRestoreKokkosView_Private(Vec v, PetscScalarKokkosViewType<MemorySpace> *kv, PetscBool overwrite)
 59: {
 60:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

 62:   PetscFunctionBegin;
 63:   VecErrorIfNotKokkos(v);
 64:   if (overwrite) veckok->v_dual.clear_sync_state(); /* If overwrite=true, clear the old sync state since user forced an overwrite */
 65:   veckok->v_dual.modify<MemorySpace>();
 66:   PetscCall(PetscObjectStateIncrease((PetscObject)v));
 67:   PetscFunctionReturn(PETSC_SUCCESS);
 68: }

 70: template <class MemorySpace>
 71: PetscErrorCode VecGetKokkosView(Vec v, ConstPetscScalarKokkosViewType<MemorySpace> *kv)
 72: {
 73:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

 75:   PetscFunctionBegin;
 76:   VecErrorIfNotKokkos(v);
 77:   PetscCall(KokkosDualViewSync<MemorySpace>(veckok->v_dual, PetscGetKokkosExecutionSpace()));
 78:   *kv = veckok->v_dual.view<MemorySpace>();
 79:   PetscFunctionReturn(PETSC_SUCCESS);
 80: }

 82: /* Function template explicit instantiation */
 83: template PETSC_VISIBILITY_PUBLIC PetscErrorCode VecGetKokkosView(Vec, ConstPetscScalarKokkosView *);
 84: template <>
 85: PETSC_VISIBILITY_PUBLIC PetscErrorCode VecGetKokkosView(Vec v, PetscScalarKokkosView *kv)
 86: {
 87:   return VecGetKokkosView_Private(v, kv, PETSC_FALSE);
 88: }
 89: template <>
 90: PETSC_VISIBILITY_PUBLIC PetscErrorCode VecRestoreKokkosView(Vec v, PetscScalarKokkosView *kv)
 91: {
 92:   return VecRestoreKokkosView_Private(v, kv, PETSC_FALSE);
 93: }
 94: template <>
 95: PETSC_VISIBILITY_PUBLIC PetscErrorCode VecGetKokkosViewWrite(Vec v, PetscScalarKokkosView *kv)
 96: {
 97:   return VecGetKokkosView_Private(v, kv, PETSC_TRUE);
 98: }
 99: template <>
100: PETSC_VISIBILITY_PUBLIC PetscErrorCode VecRestoreKokkosViewWrite(Vec v, PetscScalarKokkosView *kv)
101: {
102:   return VecRestoreKokkosView_Private(v, kv, PETSC_TRUE);
103: }

105: #if !defined(KOKKOS_ENABLE_DEFAULT_DEVICE_TYPE_HOST) /* Get host views if the default memory space is not host space */
106: template PETSC_VISIBILITY_PUBLIC PetscErrorCode VecGetKokkosView(Vec, ConstPetscScalarKokkosViewHost *);
107: template <>
108: PETSC_VISIBILITY_PUBLIC PetscErrorCode VecGetKokkosView(Vec v, PetscScalarKokkosViewHost *kv)
109: {
110:   return VecGetKokkosView_Private(v, kv, PETSC_FALSE);
111: }
112: template <>
113: PETSC_VISIBILITY_PUBLIC PetscErrorCode VecRestoreKokkosView(Vec v, PetscScalarKokkosViewHost *kv)
114: {
115:   return VecRestoreKokkosView_Private(v, kv, PETSC_FALSE);
116: }
117: template <>
118: PETSC_VISIBILITY_PUBLIC PetscErrorCode VecGetKokkosViewWrite(Vec v, PetscScalarKokkosViewHost *kv)
119: {
120:   return VecGetKokkosView_Private(v, kv, PETSC_TRUE);
121: }
122: template <>
123: PETSC_VISIBILITY_PUBLIC PetscErrorCode VecRestoreKokkosViewWrite(Vec v, PetscScalarKokkosViewHost *kv)
124: {
125:   return VecRestoreKokkosView_Private(v, kv, PETSC_TRUE);
126: }
127: #endif
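// Illustrative usage sketch (added; x is an assumed Vec of type VECSEQKOKKOS): how
// callers typically pair the accessors instantiated above.
//
//   PetscScalarKokkosView xv;
//   PetscCall(VecGetKokkosViewWrite(x, &xv)); // overwrite = true: no host/device sync performed
//   PetscCallCXX(KokkosBlas::fill(PetscGetKokkosExecutionSpace(), xv, 1.0));
//   PetscCall(VecRestoreKokkosViewWrite(x, &xv)); // clears the sync state and marks the default memory space modified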

129: PetscErrorCode VecSetRandom_SeqKokkos(Vec xin, PetscRandom r)
130: {
131:   const PetscInt n = xin->map->n;
132:   PetscScalar   *xx;

134:   PetscFunctionBegin;
135:   PetscCall(VecGetArrayWrite(xin, &xx)); /* TODO: generate randoms directly on device */
136:   for (PetscInt i = 0; i < n; i++) PetscCall(PetscRandomGetValue(r, &xx[i]));
137:   PetscCall(VecRestoreArrayWrite(xin, &xx));
138:   PetscFunctionReturn(PETSC_SUCCESS);
139: }

141: /* x = |x| */
142: PetscErrorCode VecAbs_SeqKokkos(Vec xin)
143: {
144:   PetscScalarKokkosView xv;
145:   auto                 &exec = PetscGetKokkosExecutionSpace();

147:   PetscFunctionBegin;
148:   PetscCall(PetscLogGpuTimeBegin());
149:   PetscCall(VecGetKokkosView(xin, &xv));
150:   PetscCallCXX(KokkosBlas::abs(exec, xv, xv));
151:   PetscCall(VecRestoreKokkosView(xin, &xv));
152:   PetscCall(PetscLogGpuTimeEnd());
153:   PetscFunctionReturn(PETSC_SUCCESS);
154: }

156: /* x = 1/x */
157: PetscErrorCode VecReciprocal_SeqKokkos(Vec xin)
158: {
159:   PetscScalarKokkosView xv;

161:   PetscFunctionBegin;
162:   PetscCall(PetscLogGpuTimeBegin());
163:   PetscCall(VecGetKokkosView(xin, &xv));
164:   PetscCallCXX(Kokkos::parallel_for(
165:     "VecReciprocal", Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, xin->map->n), KOKKOS_LAMBDA(const PetscInt &i) {
166:       if (xv(i) != (PetscScalar)0.0) xv(i) = (PetscScalar)1.0 / xv(i);
167:     }));
168:   PetscCall(VecRestoreKokkosView(xin, &xv));
169:   PetscCall(PetscLogGpuTimeEnd());
170:   PetscFunctionReturn(PETSC_SUCCESS);
171: }

173: PetscErrorCode VecMin_SeqKokkos(Vec xin, PetscInt *p, PetscReal *val)
174: {
175:   ConstPetscScalarKokkosView                      xv;
176:   Kokkos::MinLoc<PetscReal, PetscInt>::value_type result;

178:   PetscFunctionBegin;
179:   PetscCall(PetscLogGpuTimeBegin());
180:   PetscCall(VecGetKokkosView(xin, &xv));
181:   PetscCallCXX(Kokkos::parallel_reduce(
182:     "VecMin", Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, xin->map->n),
183:     KOKKOS_LAMBDA(const PetscInt &i, Kokkos::MinLoc<PetscReal, PetscInt>::value_type &lupdate) {
184:       if (PetscRealPart(xv(i)) < lupdate.val) {
185:         lupdate.val = PetscRealPart(xv(i));
186:         lupdate.loc = i;
187:       }
188:     },
189:     Kokkos::MinLoc<PetscReal, PetscInt>(result)));
190:   *val = result.val;
191:   if (p) *p = result.loc;
192:   PetscCall(VecRestoreKokkosView(xin, &xv));
193:   PetscCall(PetscLogGpuTimeEnd());
194:   PetscFunctionReturn(PETSC_SUCCESS);
195: }

197: PetscErrorCode VecMax_SeqKokkos(Vec xin, PetscInt *p, PetscReal *val)
198: {
199:   ConstPetscScalarKokkosView                      xv;
200:   Kokkos::MaxLoc<PetscReal, PetscInt>::value_type result;

202:   PetscFunctionBegin;
203:   PetscCall(PetscLogGpuTimeBegin());
204:   PetscCall(VecGetKokkosView(xin, &xv));
205:   PetscCallCXX(Kokkos::parallel_reduce(
206:     "VecMax", Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, xin->map->n),
207:     KOKKOS_LAMBDA(const PetscInt &i, Kokkos::MaxLoc<PetscReal, PetscInt>::value_type &lupdate) {
208:       if (PetscRealPart(xv(i)) > lupdate.val) {
209:         lupdate.val = PetscRealPart(xv(i));
210:         lupdate.loc = i;
211:       }
212:     },
213:     Kokkos::MaxLoc<PetscReal, PetscInt>(result)));
214:   *val = result.val;
215:   if (p) *p = result.loc;
216:   PetscCall(VecRestoreKokkosView(xin, &xv));
217:   PetscCall(PetscLogGpuTimeEnd());
218:   PetscFunctionReturn(PETSC_SUCCESS);
219: }

221: PetscErrorCode VecSum_SeqKokkos(Vec xin, PetscScalar *sum)
222: {
223:   ConstPetscScalarKokkosView xv;

225:   PetscFunctionBegin;
226:   PetscCall(PetscLogGpuTimeBegin());
227:   PetscCall(VecGetKokkosView(xin, &xv));
228:   PetscCallCXX(*sum = KokkosBlas::sum(PetscGetKokkosExecutionSpace(), xv));
229:   PetscCall(VecRestoreKokkosView(xin, &xv));
230:   PetscCall(PetscLogGpuTimeEnd());
231:   PetscFunctionReturn(PETSC_SUCCESS);
232: }

234: PetscErrorCode VecShift_SeqKokkos(Vec xin, PetscScalar shift)
235: {
236:   PetscScalarKokkosView xv;

238:   PetscFunctionBegin;
239:   PetscCall(PetscLogGpuTimeBegin());
240:   PetscCall(VecGetKokkosView(xin, &xv));
 241:   PetscCallCXX(Kokkos::parallel_for("VecShift", Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, xin->map->n), KOKKOS_LAMBDA(const PetscInt &i) { xv(i) += shift; })); PetscCall(VecRestoreKokkosView(xin, &xv));
242:   PetscCall(PetscLogGpuTimeEnd());
243:   PetscCall(PetscLogGpuFlops(xin->map->n));
244:   PetscFunctionReturn(PETSC_SUCCESS);
245: }

247: /* y = alpha x + y */
248: PetscErrorCode VecAXPY_SeqKokkos(Vec yin, PetscScalar alpha, Vec xin)
249: {
250:   PetscFunctionBegin;
251:   if (alpha == (PetscScalar)0.0) PetscFunctionReturn(PETSC_SUCCESS);
252:   if (yin == xin) {
253:     PetscCall(VecScale_SeqKokkos(yin, alpha + 1));
254:   } else {
255:     PetscBool xiskok, yiskok;

257:     PetscCall(PetscObjectTypeCompareAny((PetscObject)xin, &xiskok, VECSEQKOKKOS, VECMPIKOKKOS, ""));
258:     PetscCall(PetscObjectTypeCompareAny((PetscObject)yin, &yiskok, VECSEQKOKKOS, VECMPIKOKKOS, ""));
259:     if (xiskok && yiskok) {
260:       PetscScalarKokkosView      yv;
261:       ConstPetscScalarKokkosView xv;

263:       PetscCall(PetscLogGpuTimeBegin());
264:       PetscCall(VecGetKokkosView(xin, &xv));
265:       PetscCall(VecGetKokkosView(yin, &yv));
266:       PetscCallCXX(KokkosBlas::axpy(PetscGetKokkosExecutionSpace(), alpha, xv, yv));
267:       PetscCall(VecRestoreKokkosView(xin, &xv));
268:       PetscCall(VecRestoreKokkosView(yin, &yv));
269:       PetscCall(PetscLogGpuTimeEnd());
270:       PetscCall(PetscLogGpuFlops(2.0 * yin->map->n));
271:     } else {
272:       PetscCall(VecAXPY_Seq(yin, alpha, xin));
273:     }
274:   }
275:   PetscFunctionReturn(PETSC_SUCCESS);
276: }

278: /* y = x + beta y */
279: PetscErrorCode VecAYPX_SeqKokkos(Vec yin, PetscScalar beta, Vec xin)
280: {
281:   PetscFunctionBegin;
282:   /* One needs to define KOKKOSBLAS_OPTIMIZATION_LEVEL_AXPBY > 2 to have optimizations for cases alpha/beta = 0,+/-1 */
283:   PetscCall(VecAXPBY_SeqKokkos(yin, 1.0, beta, xin));
284:   PetscFunctionReturn(PETSC_SUCCESS);
285: }

287: /* z = y^T x */
288: PetscErrorCode VecTDot_SeqKokkos(Vec xin, Vec yin, PetscScalar *z)
289: {
290:   ConstPetscScalarKokkosView xv, yv;

292:   PetscFunctionBegin;
293:   PetscCall(PetscLogGpuTimeBegin());
294:   PetscCall(VecGetKokkosView(xin, &xv));
295:   PetscCall(VecGetKokkosView(yin, &yv));
296:   // Kokkos always overwrites z, so no need to init it
297:   PetscCallCXX(Kokkos::parallel_reduce("VecTDot", Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, xin->map->n), KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &update) { update += yv(i) * xv(i); }, *z));
298:   PetscCall(VecRestoreKokkosView(yin, &yv));
299:   PetscCall(VecRestoreKokkosView(xin, &xv));
300:   PetscCall(PetscLogGpuTimeEnd());
301:   if (xin->map->n > 0) PetscCall(PetscLogGpuFlops(2.0 * xin->map->n));
302:   PetscFunctionReturn(PETSC_SUCCESS);
303: }

305: struct TransposeDotTag { };
306: struct ConjugateDotTag { };

308: template <PetscInt ValueCount>
309: struct MDotFunctor {
310:   static_assert(ValueCount >= 1 && ValueCount <= 8, "ValueCount must be in [1, 8]");
311:   /* Note the C++ notation for an array typedef */
313:   typedef PetscScalar                           value_type[];
314:   typedef ConstPetscScalarKokkosView::size_type size_type;

316:   /* Tell Kokkos the result array's number of entries. This must be a public value in the functor */
317:   static constexpr size_type value_count = ValueCount;
318:   ConstPetscScalarKokkosView xv, yv[8];

320:   MDotFunctor(ConstPetscScalarKokkosView &xv, ConstPetscScalarKokkosView &yv0, ConstPetscScalarKokkosView &yv1, ConstPetscScalarKokkosView &yv2, ConstPetscScalarKokkosView &yv3, ConstPetscScalarKokkosView &yv4, ConstPetscScalarKokkosView &yv5, ConstPetscScalarKokkosView &yv6, ConstPetscScalarKokkosView &yv7) :
321:     xv(xv)
322:   {
323:     yv[0] = yv0;
324:     yv[1] = yv1;
325:     yv[2] = yv2;
326:     yv[3] = yv3;
327:     yv[4] = yv4;
328:     yv[5] = yv5;
329:     yv[6] = yv6;
330:     yv[7] = yv7;
331:   }

333:   KOKKOS_INLINE_FUNCTION void operator()(TransposeDotTag, const size_type i, value_type sum) const
334:   {
335:     PetscScalar xval = xv(i);
336:     for (size_type j = 0; j < value_count; ++j) sum[j] += yv[j](i) * xval;
337:   }

339:   KOKKOS_INLINE_FUNCTION void operator()(ConjugateDotTag, const size_type i, value_type sum) const
340:   {
341:     PetscScalar xval = xv(i);
342:     for (size_type j = 0; j < value_count; ++j) sum[j] += PetscConj(yv[j](i)) * xval;
343:   }

345:   // Per https://kokkos.github.io/kokkos-core-wiki/API/core/parallel-dispatch/parallel_reduce.html#requirements
346:   // "when specifying a tag in the policy, the functor's potential init/join/final member functions must also be tagged"
347:   // So we have this kind of duplicated code.
348:   KOKKOS_INLINE_FUNCTION void join(TransposeDotTag, value_type dst, const value_type src) const { join(dst, src); }
349:   KOKKOS_INLINE_FUNCTION void join(ConjugateDotTag, value_type dst, const value_type src) const { join(dst, src); }

351:   KOKKOS_INLINE_FUNCTION void init(TransposeDotTag, value_type sum) const { init(sum); }
352:   KOKKOS_INLINE_FUNCTION void init(ConjugateDotTag, value_type sum) const { init(sum); }

354:   KOKKOS_INLINE_FUNCTION void join(value_type dst, const value_type src) const
355:   {
356:     for (size_type j = 0; j < value_count; j++) dst[j] += src[j];
357:   }

359:   KOKKOS_INLINE_FUNCTION void init(value_type sum) const
360:   {
361:     for (size_type j = 0; j < value_count; j++) sum[j] = 0.0;
362:   }
363: };
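// Dispatch sketch for the functor above (added; illustrative, with exec, N and results as
// used in VecMultiDot_Private() below): the WorkTag selects the plain or conjugated product
// and Kokkos sizes the per-thread accumulator from the public value_count member.
//
//   MDotFunctor<8> f(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]);
//   Kokkos::parallel_reduce(Kokkos::RangePolicy<ConjugateDotTag>(exec, 0, N), f, results);
//   // results(j) now holds y_j^H x for j = 0..7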

365: template <class WorkTag>
366: PetscErrorCode VecMultiDot_Private(Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z)
367: {
368:   PetscInt                   i, j, cur = 0, ngroup = nv / 8, rem = nv % 8, N = xin->map->n;
369:   ConstPetscScalarKokkosView xv, yv[8];
370:   PetscScalarKokkosViewHost  zv(z, nv);
371:   auto                      &exec = PetscGetKokkosExecutionSpace();

373:   PetscFunctionBegin;
374:   PetscCall(VecGetKokkosView(xin, &xv));
375:   for (i = 0; i < ngroup; i++) { /* 8 y's per group */
376:     for (j = 0; j < 8; j++) PetscCall(VecGetKokkosView(yin[cur + j], &yv[j]));
 377:     MDotFunctor<8> mdot(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]); /* Hopefully Kokkos makes this asynchronous */
378:     PetscCall(PetscLogGpuTimeBegin());
379:     PetscCallCXX(Kokkos::parallel_reduce(Kokkos::RangePolicy<WorkTag>(exec, 0, N), mdot, Kokkos::subview(zv, Kokkos::pair<PetscInt, PetscInt>(cur, cur + 8))));
380:     PetscCall(PetscLogGpuTimeEnd());
381:     for (j = 0; j < 8; j++) PetscCall(VecRestoreKokkosView(yin[cur + j], &yv[j]));
382:     cur += 8;
383:   }

385:   if (rem) { /* The remaining */
386:     for (j = 0; j < rem; j++) PetscCall(VecGetKokkosView(yin[cur + j], &yv[j]));
387:     Kokkos::RangePolicy<WorkTag> policy(exec, 0, N);
388:     auto                         results = Kokkos::subview(zv, Kokkos::pair<PetscInt, PetscInt>(cur, cur + rem));
389:     // clang-format off
390:     PetscCall(PetscLogGpuTimeBegin());
391:     switch (rem) {
392:     case 1: PetscCallCXX(Kokkos::parallel_reduce(policy, MDotFunctor<1>(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]), results)); break;
393:     case 2: PetscCallCXX(Kokkos::parallel_reduce(policy, MDotFunctor<2>(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]), results)); break;
394:     case 3: PetscCallCXX(Kokkos::parallel_reduce(policy, MDotFunctor<3>(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]), results)); break;
395:     case 4: PetscCallCXX(Kokkos::parallel_reduce(policy, MDotFunctor<4>(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]), results)); break;
396:     case 5: PetscCallCXX(Kokkos::parallel_reduce(policy, MDotFunctor<5>(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]), results)); break;
397:     case 6: PetscCallCXX(Kokkos::parallel_reduce(policy, MDotFunctor<6>(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]), results)); break;
398:     case 7: PetscCallCXX(Kokkos::parallel_reduce(policy, MDotFunctor<7>(xv, yv[0], yv[1], yv[2], yv[3], yv[4], yv[5], yv[6], yv[7]), results)); break;
399:     }
400:     PetscCall(PetscLogGpuTimeEnd());
401:     // clang-format on
402:     for (j = 0; j < rem; j++) PetscCall(VecRestoreKokkosView(yin[cur + j], &yv[j]));
403:   }
404:   PetscCall(VecRestoreKokkosView(xin, &xv));
405:   exec.fence(); /* If reduce is async, then we need this fence to make sure z is ready for use on host */
406:   PetscFunctionReturn(PETSC_SUCCESS);
407: }
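// Worked example of the grouping above (added, illustrative): nv = 19 gives ngroup = 2 and
// rem = 3, i.e. two MDotFunctor<8> reductions filling z[0..15] followed by one
// MDotFunctor<3> reduction filling z[16..18], each through a subview of zv.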

409: static PetscErrorCode VecMultiDot_Verbose(Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z)
410: {
411:   PetscInt                   ngroup = nv / 8, rem = nv % 8, N = xin->map->n;
412:   ConstPetscScalarKokkosView xv, y0, y1, y2, y3, y4, y5, y6, y7;
413:   PetscScalar               *zp = z;
414:   const Vec                 *yp = yin;
415:   Kokkos::RangePolicy<>      policy(PetscGetKokkosExecutionSpace(), 0, N);

417:   // clang-format off
418:   PetscFunctionBegin;
419:   PetscCall(VecGetKokkosView(xin, &xv));
420:   for (PetscInt k = 0; k < ngroup; k++) { // 8 y's per group
421:     PetscCall(VecGetKokkosView(yp[0], &y0));
422:     PetscCall(VecGetKokkosView(yp[1], &y1));
423:     PetscCall(VecGetKokkosView(yp[2], &y2));
424:     PetscCall(VecGetKokkosView(yp[3], &y3));
425:     PetscCall(VecGetKokkosView(yp[4], &y4));
426:     PetscCall(VecGetKokkosView(yp[5], &y5));
427:     PetscCall(VecGetKokkosView(yp[6], &y6));
428:     PetscCall(VecGetKokkosView(yp[7], &y7));
429:     PetscCall(PetscLogGpuTimeBegin()); // only for GPU kernel execution
430:     Kokkos::parallel_reduce(
431:       "VecMDot8", policy,
432:       KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &lsum0, PetscScalar &lsum1, PetscScalar &lsum2, PetscScalar &lsum3, PetscScalar &lsum4, PetscScalar &lsum5, PetscScalar &lsum6, PetscScalar &lsum7) {
433:         lsum0 += xv(i) * PetscConj(y0(i)); lsum1 += xv(i) * PetscConj(y1(i)); lsum2 += xv(i) * PetscConj(y2(i)); lsum3 += xv(i) * PetscConj(y3(i));
434:         lsum4 += xv(i) * PetscConj(y4(i)); lsum5 += xv(i) * PetscConj(y5(i)); lsum6 += xv(i) * PetscConj(y6(i)); lsum7 += xv(i) * PetscConj(y7(i));
435:       }, zp[0], zp[1], zp[2], zp[3], zp[4], zp[5], zp[6], zp[7]);
436:     PetscCall(PetscLogGpuTimeEnd());
437:     PetscCall(PetscLogGpuToCpu(8 * sizeof(PetscScalar))); // for copying to z[] on host
438:     PetscCall(VecRestoreKokkosView(yp[0], &y0));
439:     PetscCall(VecRestoreKokkosView(yp[1], &y1));
440:     PetscCall(VecRestoreKokkosView(yp[2], &y2));
441:     PetscCall(VecRestoreKokkosView(yp[3], &y3));
442:     PetscCall(VecRestoreKokkosView(yp[4], &y4));
443:     PetscCall(VecRestoreKokkosView(yp[5], &y5));
444:     PetscCall(VecRestoreKokkosView(yp[6], &y6));
445:     PetscCall(VecRestoreKokkosView(yp[7], &y7));
446:     yp += 8;
447:     zp += 8;
448:   }

450:   if (rem) { /* The remaining */
451:     if (rem > 0) PetscCall(VecGetKokkosView(yp[0], &y0));
452:     if (rem > 1) PetscCall(VecGetKokkosView(yp[1], &y1));
453:     if (rem > 2) PetscCall(VecGetKokkosView(yp[2], &y2));
454:     if (rem > 3) PetscCall(VecGetKokkosView(yp[3], &y3));
455:     if (rem > 4) PetscCall(VecGetKokkosView(yp[4], &y4));
456:     if (rem > 5) PetscCall(VecGetKokkosView(yp[5], &y5));
457:     if (rem > 6) PetscCall(VecGetKokkosView(yp[6], &y6));
458:     PetscCall(PetscLogGpuTimeBegin());
459:     switch (rem) {
460:     case 7:
461:       Kokkos::parallel_reduce(
462:         "VecMDot7", policy,
463:         KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &lsum0, PetscScalar &lsum1, PetscScalar &lsum2, PetscScalar &lsum3, PetscScalar &lsum4, PetscScalar &lsum5, PetscScalar &lsum6) {
464:         lsum0 += xv(i) * PetscConj(y0(i)); lsum1 += xv(i) * PetscConj(y1(i)); lsum2 += xv(i) * PetscConj(y2(i)); lsum3 += xv(i) * PetscConj(y3(i));
465:         lsum4 += xv(i) * PetscConj(y4(i)); lsum5 += xv(i) * PetscConj(y5(i)); lsum6 += xv(i) * PetscConj(y6(i));
466:       }, zp[0], zp[1], zp[2], zp[3], zp[4], zp[5], zp[6]);
467:       break;
468:     case 6:
469:       Kokkos::parallel_reduce(
470:         "VecMDot6", policy,
471:         KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &lsum0, PetscScalar &lsum1, PetscScalar &lsum2, PetscScalar &lsum3, PetscScalar &lsum4, PetscScalar &lsum5) {
472:         lsum0 += xv(i) * PetscConj(y0(i)); lsum1 += xv(i) * PetscConj(y1(i)); lsum2 += xv(i) * PetscConj(y2(i)); lsum3 += xv(i) * PetscConj(y3(i));
473:         lsum4 += xv(i) * PetscConj(y4(i)); lsum5 += xv(i) * PetscConj(y5(i));
474:       }, zp[0], zp[1], zp[2], zp[3], zp[4], zp[5]);
475:       break;
476:     case 5:
477:       Kokkos::parallel_reduce(
478:         "VecMDot5", policy,
479:         KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &lsum0, PetscScalar &lsum1, PetscScalar &lsum2, PetscScalar &lsum3, PetscScalar &lsum4) {
480:         lsum0 += xv(i) * PetscConj(y0(i)); lsum1 += xv(i) * PetscConj(y1(i)); lsum2 += xv(i) * PetscConj(y2(i)); lsum3 += xv(i) * PetscConj(y3(i));
481:         lsum4 += xv(i) * PetscConj(y4(i));
482:       }, zp[0], zp[1], zp[2], zp[3], zp[4]);
483:       break;
484:     case 4:
485:       Kokkos::parallel_reduce(
486:         "VecMDot4", policy,
487:         KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &lsum0, PetscScalar &lsum1, PetscScalar &lsum2, PetscScalar &lsum3) {
488:         lsum0 += xv(i) * PetscConj(y0(i)); lsum1 += xv(i) * PetscConj(y1(i)); lsum2 += xv(i) * PetscConj(y2(i)); lsum3 += xv(i) * PetscConj(y3(i));
489:       }, zp[0], zp[1], zp[2], zp[3]);
490:       break;
491:     case 3:
492:       Kokkos::parallel_reduce(
493:         "VecMDot3", policy,
494:         KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &lsum0, PetscScalar &lsum1, PetscScalar &lsum2) {
495:         lsum0 += xv(i) * PetscConj(y0(i)); lsum1 += xv(i) * PetscConj(y1(i)); lsum2 += xv(i) * PetscConj(y2(i));
496:       }, zp[0], zp[1], zp[2]);
497:       break;
498:     case 2:
499:       Kokkos::parallel_reduce(
500:         "VecMDot2", policy,
501:         KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &lsum0, PetscScalar &lsum1) {
502:         lsum0 += xv(i) * PetscConj(y0(i)); lsum1 += xv(i) * PetscConj(y1(i));
503:       }, zp[0], zp[1]);
504:       break;
505:     case 1:
506:       Kokkos::parallel_reduce(
507:         "VecMDot1", policy,
508:         KOKKOS_LAMBDA(const PetscInt &i, PetscScalar &lsum0) {
509:         lsum0 += xv(i) * PetscConj(y0(i));
510:       }, zp[0]);
511:       break;
512:     }
513:     PetscCall(PetscLogGpuTimeEnd());
514:     PetscCall(PetscLogGpuToCpu(rem * sizeof(PetscScalar))); // for copying to z[] on host
515:     if (rem > 0) PetscCall(VecRestoreKokkosView(yp[0], &y0));
516:     if (rem > 1) PetscCall(VecRestoreKokkosView(yp[1], &y1));
517:     if (rem > 2) PetscCall(VecRestoreKokkosView(yp[2], &y2));
518:     if (rem > 3) PetscCall(VecRestoreKokkosView(yp[3], &y3));
519:     if (rem > 4) PetscCall(VecRestoreKokkosView(yp[4], &y4));
520:     if (rem > 5) PetscCall(VecRestoreKokkosView(yp[5], &y5));
521:     if (rem > 6) PetscCall(VecRestoreKokkosView(yp[6], &y6));
522:   }
523:   PetscCall(VecRestoreKokkosView(xin, &xv));
524:   PetscFunctionReturn(PETSC_SUCCESS);
525:   // clang-format on
526: }

528: /* z[i] = (x,y_i) = y_i^H x */
529: PetscErrorCode VecMDot_SeqKokkos(Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z)
530: {
531:   PetscFunctionBegin;
 532:   // For reasons not yet understood, VecMultiDot_Private() performs much worse than VecMultiDot_Verbose() with HIP,
 533:   // while the two are on par with CUDA. The Kokkos team is investigating this problem.
534: #if 0
535:   PetscCall(VecMultiDot_Private<ConjugateDotTag>(xin, nv, yin, z));
536: #else
537:   PetscCall(VecMultiDot_Verbose(xin, nv, yin, z));
538: #endif
539:   PetscCall(PetscLogGpuFlops(PetscMax(nv * (2.0 * xin->map->n - 1), 0.0)));
540:   PetscFunctionReturn(PETSC_SUCCESS);
541: }

543: /* z[i] = (x,y_i) = y_i^T x */
544: PetscErrorCode VecMTDot_SeqKokkos(Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z)
545: {
546:   PetscFunctionBegin;
547:   PetscCall(VecMultiDot_Private<TransposeDotTag>(xin, nv, yin, z));
548:   PetscCall(PetscLogGpuFlops(PetscMax(nv * (2.0 * xin->map->n - 1), 0.0)));
549:   PetscFunctionReturn(PETSC_SUCCESS);
550: }

552: // z[i] = (x,y_i) = y_i^H x OR y_i^T x
553: static PetscErrorCode VecMultiDot_SeqKokkos_GEMV(PetscBool conjugate, Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z_h)
554: {
555:   PetscInt                   i, j, nfail;
556:   ConstPetscScalarKokkosView xv, yfirst, ynext;
557:   const PetscScalar         *yarray;
558:   PetscBool                  stop  = PETSC_FALSE;
559:   PetscScalar               *z_d   = nullptr;
560:   const char                *trans = conjugate ? "C" : "T";
561:   PetscInt64                 lda   = 0;
562:   PetscInt                   m, n = xin->map->n;

564:   PetscFunctionBegin;
565:   PetscCall(VecGetKokkosView(xin, &xv));
566: #if defined(KOKKOS_ENABLE_DEFAULT_DEVICE_TYPE_HOST)
567:   z_d = z_h;
568: #endif
569:   i = nfail = 0;
570:   while (i < nv) {
571:     // search a sequence of vectors with a fixed stride
572:     stop = PETSC_FALSE;
573:     PetscCall(VecGetKokkosView(yin[i], &yfirst));
574:     yarray = yfirst.data();
575:     for (j = i + 1; j < nv; j++) {
576:       PetscCall(VecGetKokkosView(yin[j], &ynext));
577:       if (j == i + 1) {
578:         lda = ynext.data() - yarray;                       // arbitrary ptrdiff could be very large
579:         if (lda < 0 || lda - n > 64) stop = PETSC_TRUE;    // avoid using arbitrary lda; 64 bytes are a big enough alignment in VecDuplicateVecs
580:       } else if (lda * (j - i) != ynext.data() - yarray) { // not in the same stride? if so, stop searching
581:         stop = PETSC_TRUE;
582:       }
583:       PetscCall(VecRestoreKokkosView(yin[j], &ynext));
584:       if (stop) break;
585:     }
586:     PetscCall(VecRestoreKokkosView(yin[i], &yfirst));

588:     // found m vectors yin[i..j) with a stride lda at address yarray
589:     m = j - i;
590:     if (m > 1) {
591:       if (!z_d) {
592:         if (nv > PetscScalarPoolSize) { // rare case
593:           PetscScalarPoolSize = nv;
 594:           PetscCallCXX(PetscScalarPool = static_cast<PetscScalar *>(Kokkos::kokkos_realloc(PetscScalarPool, sizeof(PetscScalar) * PetscScalarPoolSize))); // kokkos_realloc() takes a size in bytes
595:         }
596:         z_d = PetscScalarPool;
597:       }
598:       const auto &A  = Kokkos::View<const PetscScalar **, Kokkos::LayoutLeft>(yarray, lda, m);
599:       const auto &Y  = Kokkos::subview(A, std::pair<PetscInt, PetscInt>(0, n), Kokkos::ALL);
600:       auto        zv = PetscScalarKokkosDualView(PetscScalarKokkosView(z_d + i, m), PetscScalarKokkosViewHost(z_h + i, m));
601:       PetscCall(PetscLogGpuTimeBegin());
602:       PetscCallCXX(KokkosBlas::gemv(PetscGetKokkosExecutionSpace(), trans, 1.0, Y, xv, 0.0, zv.view_device()));
603:       PetscCall(PetscLogGpuTimeEnd());
604:       PetscCallCXX(zv.modify_device());
605:       PetscCallCXX(zv.sync_host());
606:       PetscCall(PetscLogGpuToCpu(zv.extent(0) * sizeof(PetscScalar)));
607:       PetscCall(PetscLogGpuFlops(PetscMax(m * (2.0 * n - 1), 0.0)));
608:     } else {
609:       // we only allow falling back on VecDot once, to avoid doing VecMultiDot via individual VecDots
610:       if (nfail++ == 0) {
611:         if (conjugate) PetscCall(VecDot_SeqKokkos(xin, yin[i], z_h + i));
612:         else PetscCall(VecTDot_SeqKokkos(xin, yin[i], z_h + i));
613:       } else break; // break the while loop
614:     }
615:     i = j;
616:   }
617:   PetscCall(VecRestoreKokkosView(xin, &xv));
618:   if (i < nv) { // finish the remaining if any
619:     if (conjugate) PetscCall(VecMDot_SeqKokkos(xin, nv - i, yin + i, z_h + i));
620:     else PetscCall(VecMTDot_SeqKokkos(xin, nv - i, yin + i, z_h + i));
621:   }
622:   PetscFunctionReturn(PETSC_SUCCESS);
623: }
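// Worked example of the stride search above (added, illustrative; assumes the y's came
// from VecDuplicateVecs() and therefore sit at a fixed spacing): with n = 100 and
// consecutive y-arrays lda = 104 scalars apart, yin[i..j) is viewed as the column-major
// matrix A(lda, m) with Y = A(0:n, :), and the single gemv above computes z_k = y_k^H x
// (trans = "C") or z_k = y_k^T x (trans = "T") for all m columns instead of m separate dots.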

625: PetscErrorCode VecMDot_SeqKokkos_GEMV(Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z)
626: {
627:   PetscFunctionBegin;
628:   PetscCall(VecMultiDot_SeqKokkos_GEMV(PETSC_TRUE, xin, nv, yin, z)); // conjugate
629:   PetscFunctionReturn(PETSC_SUCCESS);
630: }

632: PetscErrorCode VecMTDot_SeqKokkos_GEMV(Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z)
633: {
634:   PetscFunctionBegin;
635:   PetscCall(VecMultiDot_SeqKokkos_GEMV(PETSC_FALSE, xin, nv, yin, z)); // transpose
636:   PetscFunctionReturn(PETSC_SUCCESS);
637: }

639: /* x[:] = alpha */
640: PetscErrorCode VecSet_SeqKokkos(Vec xin, PetscScalar alpha)
641: {
642:   PetscScalarKokkosView xv;
643:   auto                 &exec = PetscGetKokkosExecutionSpace();

645:   PetscFunctionBegin;
646:   PetscCall(PetscLogGpuTimeBegin());
647:   PetscCall(VecGetKokkosViewWrite(xin, &xv));
648:   PetscCallCXX(KokkosBlas::fill(exec, xv, alpha));
649:   PetscCall(VecRestoreKokkosViewWrite(xin, &xv));
650:   PetscCall(PetscLogGpuTimeEnd());
651:   PetscFunctionReturn(PETSC_SUCCESS);
652: }

654: /* x = alpha x */
655: PetscErrorCode VecScale_SeqKokkos(Vec xin, PetscScalar alpha)
656: {
657:   auto &exec = PetscGetKokkosExecutionSpace();

659:   PetscFunctionBegin;
660:   if (alpha == (PetscScalar)0.0) {
661:     PetscCall(VecSet_SeqKokkos(xin, alpha));
662:   } else if (alpha != (PetscScalar)1.0) {
663:     PetscScalarKokkosView xv;

665:     PetscCall(PetscLogGpuTimeBegin());
666:     PetscCall(VecGetKokkosView(xin, &xv));
667:     PetscCallCXX(KokkosBlas::scal(exec, xv, alpha, xv));
668:     PetscCall(VecRestoreKokkosView(xin, &xv));
669:     PetscCall(PetscLogGpuTimeEnd());
670:     PetscCall(PetscLogGpuFlops(xin->map->n));
671:   }
672:   PetscFunctionReturn(PETSC_SUCCESS);
673: }

675: /* z = y^H x */
676: PetscErrorCode VecDot_SeqKokkos(Vec xin, Vec yin, PetscScalar *z)
677: {
678:   ConstPetscScalarKokkosView xv, yv;
679:   auto                      &exec = PetscGetKokkosExecutionSpace();

681:   PetscFunctionBegin;
682:   PetscCall(PetscLogGpuTimeBegin());
683:   PetscCall(VecGetKokkosView(xin, &xv));
684:   PetscCall(VecGetKokkosView(yin, &yv));
685:   PetscCallCXX(*z = KokkosBlas::dot(exec, yv, xv)); /* KokkosBlas::dot(a,b) takes conjugate of a */
686:   PetscCall(VecRestoreKokkosView(xin, &xv));
687:   PetscCall(VecRestoreKokkosView(yin, &yv));
688:   PetscCall(PetscLogGpuTimeEnd());
689:   PetscCall(PetscLogGpuFlops(PetscMax(2.0 * xin->map->n - 1, 0.0)));
690:   PetscFunctionReturn(PETSC_SUCCESS);
691: }

693: /* y = x, where x is VECKOKKOS, but y may be not */
694: PetscErrorCode VecCopy_SeqKokkos(Vec xin, Vec yin)
695: {
696:   auto &exec = PetscGetKokkosExecutionSpace();

698:   PetscFunctionBegin;
699:   PetscCall(PetscLogGpuTimeBegin());
700:   if (xin != yin) {
701:     Vec_Kokkos *xkok = static_cast<Vec_Kokkos *>(xin->spptr);
702:     if (yin->offloadmask == PETSC_OFFLOAD_KOKKOS) {
703:       /* y is also a VecKokkos */
704:       Vec_Kokkos *ykok = static_cast<Vec_Kokkos *>(yin->spptr);
 705:       /* Kokkos rule: if x's host has newer data, deep_copy will copy it to y's host view; otherwise to y's device view.
 706:         If x's host is newer while y's device is newer, deep_copy errors out (though arguably it should not), so we
 707:         simply clear y's sync state first.
 708:        */
709:       ykok->v_dual.clear_sync_state();
710:       PetscCallCXX(Kokkos::deep_copy(exec, ykok->v_dual, xkok->v_dual)); // either cpu2cpu or gpu2cpu, so don't log it
711:     } else {
712:       PetscScalar *yarray;
713:       PetscCall(VecGetArrayWrite(yin, &yarray));
714:       PetscScalarKokkosViewHost yv(yarray, yin->map->n);
715:       if (xkok->v_dual.need_sync_host()) {                                     // x's device has newer data
716:         PetscCallCXX(Kokkos::deep_copy(exec, yv, xkok->v_dual.view_device())); // gpu2cpu
717:         PetscCallCXX(exec.fence());                                            // finish the deep copy
718:         PetscCall(PetscLogGpuToCpu(xkok->v_dual.extent(0) * sizeof(PetscScalar)));
719:       } else {
720:         PetscCallCXX(Kokkos::deep_copy(exec, yv, xkok->v_dual.view_host())); // cpu2cpu
721:       }
722:       PetscCall(VecRestoreArrayWrite(yin, &yarray));
723:     }
724:   }
725:   PetscCall(PetscLogGpuTimeEnd());
726:   PetscFunctionReturn(PETSC_SUCCESS);
727: }
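// Descriptive note (added): the copy above takes one of three paths: a Kokkos-to-Kokkos
// deep_copy of the whole DualView (Kokkos picks the host or device side); a device-to-host
// deep_copy plus a fence when x's device data is newer and y is a plain host vector; or a
// host-to-host deep_copy when x's host data is already current.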

729: /* y[i] <--> x[i] */
730: PetscErrorCode VecSwap_SeqKokkos(Vec xin, Vec yin)
731: {
732:   PetscFunctionBegin;
733:   if (xin != yin) {
734:     PetscScalarKokkosView xv, yv;

736:     PetscCall(PetscLogGpuTimeBegin());
737:     PetscCall(VecGetKokkosView(xin, &xv));
738:     PetscCall(VecGetKokkosView(yin, &yv));
739:     PetscCallCXX(Kokkos::parallel_for(
740:       Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, xin->map->n), KOKKOS_LAMBDA(const PetscInt &i) {
741:         PetscScalar tmp = xv(i);
742:         xv(i)           = yv(i);
743:         yv(i)           = tmp;
744:       }));
745:     PetscCall(VecRestoreKokkosView(xin, &xv));
746:     PetscCall(VecRestoreKokkosView(yin, &yv));
747:     PetscCall(PetscLogGpuTimeEnd());
748:   }
749:   PetscFunctionReturn(PETSC_SUCCESS);
750: }

752: /*  w = alpha x + y */
753: PetscErrorCode VecWAXPY_SeqKokkos(Vec win, PetscScalar alpha, Vec xin, Vec yin)
754: {
755:   PetscFunctionBegin;
756:   if (alpha == (PetscScalar)0.0) {
757:     PetscCall(VecCopy_SeqKokkos(yin, win));
758:   } else {
759:     ConstPetscScalarKokkosView xv, yv;
760:     PetscScalarKokkosView      wv;

762:     PetscCall(PetscLogGpuTimeBegin());
763:     PetscCall(VecGetKokkosViewWrite(win, &wv));
764:     PetscCall(VecGetKokkosView(xin, &xv));
765:     PetscCall(VecGetKokkosView(yin, &yv));
766:     PetscCallCXX(Kokkos::parallel_for(Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, win->map->n), KOKKOS_LAMBDA(const PetscInt &i) { wv(i) = alpha * xv(i) + yv(i); }));
767:     PetscCall(VecRestoreKokkosView(xin, &xv));
768:     PetscCall(VecRestoreKokkosView(yin, &yv));
769:     PetscCall(VecRestoreKokkosViewWrite(win, &wv));
770:     PetscCall(PetscLogGpuTimeEnd());
771:     PetscCall(PetscLogGpuFlops(2.0 * win->map->n));
772:   }
773:   PetscFunctionReturn(PETSC_SUCCESS);
774: }

776: template <PetscInt ValueCount>
777: struct MAXPYFunctor {
778:   static_assert(ValueCount >= 1 && ValueCount <= 8, "ValueCount must be in [1, 8]");
779:   typedef ConstPetscScalarKokkosView::size_type size_type;

781:   PetscScalarKokkosView      yv;
782:   PetscScalar                a[8];
783:   ConstPetscScalarKokkosView xv[8];

785:   MAXPYFunctor(PetscScalarKokkosView yv, PetscScalar a0, PetscScalar a1, PetscScalar a2, PetscScalar a3, PetscScalar a4, PetscScalar a5, PetscScalar a6, PetscScalar a7, ConstPetscScalarKokkosView xv0, ConstPetscScalarKokkosView xv1, ConstPetscScalarKokkosView xv2, ConstPetscScalarKokkosView xv3, ConstPetscScalarKokkosView xv4, ConstPetscScalarKokkosView xv5, ConstPetscScalarKokkosView xv6, ConstPetscScalarKokkosView xv7) :
786:     yv(yv)
787:   {
788:     a[0]  = a0;
789:     a[1]  = a1;
790:     a[2]  = a2;
791:     a[3]  = a3;
792:     a[4]  = a4;
793:     a[5]  = a5;
794:     a[6]  = a6;
795:     a[7]  = a7;
796:     xv[0] = xv0;
797:     xv[1] = xv1;
798:     xv[2] = xv2;
799:     xv[3] = xv3;
800:     xv[4] = xv4;
801:     xv[5] = xv5;
802:     xv[6] = xv6;
803:     xv[7] = xv7;
804:   }

806:   KOKKOS_INLINE_FUNCTION void operator()(const size_type i) const
807:   {
808:     for (PetscInt j = 0; j < ValueCount; ++j) yv(i) += a[j] * xv[j](i);
809:   }
810: };

812: /*  y = y + sum alpha[i] x[i] */
813: PetscErrorCode VecMAXPY_SeqKokkos(Vec yin, PetscInt nv, const PetscScalar *alpha, Vec *xin)
814: {
815:   PetscInt                   i, j, cur = 0, ngroup = nv / 8, rem = nv % 8, N = yin->map->n;
816:   PetscScalarKokkosView      yv;
817:   PetscScalar                a[8];
818:   ConstPetscScalarKokkosView xv[8];
819:   Kokkos::RangePolicy<>      policy(PetscGetKokkosExecutionSpace(), 0, N);

821:   PetscFunctionBegin;
822:   PetscCall(PetscLogGpuTimeBegin());
823:   PetscCall(VecGetKokkosView(yin, &yv));
824:   for (i = 0; i < ngroup; i++) { /* 8 x's per group */
825:     for (j = 0; j < 8; j++) {    /* Fill the parameters */
826:       a[j] = alpha[cur + j];
827:       PetscCall(VecGetKokkosView(xin[cur + j], &xv[j]));
828:     }
829:     MAXPYFunctor<8> maxpy(yv, a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], xv[0], xv[1], xv[2], xv[3], xv[4], xv[5], xv[6], xv[7]);
830:     PetscCallCXX(Kokkos::parallel_for(policy, maxpy));
831:     for (j = 0; j < 8; j++) PetscCall(VecRestoreKokkosView(xin[cur + j], &xv[j]));
832:     cur += 8;
833:   }

835:   if (rem) { /* The remaining */
836:     for (j = 0; j < rem; j++) {
837:       a[j] = alpha[cur + j];
838:       PetscCall(VecGetKokkosView(xin[cur + j], &xv[j]));
839:     }
840:     // clang-format off
841:     switch (rem) {
842:     case 1: PetscCallCXX(Kokkos::parallel_for(policy, MAXPYFunctor<1>(yv, a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], xv[0], xv[1], xv[2], xv[3], xv[4], xv[5], xv[6], xv[7]))); break;
843:     case 2: PetscCallCXX(Kokkos::parallel_for(policy, MAXPYFunctor<2>(yv, a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], xv[0], xv[1], xv[2], xv[3], xv[4], xv[5], xv[6], xv[7]))); break;
844:     case 3: PetscCallCXX(Kokkos::parallel_for(policy, MAXPYFunctor<3>(yv, a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], xv[0], xv[1], xv[2], xv[3], xv[4], xv[5], xv[6], xv[7]))); break;
845:     case 4: PetscCallCXX(Kokkos::parallel_for(policy, MAXPYFunctor<4>(yv, a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], xv[0], xv[1], xv[2], xv[3], xv[4], xv[5], xv[6], xv[7]))); break;
846:     case 5: PetscCallCXX(Kokkos::parallel_for(policy, MAXPYFunctor<5>(yv, a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], xv[0], xv[1], xv[2], xv[3], xv[4], xv[5], xv[6], xv[7]))); break;
847:     case 6: PetscCallCXX(Kokkos::parallel_for(policy, MAXPYFunctor<6>(yv, a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], xv[0], xv[1], xv[2], xv[3], xv[4], xv[5], xv[6], xv[7]))); break;
848:     case 7: PetscCallCXX(Kokkos::parallel_for(policy, MAXPYFunctor<7>(yv, a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], xv[0], xv[1], xv[2], xv[3], xv[4], xv[5], xv[6], xv[7]))); break;
849:     }
850:     // clang-format on
851:     for (j = 0; j < rem; j++) PetscCall(VecRestoreKokkosView(xin[cur + j], &xv[j]));
852:   }
853:   PetscCall(VecRestoreKokkosView(yin, &yv));
854:   PetscCall(PetscLogGpuTimeEnd());
855:   PetscCall(PetscLogGpuFlops(nv * 2.0 * yin->map->n));
856:   PetscFunctionReturn(PETSC_SUCCESS);
857: }

859: /*  y = y + sum alpha[i] x[i] */
860: PetscErrorCode VecMAXPY_SeqKokkos_GEMV(Vec yin, PetscInt nv, const PetscScalar *a_h, Vec *xin)
861: {
862:   const PetscInt             n = yin->map->n;
863:   PetscInt                   i, j, nfail;
864:   PetscScalarKokkosView      yv;
865:   ConstPetscScalarKokkosView xfirst, xnext;
866:   PetscBool                  stop = PETSC_FALSE;
867:   PetscInt                   lda  = 0, m;
868:   const PetscScalar         *xarray;
869:   PetscScalar               *a_d = nullptr;

871:   PetscFunctionBegin;
872:   PetscCall(PetscLogGpuTimeBegin());
873:   PetscCall(VecGetKokkosView(yin, &yv));
874: #if defined(KOKKOS_ENABLE_DEFAULT_DEVICE_TYPE_HOST)
875:   a_d = const_cast<PetscScalar *>(a_h);
876: #endif
877:   i = nfail = 0;
878:   while (i < nv) {
879:     stop = PETSC_FALSE;
880:     PetscCall(VecGetKokkosView(xin[i], &xfirst));
881:     xarray = xfirst.data();
882:     for (j = i + 1; j < nv; j++) {
883:       PetscCall(VecGetKokkosView(xin[j], &xnext));
884:       if (j == i + 1) {
885:         lda = xnext.data() - xfirst.data();
886:         if (lda < 0 || lda - n > 64) stop = PETSC_TRUE;    // avoid using arbitrary lda; 64 bytes are a big enough alignment in VecDuplicateVecs
887:       } else if (lda * (j - i) != xnext.data() - xarray) { // not in the same stride? if so, stop here
888:         stop = PETSC_TRUE;
889:       }
890:       PetscCall(VecRestoreKokkosView(xin[j], &xnext));
891:       if (stop) break;
892:     }
893:     PetscCall(VecRestoreKokkosView(xin[i], &xfirst));

895:     m = j - i;
896:     if (m > 1) {
897:       if (!a_d) {
898:         if (nv > PetscScalarPoolSize) { // rare case
899:           PetscScalarPoolSize = nv;
 900:           PetscCallCXX(PetscScalarPool = static_cast<PetscScalar *>(Kokkos::kokkos_realloc(PetscScalarPool, sizeof(PetscScalar) * PetscScalarPoolSize))); // kokkos_realloc() takes a size in bytes
901:         }
902:         a_d = PetscScalarPool;
903:       }
904:       const auto &B  = Kokkos::View<const PetscScalar **, Kokkos::LayoutLeft>(xarray, lda, m);
905:       const auto &A  = Kokkos::subview(B, std::pair<PetscInt, PetscInt>(0, n), Kokkos::ALL);
906:       auto        av = PetscScalarKokkosDualView(PetscScalarKokkosView(a_d + i, m), PetscScalarKokkosViewHost(const_cast<PetscScalar *>(a_h) + i, m));
907:       av.modify_host();
908:       av.sync_device();
909:       PetscCall(PetscLogCpuToGpu(av.extent(0) * sizeof(PetscScalar)));
910:       PetscCallCXX(KokkosBlas::gemv(PetscGetKokkosExecutionSpace(), "N", 1.0, A, av.view_device(), 1.0, yv));
911:       PetscCall(PetscLogGpuFlops(m * 2.0 * n));
912:     } else {
913:       // we only allow falling back on VecAXPY once
914:       if (nfail++ == 0) PetscCall(VecAXPY_SeqKokkos(yin, a_h[i], xin[i]));
915:       else break; // break the while loop
916:     }
917:     i = j;
918:   }
919:   // finish the remaining if any
920:   PetscCall(VecRestoreKokkosView(yin, &yv));
921:   if (i < nv) PetscCall(VecMAXPY_SeqKokkos(yin, nv - i, a_h + i, xin + i));
922:   PetscCall(PetscLogGpuTimeEnd());
923:   PetscFunctionReturn(PETSC_SUCCESS);
924: }
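// Illustrative note (added): the same stride detection as in VecMultiDot_SeqKokkos_GEMV()
// is used here, but the operation maps to a non-transposed gemv. Viewing xin[i..j) as the
// column-major matrix A(n, m) and the coefficients a_h[i..j) as a vector a, the update
// y += sum_k a[k] * x_k is performed as the single call y = 1.0 * A * a + 1.0 * y.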

926: /* y = alpha x + beta y */
927: PetscErrorCode VecAXPBY_SeqKokkos(Vec yin, PetscScalar alpha, PetscScalar beta, Vec xin)
928: {
929:   PetscBool xiskok, yiskok;

931:   PetscFunctionBegin;
932:   PetscCall(PetscObjectTypeCompareAny((PetscObject)xin, &xiskok, VECSEQKOKKOS, VECMPIKOKKOS, ""));
933:   PetscCall(PetscObjectTypeCompareAny((PetscObject)yin, &yiskok, VECSEQKOKKOS, VECMPIKOKKOS, ""));
934:   if (xiskok && yiskok) {
935:     ConstPetscScalarKokkosView xv;
936:     PetscScalarKokkosView      yv;

938:     PetscCall(PetscLogGpuTimeBegin());
939:     PetscCall(VecGetKokkosView(xin, &xv));
940:     PetscCall(VecGetKokkosView(yin, &yv));
941:     PetscCallCXX(KokkosBlas::axpby(PetscGetKokkosExecutionSpace(), alpha, xv, beta, yv));
942:     PetscCall(VecRestoreKokkosView(xin, &xv));
943:     PetscCall(VecRestoreKokkosView(yin, &yv));
944:     PetscCall(PetscLogGpuTimeEnd());
945:     if (alpha == (PetscScalar)0.0 || beta == (PetscScalar)0.0) {
946:       PetscCall(PetscLogGpuFlops(xin->map->n));
947:     } else if (beta == (PetscScalar)1.0 || alpha == (PetscScalar)1.0) {
948:       PetscCall(PetscLogGpuFlops(2.0 * xin->map->n));
949:     } else {
950:       PetscCall(PetscLogGpuFlops(3.0 * xin->map->n));
951:     }
952:   } else {
953:     PetscCall(VecAXPBY_Seq(yin, alpha, beta, xin));
954:   }
955:   PetscFunctionReturn(PETSC_SUCCESS);
956: }

958: /* z = alpha x + beta y + gamma z */
959: PetscErrorCode VecAXPBYPCZ_SeqKokkos(Vec zin, PetscScalar alpha, PetscScalar beta, PetscScalar gamma, Vec xin, Vec yin)
960: {
961:   ConstPetscScalarKokkosView xv, yv;
962:   PetscScalarKokkosView      zv;
963:   Kokkos::RangePolicy<>      policy(PetscGetKokkosExecutionSpace(), 0, zin->map->n);

965:   PetscFunctionBegin;
966:   PetscCall(PetscLogGpuTimeBegin());
967:   PetscCall(VecGetKokkosView(zin, &zv));
968:   PetscCall(VecGetKokkosView(xin, &xv));
969:   PetscCall(VecGetKokkosView(yin, &yv));
970:   if (gamma == (PetscScalar)0.0) { // a common case
971:     if (alpha == -beta) {
972:       PetscCallCXX(Kokkos::parallel_for( // a common case
973:         policy, KOKKOS_LAMBDA(const PetscInt &i) { zv(i) = alpha * (xv(i) - yv(i)); }));
974:     } else {
975:       PetscCallCXX(Kokkos::parallel_for(policy, KOKKOS_LAMBDA(const PetscInt &i) { zv(i) = alpha * xv(i) + beta * yv(i); }));
976:     }
977:   } else {
978:     PetscCallCXX(KokkosBlas::update(PetscGetKokkosExecutionSpace(), alpha, xv, beta, yv, gamma, zv));
979:   }
980:   PetscCall(VecRestoreKokkosView(xin, &xv));
981:   PetscCall(VecRestoreKokkosView(yin, &yv));
982:   PetscCall(VecRestoreKokkosView(zin, &zv));
983:   PetscCall(PetscLogGpuTimeEnd());
984:   PetscCall(PetscLogGpuFlops(zin->map->n * 5.0));
985:   PetscFunctionReturn(PETSC_SUCCESS);
986: }

 988: /* w = x*y. Any subset of x, y, and w may be the same vector.

 990:   w is of type VecKokkos, but x and y may not be.
 991: */
992: PetscErrorCode VecPointwiseMult_SeqKokkos(Vec win, Vec xin, Vec yin)
993: {
994:   PetscInt n;

996:   PetscFunctionBegin;
997:   PetscCall(PetscLogGpuTimeBegin());
998:   PetscCall(VecGetLocalSize(win, &n));
999:   if (xin->offloadmask != PETSC_OFFLOAD_KOKKOS || yin->offloadmask != PETSC_OFFLOAD_KOKKOS) {
1000:     PetscScalarKokkosViewHost wv;
1001:     const PetscScalar        *xp, *yp;
1002:     PetscCall(VecGetArrayRead(xin, &xp));
1003:     PetscCall(VecGetArrayRead(yin, &yp));
1004:     PetscCall(VecGetKokkosViewWrite(win, &wv));

1006:     ConstPetscScalarKokkosViewHost xv(xp, n), yv(yp, n);
1007:     PetscCallCXX(Kokkos::parallel_for(Kokkos::RangePolicy<Kokkos::DefaultHostExecutionSpace>(0, n), KOKKOS_LAMBDA(const PetscInt &i) { wv(i) = xv(i) * yv(i); }));

1009:     PetscCall(VecRestoreArrayRead(xin, &xp));
1010:     PetscCall(VecRestoreArrayRead(yin, &yp));
1011:     PetscCall(VecRestoreKokkosViewWrite(win, &wv));
1012:   } else {
1013:     ConstPetscScalarKokkosView xv, yv;
1014:     PetscScalarKokkosView      wv;

1016:     PetscCall(VecGetKokkosViewWrite(win, &wv));
1017:     PetscCall(VecGetKokkosView(xin, &xv));
1018:     PetscCall(VecGetKokkosView(yin, &yv));
1019:     PetscCallCXX(Kokkos::parallel_for(Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, n), KOKKOS_LAMBDA(const PetscInt &i) { wv(i) = xv(i) * yv(i); }));
1020:     PetscCall(VecRestoreKokkosView(yin, &yv));
1021:     PetscCall(VecRestoreKokkosView(xin, &xv));
1022:     PetscCall(VecRestoreKokkosViewWrite(win, &wv));
1023:   }
1024:   PetscCall(PetscLogGpuTimeEnd());
1025:   PetscCall(PetscLogGpuFlops(n));
1026:   PetscFunctionReturn(PETSC_SUCCESS);
1027: }

1029: /* w = x/y */
1030: PetscErrorCode VecPointwiseDivide_SeqKokkos(Vec win, Vec xin, Vec yin)
1031: {
1032:   PetscInt n;

1034:   PetscFunctionBegin;
1035:   PetscCall(PetscLogGpuTimeBegin());
1036:   PetscCall(VecGetLocalSize(win, &n));
1037:   if (xin->offloadmask != PETSC_OFFLOAD_KOKKOS || yin->offloadmask != PETSC_OFFLOAD_KOKKOS) {
1038:     PetscScalarKokkosViewHost wv;
1039:     const PetscScalar        *xp, *yp;
1040:     PetscCall(VecGetArrayRead(xin, &xp));
1041:     PetscCall(VecGetArrayRead(yin, &yp));
1042:     PetscCall(VecGetKokkosViewWrite(win, &wv));

1044:     ConstPetscScalarKokkosViewHost xv(xp, n), yv(yp, n);
1045:     PetscCallCXX(Kokkos::parallel_for(
1046:       Kokkos::RangePolicy<Kokkos::DefaultHostExecutionSpace>(0, n), KOKKOS_LAMBDA(const PetscInt &i) {
1047:         if (yv(i) != 0.0) wv(i) = xv(i) / yv(i);
1048:         else wv(i) = 0.0;
1049:       }));

1051:     PetscCall(VecRestoreArrayRead(xin, &xp));
1052:     PetscCall(VecRestoreArrayRead(yin, &yp));
1053:     PetscCall(VecRestoreKokkosViewWrite(win, &wv));
1054:   } else {
1055:     ConstPetscScalarKokkosView xv, yv;
1056:     PetscScalarKokkosView      wv;

1058:     PetscCall(VecGetKokkosViewWrite(win, &wv));
1059:     PetscCall(VecGetKokkosView(xin, &xv));
1060:     PetscCall(VecGetKokkosView(yin, &yv));
1061:     PetscCallCXX(Kokkos::parallel_for(
1062:       Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, n), KOKKOS_LAMBDA(const PetscInt &i) {
1063:         if (yv(i) != 0.0) wv(i) = xv(i) / yv(i);
1064:         else wv(i) = 0.0;
1065:       }));
1066:     PetscCall(VecRestoreKokkosView(yin, &yv));
1067:     PetscCall(VecRestoreKokkosView(xin, &xv));
1068:     PetscCall(VecRestoreKokkosViewWrite(win, &wv));
1069:   }
1070:   PetscCall(PetscLogGpuTimeEnd());
1071:   PetscCall(PetscLogGpuFlops(win->map->n));
1072:   PetscFunctionReturn(PETSC_SUCCESS);
1073: }

1075: PetscErrorCode VecNorm_SeqKokkos(Vec xin, NormType type, PetscReal *z)
1076: {
1077:   const PetscInt             n = xin->map->n;
1078:   ConstPetscScalarKokkosView xv;
1079:   auto                      &exec = PetscGetKokkosExecutionSpace();

1081:   PetscFunctionBegin;
1082:   if (type == NORM_1_AND_2) {
1083:     PetscCall(VecNorm_SeqKokkos(xin, NORM_1, z));
1084:     PetscCall(VecNorm_SeqKokkos(xin, NORM_2, z + 1));
1085:   } else {
1086:     PetscCall(PetscLogGpuTimeBegin());
1087:     PetscCall(VecGetKokkosView(xin, &xv));
1088:     if (type == NORM_2 || type == NORM_FROBENIUS) {
1089:       PetscCallCXX(*z = KokkosBlas::nrm2(exec, xv));
1090:       PetscCall(PetscLogGpuFlops(PetscMax(2.0 * n - 1, 0.0)));
1091:     } else if (type == NORM_1) {
1092:       PetscCallCXX(*z = KokkosBlas::nrm1(exec, xv));
1093:       PetscCall(PetscLogGpuFlops(PetscMax(n - 1.0, 0.0)));
1094:     } else if (type == NORM_INFINITY) {
1095:       PetscCallCXX(*z = KokkosBlas::nrminf(exec, xv));
1096:     }
1097:     PetscCall(VecRestoreKokkosView(xin, &xv));
1098:     PetscCall(PetscLogGpuTimeEnd());
1099:   }
1100:   PetscFunctionReturn(PETSC_SUCCESS);
1101: }

1103: PetscErrorCode VecErrorWeightedNorms_SeqKokkos(Vec U, Vec Y, Vec E, NormType wnormtype, PetscReal atol, Vec vatol, PetscReal rtol, Vec vrtol, PetscReal ignore_max, PetscReal *norm, PetscInt *norm_loc, PetscReal *norma, PetscInt *norma_loc, PetscReal *normr, PetscInt *normr_loc)
1104: {
1105:   ConstPetscScalarKokkosView u, y, erra, atola, rtola;
1106:   PetscBool                  has_E = PETSC_FALSE, has_atol = PETSC_FALSE, has_rtol = PETSC_FALSE;
1107:   PetscInt                   n, n_loc = 0, na_loc = 0, nr_loc = 0;
1108:   PetscReal                  nrm = 0, nrma = 0, nrmr = 0;

1110:   PetscFunctionBegin;
1111:   PetscCall(VecGetLocalSize(U, &n));
1112:   PetscCall(VecGetKokkosView(U, &u));
1113:   PetscCall(VecGetKokkosView(Y, &y));
1114:   if (E) {
1115:     PetscCall(VecGetKokkosView(E, &erra));
1116:     has_E = PETSC_TRUE;
1117:   }
1118:   if (vatol) {
1119:     PetscCall(VecGetKokkosView(vatol, &atola));
1120:     has_atol = PETSC_TRUE;
1121:   }
1122:   if (vrtol) {
1123:     PetscCall(VecGetKokkosView(vrtol, &rtola));
1124:     has_rtol = PETSC_TRUE;
1125:   }

1127:   if (wnormtype == NORM_INFINITY) {
1128:     PetscCallCXX(Kokkos::parallel_reduce(
1129:       "VecErrorWeightedNorms_INFINITY", Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, n),
1130:       KOKKOS_LAMBDA(const PetscInt &i, PetscReal &l_nrm, PetscReal &l_nrma, PetscReal &l_nrmr, PetscInt &l_n_loc, PetscInt &l_na_loc, PetscInt &l_nr_loc) {
1131:         PetscReal err, tol, tola, tolr, l_atol, l_rtol;
1132:         if (PetscAbsScalar(y(i)) >= ignore_max && PetscAbsScalar(u(i)) >= ignore_max) {
1133:           l_atol = has_atol ? PetscRealPart(atola(i)) : atol;
1134:           l_rtol = has_rtol ? PetscRealPart(rtola(i)) : rtol;
1135:           err    = has_E ? PetscAbsScalar(erra(i)) : PetscAbsScalar(y(i) - u(i));
1136:           tola   = l_atol;
1137:           tolr   = l_rtol * PetscMax(PetscAbsScalar(u(i)), PetscAbsScalar(y(i)));
1138:           tol    = tola + tolr;
1139:           if (tola > 0.) {
1140:             l_nrma = PetscMax(l_nrma, err / tola);
1141:             l_na_loc++;
1142:           }
1143:           if (tolr > 0.) {
1144:             l_nrmr = PetscMax(l_nrmr, err / tolr);
1145:             l_nr_loc++;
1146:           }
1147:           if (tol > 0.) {
1148:             l_nrm = PetscMax(l_nrm, err / tol);
1149:             l_n_loc++;
1150:           }
1151:         }
1152:       },
1153:       Kokkos::Max<PetscReal>(nrm), Kokkos::Max<PetscReal>(nrma), Kokkos::Max<PetscReal>(nrmr), n_loc, na_loc, nr_loc));
1154:   } else {
1155:     PetscCallCXX(Kokkos::parallel_reduce(
1156:       "VecErrorWeightedNorms_NORM_2", Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, n),
1157:       KOKKOS_LAMBDA(const PetscInt &i, PetscReal &l_nrm, PetscReal &l_nrma, PetscReal &l_nrmr, PetscInt &l_n_loc, PetscInt &l_na_loc, PetscInt &l_nr_loc) {
1158:         PetscReal err, tol, tola, tolr, l_atol, l_rtol;
1159:         if (PetscAbsScalar(y(i)) >= ignore_max && PetscAbsScalar(u(i)) >= ignore_max) {
1160:           l_atol = has_atol ? PetscRealPart(atola(i)) : atol;
1161:           l_rtol = has_rtol ? PetscRealPart(rtola(i)) : rtol;
1162:           err    = has_E ? PetscAbsScalar(erra(i)) : PetscAbsScalar(y(i) - u(i));
1163:           tola   = l_atol;
1164:           tolr   = l_rtol * PetscMax(PetscAbsScalar(u(i)), PetscAbsScalar(y(i)));
1165:           tol    = tola + tolr;
1166:           if (tola > 0.) {
1167:             l_nrma += PetscSqr(err / tola);
1168:             l_na_loc++;
1169:           }
1170:           if (tolr > 0.) {
1171:             l_nrmr += PetscSqr(err / tolr);
1172:             l_nr_loc++;
1173:           }
1174:           if (tol > 0.) {
1175:             l_nrm += PetscSqr(err / tol);
1176:             l_n_loc++;
1177:           }
1178:         }
1179:       },
1180:       nrm, nrma, nrmr, n_loc, na_loc, nr_loc));
1181:   }

1183:   if (wnormtype == NORM_2) {
1184:     *norm  = PetscSqrtReal(nrm);
1185:     *norma = PetscSqrtReal(nrma);
1186:     *normr = PetscSqrtReal(nrmr);
1187:   } else {
1188:     *norm  = nrm;
1189:     *norma = nrma;
1190:     *normr = nrmr;
1191:   }
1192:   *norm_loc  = n_loc;
1193:   *norma_loc = na_loc;
1194:   *normr_loc = nr_loc;

1196:   if (E) PetscCall(VecRestoreKokkosView(E, &erra));
1197:   if (vatol) PetscCall(VecRestoreKokkosView(vatol, &atola));
1198:   if (vrtol) PetscCall(VecRestoreKokkosView(vrtol, &rtola));
1199:   PetscCall(VecRestoreKokkosView(U, &u));
1200:   PetscCall(VecRestoreKokkosView(Y, &y));
1201:   PetscFunctionReturn(PETSC_SUCCESS);
1202: }

1204: /* A functor for DotNorm2 so that we can compute dp and nm in one kernel */
1205: struct DotNorm2 {
1206:   typedef PetscScalar                           value_type[];
1207:   typedef ConstPetscScalarKokkosView::size_type size_type;

1209:   size_type                  value_count;
1210:   ConstPetscScalarKokkosView xv_, yv_; /* first and second vectors in VecDotNorm2. The order matters. */

1212:   DotNorm2(ConstPetscScalarKokkosView &xv, ConstPetscScalarKokkosView &yv) : value_count(2), xv_(xv), yv_(yv) { }

1214:   KOKKOS_INLINE_FUNCTION void operator()(const size_type i, value_type result) const
1215:   {
1216:     result[0] += PetscConj(yv_(i)) * xv_(i);
1217:     result[1] += PetscConj(yv_(i)) * yv_(i);
1218:   }

1220:   KOKKOS_INLINE_FUNCTION void join(value_type dst, const value_type src) const
1221:   {
1222:     dst[0] += src[0];
1223:     dst[1] += src[1];
1224:   }

1226:   KOKKOS_INLINE_FUNCTION void init(value_type result) const
1227:   {
1228:     result[0] = 0.0;
1229:     result[1] = 0.0;
1230:   }
1231: };

1233: /* dp = y^H x, nm = y^H y */
1234: PetscErrorCode VecDotNorm2_SeqKokkos(Vec xin, Vec yin, PetscScalar *dp, PetscScalar *nm)
1235: {
1236:   ConstPetscScalarKokkosView xv, yv;
1237:   PetscScalar                result[2];

1239:   PetscFunctionBegin;
1240:   PetscCall(PetscLogGpuTimeBegin());
1241:   PetscCall(VecGetKokkosView(xin, &xv));
1242:   PetscCall(VecGetKokkosView(yin, &yv));
1243:   DotNorm2 dn(xv, yv);
1244:   PetscCallCXX(Kokkos::parallel_reduce(Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, xin->map->n), dn, result));
1245:   *dp = result[0];
1246:   *nm = result[1];
1247:   PetscCall(VecRestoreKokkosView(yin, &yv));
1248:   PetscCall(VecRestoreKokkosView(xin, &xv));
1249:   PetscCall(PetscLogGpuTimeEnd());
1250:   PetscCall(PetscLogGpuFlops(4.0 * xin->map->n));
1251:   PetscFunctionReturn(PETSC_SUCCESS);
1252: }

1254: PetscErrorCode VecConjugate_SeqKokkos(Vec xin)
1255: {
1256: #if defined(PETSC_USE_COMPLEX)
1257:   PetscScalarKokkosView xv;

1259:   PetscFunctionBegin;
1260:   PetscCall(PetscLogGpuTimeBegin());
1261:   PetscCall(VecGetKokkosView(xin, &xv));
1262:   PetscCallCXX(Kokkos::parallel_for(Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, xin->map->n), KOKKOS_LAMBDA(const PetscInt &i) { xv(i) = PetscConj(xv(i)); }));
1263:   PetscCall(VecRestoreKokkosView(xin, &xv));
1264:   PetscCall(PetscLogGpuTimeEnd());
1265: #else
1266:   PetscFunctionBegin;
1267: #endif
1268:   PetscFunctionReturn(PETSC_SUCCESS);
1269: }

1271: /* Temporarily replace the array in vin with a[]. Return to the original array with a call to VecResetArray() */
1272: PetscErrorCode VecPlaceArray_SeqKokkos(Vec vin, const PetscScalar *a)
1273: {
1274:   Vec_Seq    *vecseq = (Vec_Seq *)vin->data;
1275:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(vin->spptr);

1277:   PetscFunctionBegin;
1278:   PetscCall(VecPlaceArray_Seq(vin, a));
1279:   PetscCall(veckok->UpdateArray<Kokkos::HostSpace>(vecseq->array));
1280:   PetscFunctionReturn(PETSC_SUCCESS);
1281: }

1283: PetscErrorCode VecResetArray_SeqKokkos(Vec vin)
1284: {
1285:   Vec_Seq    *vecseq = (Vec_Seq *)vin->data;
1286:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(vin->spptr);

1288:   PetscFunctionBegin;
1289:   /* The user wants to unhook the provided host array. Sync it so that the user gets the latest data */
1290:   PetscCall(KokkosDualViewSync<Kokkos::HostSpace>(veckok->v_dual, PetscGetKokkosExecutionSpace()));
1291:   PetscCall(VecResetArray_Seq(vin)); /* Swap back the old host array, assuming it has the latest values */
1292:   PetscCall(veckok->UpdateArray<Kokkos::HostSpace>(vecseq->array));
1293:   PetscFunctionReturn(PETSC_SUCCESS);
1294: }

1296: /* Replace the array in vin with a[] that must be allocated by PetscMalloc. a[] is owned by vin afterwards. */
1297: PetscErrorCode VecReplaceArray_SeqKokkos(Vec vin, const PetscScalar *a)
1298: {
1299:   Vec_Seq    *vecseq = (Vec_Seq *)vin->data;
1300:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(vin->spptr);

1302:   PetscFunctionBegin;
1303:   /* Make sure the user's array has the latest values */
1304:   if (vecseq->array != vecseq->array_allocated) PetscCall(KokkosDualViewSync<Kokkos::HostSpace>(veckok->v_dual, PetscGetKokkosExecutionSpace()));
1305:   PetscCall(VecReplaceArray_Seq(vin, a));
1306:   PetscCall(veckok->UpdateArray<Kokkos::HostSpace>(vecseq->array));
1307:   PetscFunctionReturn(PETSC_SUCCESS);
1308: }

1310: /* Maps the local portion of vector v into vector w */
1311: PetscErrorCode VecGetLocalVector_SeqKokkos(Vec v, Vec w)
1312: {
1313:   Vec_Seq    *vecseq = static_cast<Vec_Seq *>(w->data);
1314:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(w->spptr);

1316:   PetscFunctionBegin;
1317:   PetscCheckTypeNames(w, VECSEQKOKKOS, VECMPIKOKKOS);
1318:   /* Destroy w->data, w->spptr */
1319:   if (vecseq) {
1320:     PetscCall(PetscFree(vecseq->array_allocated));
1321:     PetscCall(PetscFree(w->data));
1322:   }
1323:   delete veckok;

1325:   /* Replace with v's */
1326:   w->data  = v->data;
1327:   w->spptr = v->spptr;
1328:   PetscCall(PetscObjectStateIncrease((PetscObject)w));
1329:   PetscFunctionReturn(PETSC_SUCCESS);
1330: }

1332: PetscErrorCode VecRestoreLocalVector_SeqKokkos(Vec v, Vec w)
1333: {
1334:   PetscFunctionBegin;
1335:   PetscCheckTypeNames(w, VECSEQKOKKOS, VECMPIKOKKOS);
1336:   v->data  = w->data;
1337:   v->spptr = w->spptr;
1338:   PetscCall(PetscObjectStateIncrease((PetscObject)v));
1339:   /* TODO: need to think if setting w->data/spptr to NULL is safe */
1340:   w->data  = NULL;
1341:   w->spptr = NULL;
1342:   PetscFunctionReturn(PETSC_SUCCESS);
1343: }

1345: PetscErrorCode VecGetArray_SeqKokkos(Vec v, PetscScalar **a)
1346: {
1347:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

1349:   PetscFunctionBegin;
1350:   PetscCall(KokkosDualViewSync<Kokkos::HostSpace>(veckok->v_dual, PetscGetKokkosExecutionSpace()));
1351:   *a = *((PetscScalar **)v->data);
1352:   PetscFunctionReturn(PETSC_SUCCESS);
1353: }

1355: PetscErrorCode VecRestoreArray_SeqKokkos(Vec v, PetscScalar **a)
1356: {
1357:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

1359:   PetscFunctionBegin;
1360:   PetscCallCXX(veckok->v_dual.modify_host());
1361:   PetscFunctionReturn(PETSC_SUCCESS);
1362: }

1364: /* Get the host array for overwriting, so there is no need to sync the host. In VecRestoreArrayWrite() we will mark the host as modified. */
1365: PetscErrorCode VecGetArrayWrite_SeqKokkos(Vec v, PetscScalar **a)
1366: {
1367:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

1369:   PetscFunctionBegin;
1370:   PetscCallCXX(veckok->v_dual.clear_sync_state());
1371:   *a = veckok->v_dual.view_host().data();
1372:   PetscFunctionReturn(PETSC_SUCCESS);
1373: }

1375: PetscErrorCode VecGetArrayAndMemType_SeqKokkos(Vec v, PetscScalar **a, PetscMemType *mtype)
1376: {
1377:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

1379:   PetscFunctionBegin;
1380:   /* Always return up-to-date data in the default memory space */
1381:   PetscCall(KokkosDualViewSync<DefaultMemorySpace>(veckok->v_dual, PetscGetKokkosExecutionSpace()));
1382:   *a = veckok->v_dual.view_device().data();
1383:   if (mtype) *mtype = PETSC_MEMTYPE_KOKKOS; // Could be PETSC_MEMTYPE_HOST when Kokkos is not configured with CUDA etc.
1384:   PetscFunctionReturn(PETSC_SUCCESS);
1385: }

1387: PetscErrorCode VecRestoreArrayAndMemType_SeqKokkos(Vec v, PetscScalar **a)
1388: {
1389:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

1391:   PetscFunctionBegin;
1392:   if (std::is_same<DefaultMemorySpace, Kokkos::HostSpace>::value) {
1393:     PetscCallCXX(veckok->v_dual.modify_host());
1394:   } else {
1395:     PetscCallCXX(veckok->v_dual.modify_device());
1396:   }
1397:   PetscFunctionReturn(PETSC_SUCCESS);
1398: }

1400: PetscErrorCode VecGetArrayWriteAndMemType_SeqKokkos(Vec v, PetscScalar **a, PetscMemType *mtype)
1401: {
1402:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);

1404:   PetscFunctionBegin;
1405:   // Since the data will be overwritten, clear the sync state to avoid a potential memory copy during the sync below
1406:   PetscCallCXX(veckok->v_dual.clear_sync_state()); // So that in restore, we can safely modify_device()
1407:   PetscCall(KokkosDualViewSync<DefaultMemorySpace>(veckok->v_dual, PetscGetKokkosExecutionSpace()));
1408:   *a = veckok->v_dual.view_device().data();
1409:   if (mtype) *mtype = PETSC_MEMTYPE_KOKKOS; // Could be PETSC_MEMTYPE_HOST when Kokkos is not configured with CUDA etc.
1410:   PetscFunctionReturn(PETSC_SUCCESS);
1411: }

1413: /* Copy xin's sync state to yout */
1414: static PetscErrorCode VecCopySyncState_Kokkos_Private(Vec xin, Vec yout)
1415: {
1416:   Vec_Kokkos *xkok = static_cast<Vec_Kokkos *>(xin->spptr);
1417:   Vec_Kokkos *ykok = static_cast<Vec_Kokkos *>(yout->spptr);

1419:   PetscFunctionBegin;
1420:   PetscCallCXX(ykok->v_dual.clear_sync_state());
1421:   if (xkok->v_dual.need_sync_host()) {
1422:     PetscCallCXX(ykok->v_dual.modify_device());
1423:   } else if (xkok->v_dual.need_sync_device()) {
1424:     PetscCallCXX(ykok->v_dual.modify_host());
1425:   }
1426:   PetscFunctionReturn(PETSC_SUCCESS);
1427: }

1429: static PetscErrorCode VecCreateSeqKokkosWithArrays_Private(MPI_Comm, PetscInt, PetscInt, const PetscScalar[], const PetscScalar[], Vec *);

1431: /* Internal routine shared by VecGetSubVector_{SeqKokkos,MPIKokkos} */
1432: PetscErrorCode VecGetSubVector_Kokkos_Private(Vec x, PetscBool xIsMPI, IS is, Vec *y)
1433: {
1434:   PetscBool contig;
1435:   PetscInt  n, N, start, bs;
1436:   MPI_Comm  comm;
1437:   Vec       z;

1439:   PetscFunctionBegin;
1440:   PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
1441:   PetscCall(ISGetLocalSize(is, &n));
1442:   PetscCall(ISGetSize(is, &N));
1443:   PetscCall(VecGetSubVectorContiguityAndBS_Private(x, is, &contig, &start, &bs));

1445:   if (contig) { /* We can do a no-copy (in-place) implementation with y sharing x's arrays */
1446:     Vec_Kokkos        *xkok    = static_cast<Vec_Kokkos *>(x->spptr);
1447:     const PetscScalar *array_h = xkok->v_dual.view_host().data() + start;
1448:     const PetscScalar *array_d = xkok->v_dual.view_device().data() + start;

1450:     /* These calls assume the input arrays are synced */
1451:     if (xIsMPI) PetscCall(VecCreateMPIKokkosWithArrays_Private(comm, bs, n, N, array_h, array_d, &z)); /* x could be MPI even when x's comm size = 1 */
1452:     else PetscCall(VecCreateSeqKokkosWithArrays_Private(comm, bs, n, array_h, array_d, &z));

1454:     PetscCall(VecCopySyncState_Kokkos_Private(x, z)); /* Copy x's sync state to z */

1456:     /* This is relevant only in debug mode */
1457:     PetscInt state = 0;
1458:     PetscCall(VecLockGet(x, &state));
1459:     if (state) { /* x is in either read or read/write mode, therefore z, which overlaps x, can only be in read mode */
1460:       PetscCall(VecLockReadPush(z));
1461:     }

1463:     z->ops->placearray   = NULL; /* z's arrays can't be replaced, because z does not own them */
1464:     z->ops->replacearray = NULL;

1466:   } else { /* Have to create a VecScatter and a stand-alone vector */
1467:     PetscCall(VecGetSubVectorThroughVecScatter_Private(x, is, bs, &z));
1468:   }
1469:   *y = z;
1470:   PetscFunctionReturn(PETSC_SUCCESS);
1471: }

1473: static PetscErrorCode VecGetSubVector_SeqKokkos(Vec x, IS is, Vec *y)
1474: {
1475:   PetscFunctionBegin;
1476:   PetscCall(VecGetSubVector_Kokkos_Private(x, PETSC_FALSE, is, y));
1477:   PetscFunctionReturn(PETSC_SUCCESS);
1478: }

1480: /* Restore subvector y to x */
1481: PetscErrorCode VecRestoreSubVector_SeqKokkos(Vec x, IS is, Vec *y)
1482: {
1483:   VecScatter                    vscat;
1484:   PETSC_UNUSED PetscObjectState dummystate = 0;
1485:   PetscBool                     unchanged;

1487:   PetscFunctionBegin;
1488:   PetscCall(PetscObjectComposedDataGetInt((PetscObject)*y, VecGetSubVectorSavedStateId, dummystate, unchanged));
1489:   if (unchanged) PetscFunctionReturn(PETSC_SUCCESS); /* y's state has not changed since VecGetSubVector(), so there is nothing to copy back; y only needs to be destroyed */

1491:   PetscCall(PetscObjectQuery((PetscObject)*y, "VecGetSubVector_Scatter", (PetscObject *)&vscat));
1492:   if (vscat) {
1493:     PetscCall(VecScatterBegin(vscat, *y, x, INSERT_VALUES, SCATTER_REVERSE));
1494:     PetscCall(VecScatterEnd(vscat, *y, x, INSERT_VALUES, SCATTER_REVERSE));
1495:   } else { /* y and x's (host and device) arrays overlap */
1496:     Vec_Kokkos *xkok = static_cast<Vec_Kokkos *>(x->spptr);
1497:     Vec_Kokkos *ykok = static_cast<Vec_Kokkos *>((*y)->spptr);
1498:     PetscInt    state;

1500:     PetscCall(VecLockGet(x, &state));
1501:     PetscCheck(!state, PetscObjectComm((PetscObject)x), PETSC_ERR_ARG_WRONGSTATE, "Vec x is locked for read-only or read/write access");

1503:     /* The tricky part: one has to carefully sync the arrays */
1504:     auto &exec = PetscGetKokkosExecutionSpace();
1505:     if (xkok->v_dual.need_sync_device()) { /* x's host has newer data */
1506:       /* Move y's latest values to host (since y is just a subset of x) */
1507:       PetscCall(KokkosDualViewSync<Kokkos::HostSpace>(ykok->v_dual, exec));
1508:     } else if (xkok->v_dual.need_sync_host()) {                              /* x's device has newer data */
1509:       PetscCall(KokkosDualViewSync<DefaultMemorySpace>(ykok->v_dual, exec)); /* Move y's latest data to device */
1510:     } else {                                                                 /* x's host and device data are already synced; copy y's sync state to x */
1511:       PetscCall(VecCopySyncState_Kokkos_Private(*y, x));
1512:     }
1513:     PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* Since x is updated */
1514:   }
1515:   PetscFunctionReturn(PETSC_SUCCESS);
1516: }

1518: static PetscErrorCode VecSetPreallocationCOO_SeqKokkos(Vec x, PetscCount ncoo, const PetscInt coo_i[])
1519: {
1520:   Vec_Seq    *vecseq = static_cast<Vec_Seq *>(x->data);
1521:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(x->spptr);
1522:   PetscInt    m;

1524:   PetscFunctionBegin;
1525:   PetscCall(VecSetPreallocationCOO_Seq(x, ncoo, coo_i));
1526:   PetscCall(VecGetLocalSize(x, &m));
1527:   PetscCall(veckok->SetUpCOO(vecseq, m));
1528:   PetscFunctionReturn(PETSC_SUCCESS);
1529: }

1531: static PetscErrorCode VecSetValuesCOO_SeqKokkos(Vec x, const PetscScalar v[], InsertMode imode)
1532: {
1533:   Vec_Seq                    *vecseq = static_cast<Vec_Seq *>(x->data);
1534:   Vec_Kokkos                 *veckok = static_cast<Vec_Kokkos *>(x->spptr);
1535:   const PetscCountKokkosView &jmap1  = veckok->jmap1_d;
1536:   const PetscCountKokkosView &perm1  = veckok->perm1_d;
1537:   PetscScalarKokkosView       xv; /* View for vector x */
1538:   ConstPetscScalarKokkosView  vv; /* View for array v[] */
1539:   PetscInt                    m;
1540:   PetscMemType                memtype;

1542:   PetscFunctionBegin;
1543:   PetscCall(VecGetLocalSize(x, &m));
1544:   PetscCall(PetscGetMemType(v, &memtype));
1545:   if (PetscMemTypeHost(memtype)) { /* If the user provided v[] on host, we may need to copy it to the device, if there is one */
1546:     PetscCallCXX(vv = Kokkos::create_mirror_view_and_copy(DefaultMemorySpace(), ConstPetscScalarKokkosViewHost(v, vecseq->coo_n)));
1547:   } else {
1548:     PetscCallCXX(vv = ConstPetscScalarKokkosView(v, vecseq->coo_n)); /* Directly use v[]'s memory */
1549:   }

1551:   if (imode == INSERT_VALUES) PetscCall(VecGetKokkosViewWrite(x, &xv)); /* write vector */
1552:   else PetscCall(VecGetKokkosView(x, &xv));                             /* read & write vector */

1554:   PetscCallCXX(Kokkos::parallel_for(
1555:     Kokkos::RangePolicy<>(PetscGetKokkosExecutionSpace(), 0, m), KOKKOS_LAMBDA(const PetscInt &i) {
1556:       PetscScalar sum = 0.0;
1557:       for (PetscCount k = jmap1(i); k < jmap1(i + 1); k++) sum += vv(perm1(k));
1558:       xv(i) = (imode == INSERT_VALUES ? 0.0 : xv(i)) + sum;
1559:     }));

1561:   if (imode == INSERT_VALUES) PetscCall(VecRestoreKokkosViewWrite(x, &xv));
1562:   else PetscCall(VecRestoreKokkosView(x, &xv));
1563:   PetscFunctionReturn(PETSC_SUCCESS);
1564: }
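
/*
  A minimal usage sketch of the COO assembly path implemented above, assuming the public
  VecSetPreallocationCOO()/VecSetValuesCOO() wrappers dispatch to the routines above for VECSEQKOKKOS
  vectors. Repeated indices in coo_i are summed, and v[] may reside in host or device memory.
  ExampleVecCOOAssembly() is a hypothetical helper for illustration only.
*/
static PETSC_UNUSED PetscErrorCode ExampleVecCOOAssembly(void)
{
  Vec               x;
  const PetscInt    coo_i[4] = {0, 1, 1, 2};            /* index 1 appears twice; its values are summed */
  const PetscScalar v[4]     = {1.0, 2.0, 3.0, 4.0};

  PetscFunctionBegin;
  PetscCall(VecCreateSeqKokkos(PETSC_COMM_SELF, 3, &x));
  PetscCall(VecSetPreallocationCOO(x, 4, coo_i));  /* builds the jmap1/perm1 maps used by the kernel above */
  PetscCall(VecSetValuesCOO(x, v, INSERT_VALUES)); /* x becomes [1, 5, 4] */
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(PETSC_SUCCESS);
}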

1566: /* Duplicate the layout etc. of the input vector, but not its values */
1567: static PetscErrorCode VecDuplicate_SeqKokkos(Vec win, Vec *v)
1568: {
1569:   PetscFunctionBegin;
1570:   PetscCall(VecDuplicate_Seq(win, v)); /* It also dups ops of win */
1571:   PetscFunctionReturn(PETSC_SUCCESS);
1572: }

1574: static PetscErrorCode VecDestroy_SeqKokkos(Vec v)
1575: {
1576:   Vec_Kokkos *veckok = static_cast<Vec_Kokkos *>(v->spptr);
1577:   Vec_Seq    *vecseq = static_cast<Vec_Seq *>(v->data);

1579:   PetscFunctionBegin;
1580:   delete veckok;
1581:   v->spptr = NULL;
1582:   if (vecseq) PetscCall(VecDestroy_Seq(v));
1583:   PetscFunctionReturn(PETSC_SUCCESS);
1584: }

1586: static PetscErrorCode VecSetOps_SeqKokkos(Vec v)
1587: {
1588:   PetscFunctionBegin;
1589:   v->ops->abs             = VecAbs_SeqKokkos;
1590:   v->ops->reciprocal      = VecReciprocal_SeqKokkos;
1591:   v->ops->pointwisemult   = VecPointwiseMult_SeqKokkos;
1592:   v->ops->min             = VecMin_SeqKokkos;
1593:   v->ops->max             = VecMax_SeqKokkos;
1594:   v->ops->sum             = VecSum_SeqKokkos;
1595:   v->ops->shift           = VecShift_SeqKokkos;
1596:   v->ops->norm            = VecNorm_SeqKokkos;
1597:   v->ops->scale           = VecScale_SeqKokkos;
1598:   v->ops->copy            = VecCopy_SeqKokkos;
1599:   v->ops->set             = VecSet_SeqKokkos;
1600:   v->ops->swap            = VecSwap_SeqKokkos;
1601:   v->ops->axpy            = VecAXPY_SeqKokkos;
1602:   v->ops->axpby           = VecAXPBY_SeqKokkos;
1603:   v->ops->axpbypcz        = VecAXPBYPCZ_SeqKokkos;
1604:   v->ops->pointwisedivide = VecPointwiseDivide_SeqKokkos;
1605:   v->ops->setrandom       = VecSetRandom_SeqKokkos;

1607:   v->ops->dot   = VecDot_SeqKokkos;
1608:   v->ops->tdot  = VecTDot_SeqKokkos;
1609:   v->ops->mdot  = VecMDot_SeqKokkos;
1610:   v->ops->mtdot = VecMTDot_SeqKokkos;

1612:   v->ops->dot_local   = VecDot_SeqKokkos;
1613:   v->ops->tdot_local  = VecTDot_SeqKokkos;
1614:   v->ops->mdot_local  = VecMDot_SeqKokkos;
1615:   v->ops->mtdot_local = VecMTDot_SeqKokkos;

1617:   v->ops->norm_local             = VecNorm_SeqKokkos;
1618:   v->ops->maxpy                  = VecMAXPY_SeqKokkos;
1619:   v->ops->aypx                   = VecAYPX_SeqKokkos;
1620:   v->ops->waxpy                  = VecWAXPY_SeqKokkos;
1621:   v->ops->dotnorm2               = VecDotNorm2_SeqKokkos;
1622:   v->ops->errorwnorm             = VecErrorWeightedNorms_SeqKokkos;
1623:   v->ops->placearray             = VecPlaceArray_SeqKokkos;
1624:   v->ops->replacearray           = VecReplaceArray_SeqKokkos;
1625:   v->ops->resetarray             = VecResetArray_SeqKokkos;
1626:   v->ops->destroy                = VecDestroy_SeqKokkos;
1627:   v->ops->duplicate              = VecDuplicate_SeqKokkos;
1628:   v->ops->conjugate              = VecConjugate_SeqKokkos;
1629:   v->ops->getlocalvector         = VecGetLocalVector_SeqKokkos;
1630:   v->ops->restorelocalvector     = VecRestoreLocalVector_SeqKokkos;
1631:   v->ops->getlocalvectorread     = VecGetLocalVector_SeqKokkos;
1632:   v->ops->restorelocalvectorread = VecRestoreLocalVector_SeqKokkos;
1633:   v->ops->getarraywrite          = VecGetArrayWrite_SeqKokkos;
1634:   v->ops->getarray               = VecGetArray_SeqKokkos;
1635:   v->ops->restorearray           = VecRestoreArray_SeqKokkos;

1637:   v->ops->getarrayandmemtype      = VecGetArrayAndMemType_SeqKokkos;
1638:   v->ops->restorearrayandmemtype  = VecRestoreArrayAndMemType_SeqKokkos;
1639:   v->ops->getarraywriteandmemtype = VecGetArrayWriteAndMemType_SeqKokkos;
1640:   v->ops->getsubvector            = VecGetSubVector_SeqKokkos;
1641:   v->ops->restoresubvector        = VecRestoreSubVector_SeqKokkos;

1643:   v->ops->setpreallocationcoo = VecSetPreallocationCOO_SeqKokkos;
1644:   v->ops->setvaluescoo        = VecSetValuesCOO_SeqKokkos;
1645:   PetscFunctionReturn(PETSC_SUCCESS);
1646: }

1648: /*@C
1649:   VecCreateSeqKokkosWithArray - Creates a Kokkos sequential array-style vector,
1650:   where the user provides the array space to store the vector values. The array
1651:   provided must be a device array.

1653:   Collective

1655:   Input Parameters:
1656: + comm   - the communicator, should be PETSC_COMM_SELF
1657: . bs     - the block size
1658: . n      - the vector length
1659: - darray - device memory where the vector elements are to be stored.

1661:   Output Parameter:
1662: . v - the vector

1664:   Notes:
1665:   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
1666:   same type as an existing vector.

1668:   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
1669:   The user should not free the array until the vector is destroyed.

1671:   Level: intermediate

1673: .seealso: `VecCreateMPICUDAWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`,
1674:           `VecCreateGhost()`, `VecCreateSeq()`, `VecCreateSeqWithArray()`,
1675:           `VecCreateMPIWithArray()`
1676: @*/
1677: PetscErrorCode VecCreateSeqKokkosWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscScalar darray[], Vec *v)
1678: {
1679:   PetscMPIInt  size;
1680:   Vec          w;
1681:   Vec_Kokkos  *veckok = NULL;
1682:   PetscScalar *harray;

1684:   PetscFunctionBegin;
1685:   PetscCallMPI(MPI_Comm_size(comm, &size));
1686:   PetscCheck(size <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot create VECSEQKOKKOS on more than one process");

1688:   PetscCall(PetscKokkosInitializeCheck());
1689:   PetscCall(VecCreate(comm, &w));
1690:   PetscCall(VecSetSizes(w, n, n));
1691:   PetscCall(VecSetBlockSize(w, bs));
1692:   if (!darray) { /* Allocate memory ourselves if the user provided NULL */
1693:     PetscCall(VecSetType(w, VECSEQKOKKOS));
1694:   } else {
1695:     /* Build a VECSEQ, get its harray, and then build Vec_Kokkos along with darray */
1696:     if (std::is_same<DefaultMemorySpace, Kokkos::HostSpace>::value) {
1697:       harray = const_cast<PetscScalar *>(darray);
1698:       PetscCall(VecCreate_Seq_Private(w, harray)); /* Build a sequential vector with harray */
1699:     } else {
1700:       PetscCall(VecSetType(w, VECSEQ));
1701:       harray = static_cast<Vec_Seq *>(w->data)->array;
1702:     }
1703:     PetscCall(PetscObjectChangeTypeName((PetscObject)w, VECSEQKOKKOS)); /* Change it to Kokkos */
1704:     PetscCall(VecSetOps_SeqKokkos(w));
1705:     PetscCallCXX(veckok = new Vec_Kokkos{n, harray, const_cast<PetscScalar *>(darray)});
1706:     PetscCallCXX(veckok->v_dual.modify_device()); /* Mark the device as modified */
1707:     w->offloadmask = PETSC_OFFLOAD_KOKKOS;
1708:     w->spptr       = static_cast<void *>(veckok);
1709:   }
1710:   *v = w;
1711:   PetscFunctionReturn(PETSC_SUCCESS);
1712: }
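
/*
  A minimal usage sketch: creating a VECSEQKOKKOS on top of a caller-managed device array allocated with
  Kokkos::kokkos_malloc(). As noted in the manual page above, PETSc does not free darray, so the caller
  frees it only after the vector is destroyed. ExampleVecCreateSeqKokkosWithArray() is a hypothetical
  helper for illustration only.
*/
static PETSC_UNUSED PetscErrorCode ExampleVecCreateSeqKokkosWithArray(PetscInt n)
{
  Vec          x;
  PetscScalar *darray;

  PetscFunctionBegin;
  PetscCallCXX(darray = static_cast<PetscScalar *>(Kokkos::kokkos_malloc("example darray", sizeof(PetscScalar) * n)));
  PetscCall(VecCreateSeqKokkosWithArray(PETSC_COMM_SELF, 1, n, darray, &x));
  PetscCall(VecSet(x, 1.0)); /* ... use x ... */
  PetscCall(VecDestroy(&x));
  PetscCallCXX(Kokkos::kokkos_free(darray)); /* free only after VecDestroy() */
  PetscFunctionReturn(PETSC_SUCCESS);
}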

1714: PetscErrorCode VecConvert_Seq_SeqKokkos_inplace(Vec v)
1715: {
1716:   Vec_Seq *vecseq;

1718:   PetscFunctionBegin;
1719:   PetscCall(PetscKokkosInitializeCheck());
1720:   PetscCall(PetscLayoutSetUp(v->map));
1721:   PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECSEQKOKKOS));
1722:   PetscCall(VecSetOps_SeqKokkos(v));
1723:   PetscCheck(!v->spptr, PETSC_COMM_SELF, PETSC_ERR_PLIB, "v->spptr not NULL");
1724:   vecseq = static_cast<Vec_Seq *>(v->data);
1725:   PetscCallCXX(v->spptr = new Vec_Kokkos(v->map->n, vecseq->array, NULL));
1726:   v->offloadmask = PETSC_OFFLOAD_KOKKOS;
1727:   PetscFunctionReturn(PETSC_SUCCESS);
1728: }

1730: // Create a VECSEQKOKKOS with layout and arrays
1731: static PetscErrorCode VecCreateSeqKokkosWithLayoutAndArrays_Private(PetscLayout map, const PetscScalar harray[], const PetscScalar darray[], Vec *v)
1732: {
1733:   Vec w;

1735:   PetscFunctionBegin;
1736:   if (map->n > 0) PetscCheck(darray, map->comm, PETSC_ERR_ARG_WRONG, "darray cannot be NULL");
1737: #if defined(KOKKOS_ENABLE_DEFAULT_DEVICE_TYPE_HOST)
1738:   PetscCheck(harray == darray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "harray and darray must be the same");
1739: #endif
1740:   PetscCall(VecCreateSeqWithLayoutAndArray_Private(map, harray, &w));
1741:   PetscCall(PetscObjectChangeTypeName((PetscObject)w, VECSEQKOKKOS)); // Change it to VECKOKKOS
1742:   PetscCall(VecSetOps_SeqKokkos(w));
1743:   PetscCallCXX(w->spptr = new Vec_Kokkos(map->n, const_cast<PetscScalar *>(harray), const_cast<PetscScalar *>(darray)));
1744:   w->offloadmask = PETSC_OFFLOAD_KOKKOS;
1745:   *v             = w;
1746:   PetscFunctionReturn(PETSC_SUCCESS);
1747: }

1749: /*
1750:    VecCreateSeqKokkosWithArrays_Private - Creates a Kokkos sequential array-style vector
1751:    with user-provided arrays on host and device.

1753:    Collective

1755:    Input Parameter:
1756: +  comm - the communicator, should be PETSC_COMM_SELF
1757: .  bs - the block size
1758: .  n - the vector length
1759: .  harray - host memory where the vector elements are to be stored.
1760: -  darray - device memory where the vector elements are to be stored.

1762:    Output Parameter:
1763: .  v - the vector

1765:    Notes:
1766:    Unlike VecCreate{Seq,MPI}CUDAWithArrays(), this routine is private since we do not expect users to use it directly.

1768:    If there is no device, then harray and darray must be the same.
1769:    If n is not zero, then harray and darray must be allocated.
1770:    After the call, the created vector is assumed to be in a synchronized state, i.e.,
1771:    harray and darray are assumed to hold the same data.

1773:    PETSc does NOT free the arrays when the vector is destroyed via VecDestroy().
1774:    The caller should not free the arrays until the vector is destroyed.
1775: */
1776: static PetscErrorCode VecCreateSeqKokkosWithArrays_Private(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscScalar harray[], const PetscScalar darray[], Vec *v)
1777: {
1778:   PetscMPIInt size;
1779:   PetscLayout map;

1781:   PetscFunctionBegin;
1782:   PetscCall(PetscKokkosInitializeCheck());
1783:   PetscCallMPI(MPI_Comm_size(comm, &size));
1784:   PetscCheck(size == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot create VECSEQKOKKOS on more than one process");
1785:   PetscCall(PetscLayoutCreateFromSizes(comm, n, n, bs, &map));
1786:   PetscCall(VecCreateSeqKokkosWithLayoutAndArrays_Private(map, harray, darray, v));
1787:   PetscCall(PetscLayoutDestroy(&map));
1788:   PetscFunctionReturn(PETSC_SUCCESS);
1789: }

1791: /* TODO: ftn-auto generates veckok.kokkosf.c */
1792: /*@C
1793:   VecCreateSeqKokkos - Creates a standard, sequential array-style vector.

1795:   Collective

1797:   Input Parameters:
1798: + comm - the communicator, should be `PETSC_COMM_SELF`
1799: - n    - the vector length

1801:   Output Parameter:
1802: . v - the vector

1804:   Notes:
1805:   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
1806:   same type as an existing vector.

1808:   Level: intermediate

1810: .seealso: `VecCreateMPI()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`
1811:  @*/
1812: PetscErrorCode VecCreateSeqKokkos(MPI_Comm comm, PetscInt n, Vec *v)
1813: {
1814:   PetscFunctionBegin;
1815:   PetscCall(PetscKokkosInitializeCheck());
1816:   PetscCall(VecCreate(comm, v));
1817:   PetscCall(VecSetSizes(*v, n, n));
1818:   PetscCall(VecSetType(*v, VECSEQKOKKOS)); /* Calls VecCreate_SeqKokkos */
1819:   PetscFunctionReturn(PETSC_SUCCESS);
1820: }
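
/*
  A minimal usage sketch of VecCreateSeqKokkos(); ExampleVecCreateSeqKokkos() is a hypothetical helper
  for illustration only.
*/
static PETSC_UNUSED PetscErrorCode ExampleVecCreateSeqKokkos(void)
{
  Vec       x;
  PetscReal nrm;

  PetscFunctionBegin;
  PetscCall(VecCreateSeqKokkos(PETSC_COMM_SELF, 10, &x));
  PetscCall(VecSet(x, 1.0));
  PetscCall(VecNorm(x, NORM_2, &nrm)); /* nrm = sqrt(10) */
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(PETSC_SUCCESS);
}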

1822: // Duplicate a VECSEQKOKKOS m times, allocating the vectors' host/device arrays contiguously so that GEMV-based operations can treat them as a matrix
1823: static PetscErrorCode VecDuplicateVecs_SeqKokkos_GEMV(Vec w, PetscInt m, Vec *V[])
1824: {
1825:   PetscInt64   lda; // use 64-bit as we will do "m * lda"
1826:   PetscScalar *array_h, *array_d;
1827:   PetscLayout  map;

1829:   PetscFunctionBegin;
1830:   PetscCall(PetscKokkosInitializeCheck()); // as we'll call kokkos_malloc()
1831:   PetscCall(PetscMalloc1(m, V));
1832:   PetscCall(VecGetLayout(w, &map));
1833:   VecGetLocalSizeAligned(w, 64, &lda); // get in lda the local size padded to 64-byte alignment
1834:   // allocate raw arrays on host and device for all m vectors
1835:   PetscCall(PetscCalloc1(m * lda, &array_h));
1836: #if defined(KOKKOS_ENABLE_DEFAULT_DEVICE_TYPE_HOST)
1837:   array_d = array_h;
1838: #else
1839:   PetscCallCXX(array_d = static_cast<PetscScalar *>(Kokkos::kokkos_malloc("VecDuplicateVecs", sizeof(PetscScalar) * (m * lda))));
1840: #endif

1842:   // create the m vectors with raw arrays
1843:   for (PetscInt i = 0; i < m; i++) {
1844:     Vec v;
1845:     PetscCall(VecCreateSeqKokkosWithLayoutAndArrays_Private(map, &array_h[i * lda], &array_d[i * lda], &v));
1846:     PetscCallCXX(static_cast<Vec_Kokkos *>(v->spptr)->v_dual.modify_host()); // since only array_h has been initialized (zeroed)
1847:     PetscCall(PetscObjectListDuplicate(((PetscObject)w)->olist, &((PetscObject)v)->olist));
1848:     PetscCall(PetscFunctionListDuplicate(((PetscObject)w)->qlist, &((PetscObject)v)->qlist));
1849:     v->ops[0] = w->ops[0];
1850:     (*V)[i]   = v;
1851:   }

1853:   // let the first vector own the raw arrays, so when it is destroyed it will free the arrays
1854:   if (m) {
1855:     Vec v = (*V)[0];

1857:     static_cast<Vec_Seq *>(v->data)->array_allocated = array_h;
1858: #if !defined(KOKKOS_ENABLE_DEFAULT_DEVICE_TYPE_HOST)
1859:     static_cast<Vec_Kokkos *>(v->spptr)->raw_array_d_allocated = array_d;
1860: #endif
1861:     // disable replacearray for the first vector, since freeing its array would also free the arrays of the others in the group.
1862:     // Replacing the array of the other vectors is fine, since they do not own their arrays.
1863:     if (m > 1) v->ops->replacearray = VecReplaceArray_Default_GEMV_Error;
1864:   }
1865:   PetscFunctionReturn(PETSC_SUCCESS);
1866: }

1868: /*MC
1869:    VECSEQKOKKOS - VECSEQKOKKOS = "seqkokkos" - The basic sequential vector, modified to use Kokkos

1871:    Options Database Keys:
1872: . -vec_type seqkokkos - sets the vector type to VECSEQKOKKOS during a call to VecSetFromOptions()

1874:   Level: beginner

1876: .seealso: `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VECMPI`, `VecType`, `VecCreateMPI()`
1877: M*/
1878: PetscErrorCode VecCreate_SeqKokkos(Vec v)
1879: {
1880:   Vec_Seq  *vecseq;
1881:   PetscBool mdot_use_gemv  = PETSC_TRUE;
1882:   PetscBool maxpy_use_gemv = PETSC_FALSE; // defaults to false since we saw poor performance of vendor GEMV on tall-skinny matrices

1884:   PetscFunctionBegin;
1885:   PetscCall(PetscKokkosInitializeCheck());
1886:   PetscCall(PetscLayoutSetUp(v->map));
1887:   PetscCall(VecCreate_Seq(v)); /* Build a sequential vector, allocate array */
1888:   PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECSEQKOKKOS));
1889:   PetscCall(VecSetOps_SeqKokkos(v));
1890:   PetscCheck(!v->spptr, PETSC_COMM_SELF, PETSC_ERR_PLIB, "v->spptr not NULL");
1891:   vecseq = static_cast<Vec_Seq *>(v->data);
1892:   PetscCallCXX(v->spptr = new Vec_Kokkos(v->map->n, vecseq->array, NULL)); // Let the host claim it has the latest data (zeros)
1893:   v->offloadmask = PETSC_OFFLOAD_KOKKOS;
1894:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-vec_mdot_use_gemv", &mdot_use_gemv, NULL));
1895:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-vec_maxpy_use_gemv", &maxpy_use_gemv, NULL));

1897:   // allocate multiple vectors together
1898:   if (mdot_use_gemv || maxpy_use_gemv) v->ops[0].duplicatevecs = VecDuplicateVecs_SeqKokkos_GEMV;

1900:   if (mdot_use_gemv) {
1901:     v->ops[0].mdot        = VecMDot_SeqKokkos_GEMV;
1902:     v->ops[0].mtdot       = VecMTDot_SeqKokkos_GEMV;
1903:     v->ops[0].mdot_local  = VecMDot_SeqKokkos_GEMV;
1904:     v->ops[0].mtdot_local = VecMTDot_SeqKokkos_GEMV;
1905:   }
1906:   if (maxpy_use_gemv) v->ops[0].maxpy = VecMAXPY_SeqKokkos_GEMV;
1907:   PetscFunctionReturn(PETSC_SUCCESS);
1908: }