/* ex9bus.c */
/* Help/description string for this example */
static char help[] = "Power grid stability analysis of WECC 9 bus system.\n\
This example is based on the 9-bus (node) example given in the book Power\n\
Systems Dynamics and Stability (Chapter 7) by P. Sauer and M. A. Pai.\n\
The power grid in this example consists of 9 buses (nodes), 3 generators,\n\
3 loads, and 9 transmission lines. The network equations are written\n\
in current balance form using rectangular coordinates.\n\n";
8: /*
9: The equations for the stability analysis are described by the DAE
11: \dot{x} = f(x,y,t)
12: 0 = g(x,y,t)
14: where the generators are described by differential equations, while the algebraic
15: constraints define the network equations.
17: The generators are modeled with a 4th order differential equation describing the electrical
18: and mechanical dynamics. Each generator also has an exciter system modeled by 3rd order
19: diff. eqns. describing the exciter, voltage regulator, and the feedback stabilizer
20: mechanism.
22: The network equations are described by nodal current balance equations.
23: I(x,y) - Y*V = 0
25: where:
26: I(x,y) is the current injected from generators and loads.
27: Y is the admittance matrix, and
28: V is the voltage vector
29: */
31: /*
32: Include "petscts.h" so that we can use TS solvers. Note that this
33: file automatically includes:
34: petscsys.h - base PETSc routines petscvec.h - vectors
35: petscmat.h - matrices
36: petscis.h - index sets petscksp.h - Krylov subspace methods
37: petscviewer.h - viewers petscpc.h - preconditioners
38: petscksp.h - linear solvers
39: */
41: #include <petscts.h>
42: #include <petscdm.h>
43: #include <petscdmda.h>
44: #include <petscdmcomposite.h>
#define freq 60                   /* Nominal system frequency (Hz) */
#define w_s (2 * PETSC_PI * freq) /* Synchronous angular speed (rad/s) */

/* Sizes and indices */
const PetscInt nbus = 9;            /* Number of network buses */
const PetscInt ngen = 3;            /* Number of generators */
const PetscInt nload = 3;           /* Number of loads */
const PetscInt gbus[3] = {0, 1, 2}; /* Buses at which generators are incident */
const PetscInt lbus[3] = {4, 5, 7}; /* Buses at which loads are incident */

/* Generator real and reactive powers (found via loadflow) */
const PetscScalar PG[3] = {0.716786142395021, 1.630000000000000, 0.850000000000000};
const PetscScalar QG[3] = {0.270702180178785, 0.066120127797275, -0.108402221791588};
/* Generator constants */
const PetscScalar H[3] = {23.64, 6.4, 3.01};          /* Inertia constant */
const PetscScalar Rs[3] = {0.0, 0.0, 0.0};            /* Stator Resistance */
const PetscScalar Xd[3] = {0.146, 0.8958, 1.3125};    /* d-axis reactance */
const PetscScalar Xdp[3] = {0.0608, 0.1198, 0.1813};  /* d-axis transient reactance */
const PetscScalar Xq[3] = {0.4360, 0.8645, 1.2578};   /* q-axis reactance Xq(1) set to 0.4360, value given in text 0.0969 */
const PetscScalar Xqp[3] = {0.0969, 0.1969, 0.25};    /* q-axis transient reactance */
const PetscScalar Td0p[3] = {8.96, 6.0, 5.89};        /* d-axis open circuit time constant */
const PetscScalar Tq0p[3] = {0.31, 0.535, 0.6};       /* q-axis open circuit time constant */
PetscScalar M[3];                                     /* M = 2*H/w_s; computed in SetInitialGuess() */
PetscScalar D[3];                                     /* D = 0.1*M; computed in SetInitialGuess() */

PetscScalar TM[3]; /* Mechanical Torque; set to the generator real power PG in SetInitialGuess() */
/* Exciter system constants */
const PetscScalar KA[3] = {20.0, 20.0, 20.0};    /* Voltage regulator gain constant */
const PetscScalar TA[3] = {0.2, 0.2, 0.2};       /* Voltage regulator time constant */
const PetscScalar KE[3] = {1.0, 1.0, 1.0};       /* Exciter gain constant */
const PetscScalar TE[3] = {0.314, 0.314, 0.314}; /* Exciter time constant */
const PetscScalar KF[3] = {0.063, 0.063, 0.063}; /* Feedback stabilizer gain constant */
const PetscScalar TF[3] = {0.35, 0.35, 0.35};    /* Feedback stabilizer time constant */
const PetscScalar k1[3] = {0.0039, 0.0039, 0.0039};
const PetscScalar k2[3] = {1.555, 1.555, 1.555}; /* k1 and k2 for calculating the saturation function SE = k1*exp(k2*Efd) */
const PetscScalar VRMIN[3] = {-4.0, -4.0, -4.0}; /* Lower limit on the voltage regulator output VR */
const PetscScalar VRMAX[3] = {7.0, 7.0, 7.0};    /* Upper limit on the voltage regulator output VR */
PetscInt VRatmin[3];                             /* Flags: nonzero while VR is clamped at VRMIN (non-windup limiter) */
PetscInt VRatmax[3];                             /* Flags: nonzero while VR is clamped at VRMAX (non-windup limiter) */

PetscScalar Vref[3]; /* Voltage regulator reference voltages; set in SetInitialGuess() */
/* Load constants
  We use a composite load model that describes the load and reactive powers at each time instant as follows
  P(t) = \sum\limits_{i=0}^ld_nsegsp \ld_alphap_i*P_D0(\frac{V_m(t)}{V_m0})^\ld_betap_i
  Q(t) = \sum\limits_{i=0}^ld_nsegsq \ld_alphaq_i*Q_D0(\frac{V_m(t)}{V_m0})^\ld_betaq_i
  where
    ld_nsegsp,ld_nsegsq - Number of individual load models for real and reactive power loads
    ld_alphap,ld_alphaq - Percentage contribution (weights) of loads
    P_D0                - Real power load
    Q_D0                - Reactive power load
    V_m(t)              - Voltage magnitude at time t
    V_m0                - Voltage magnitude at t = 0
    ld_betap, ld_betaq  - exponents describing the load model for real and reactive part

  Note: All loads have the same characteristic currently.
*/
const PetscScalar PD0[3] = {1.25, 0.9, 1.0}; /* Initial real power loads */
const PetscScalar QD0[3] = {0.5, 0.3, 0.35}; /* Initial reactive power loads */
const PetscInt ld_nsegsp[3] = {3, 3, 3};
const PetscScalar ld_alphap[3] = {1.0, 0.0, 0.0};
const PetscScalar ld_betap[3] = {2.0, 1.0, 0.0};
const PetscInt ld_nsegsq[3] = {3, 3, 3};
const PetscScalar ld_alphaq[3] = {1.0, 0.0, 0.0};
const PetscScalar ld_betaq[3] = {2.0, 1.0, 0.0};
/* Application context holding the grid model, fault data, and solver state */
typedef struct {
  DM dmgen, dmnet;                     /* DMs to manage generator and network subsystem */
  DM dmpgrid;                          /* Composite DM to manage the entire power grid */
  Mat Ybus;                            /* Network admittance matrix */
  Vec V0;                              /* Initial voltage vector (Power flow solution) */
  PetscReal tfaulton, tfaultoff;       /* Fault on and off times */
  PetscInt faultbus;                   /* Fault bus */
  PetscScalar Rfault;                  /* Fault resistance; applied as a 1/Rfault shunt on the faulted bus */
  PetscReal t0, tmax;                  /* Simulation start and end times */
  PetscInt neqs_gen, neqs_net, neqs_pgrid; /* Number of generator, network, and total equations */
  Mat Sol;                             /* Matrix to save solution at each time step */
  PetscInt stepnum;                    /* Index of the next solution snapshot written into Sol */
  PetscReal t;                         /* Current solver time, recorded in RHSFunction */
  SNES snes_alg;                       /* SNES used for the algebraic solves at fault on/off */
  IS is_diff;                          /* indices for differential equations */
  IS is_alg;                           /* indices for algebraic equations */
  PetscBool setisdiff;                 /* TS computes truncation error based only on the differential variables */
  PetscBool semiexplicit;              /* If the flag is set then a semi-explicit method is used using TSRK */
} Userctx;
131: /*
132: The first two events are for fault on and off, respectively. The following events are
133: to check the min/max limits on the state variable VR. A non windup limiter is used for
134: the VR limits.
135: */
136: PetscErrorCode EventFunction(TS ts, PetscReal t, Vec X, PetscReal *fvalue, PetscCtx ctx)
137: {
138: Userctx *user = (Userctx *)ctx;
139: Vec Xgen, Xnet;
140: PetscInt i, idx = 0;
141: const PetscScalar *xgen, *xnet;
142: PetscScalar Efd, RF, VR, Vr, Vi, Vm;
144: PetscFunctionBegin;
145: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
146: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
148: PetscCall(VecGetArrayRead(Xgen, &xgen));
149: PetscCall(VecGetArrayRead(Xnet, &xnet));
151: /* Event for fault-on time */
152: fvalue[0] = t - user->tfaulton;
153: /* Event for fault-off time */
154: fvalue[1] = t - user->tfaultoff;
156: for (i = 0; i < ngen; i++) {
157: Efd = xgen[idx + 6];
158: RF = xgen[idx + 7];
159: VR = xgen[idx + 8];
161: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
162: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
163: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
165: if (!VRatmax[i]) {
166: fvalue[2 + 2 * i] = PetscRealPart(VRMAX[i] - VR);
167: } else {
168: fvalue[2 + 2 * i] = PetscRealPart((VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i]);
169: }
170: if (!VRatmin[i]) {
171: fvalue[2 + 2 * i + 1] = PetscRealPart(VRMIN[i] - VR);
172: } else {
173: fvalue[2 + 2 * i + 1] = PetscRealPart((VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i]);
174: }
175: idx = idx + 9;
176: }
177: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
178: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
180: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
181: PetscFunctionReturn(PETSC_SUCCESS);
182: }
184: PetscErrorCode PostEventFunction(TS ts, PetscInt nevents, PetscInt event_list[], PetscReal t, Vec X, PetscBool forwardsolve, PetscCtx ctx)
185: {
186: Userctx *user = (Userctx *)ctx;
187: Vec Xgen, Xnet;
188: PetscScalar *xgen, *xnet;
189: PetscInt row_loc, col_loc;
190: PetscScalar val;
191: PetscInt i, idx = 0, event_num;
192: PetscScalar fvalue;
193: PetscScalar Efd, RF, VR;
194: PetscScalar Vr, Vi, Vm;
196: PetscFunctionBegin;
197: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
198: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
200: PetscCall(VecGetArray(Xgen, &xgen));
201: PetscCall(VecGetArray(Xnet, &xnet));
203: for (i = 0; i < nevents; i++) {
204: if (event_list[i] == 0) {
205: /* Apply disturbance - resistive fault at user->faultbus */
206: /* This is done by adding shunt conductance to the diagonal location
207: in the Ybus matrix */
208: row_loc = 2 * user->faultbus;
209: col_loc = 2 * user->faultbus + 1; /* Location for G */
210: val = 1 / user->Rfault;
211: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
212: row_loc = 2 * user->faultbus + 1;
213: col_loc = 2 * user->faultbus; /* Location for G */
214: val = 1 / user->Rfault;
215: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
217: PetscCall(MatAssemblyBegin(user->Ybus, MAT_FINAL_ASSEMBLY));
218: PetscCall(MatAssemblyEnd(user->Ybus, MAT_FINAL_ASSEMBLY));
220: /* Solve the algebraic equations */
221: PetscCall(SNESSolve(user->snes_alg, NULL, X));
222: } else if (event_list[i] == 1) {
223: /* Remove the fault */
224: row_loc = 2 * user->faultbus;
225: col_loc = 2 * user->faultbus + 1;
226: val = -1 / user->Rfault;
227: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
228: row_loc = 2 * user->faultbus + 1;
229: col_loc = 2 * user->faultbus;
230: val = -1 / user->Rfault;
231: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
233: PetscCall(MatAssemblyBegin(user->Ybus, MAT_FINAL_ASSEMBLY));
234: PetscCall(MatAssemblyEnd(user->Ybus, MAT_FINAL_ASSEMBLY));
236: /* Solve the algebraic equations */
237: PetscCall(SNESSolve(user->snes_alg, NULL, X));
239: /* Check the VR derivatives and reset flags if needed */
240: for (i = 0; i < ngen; i++) {
241: Efd = xgen[idx + 6];
242: RF = xgen[idx + 7];
243: VR = xgen[idx + 8];
245: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
246: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
247: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
249: if (VRatmax[i]) {
250: fvalue = (VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i];
251: if (fvalue < 0) {
252: VRatmax[i] = 0;
253: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: dVR_dt went negative on fault clearing at time %g\n", i, (double)t));
254: }
255: }
256: if (VRatmin[i]) {
257: fvalue = (VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i];
259: if (fvalue > 0) {
260: VRatmin[i] = 0;
261: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: dVR_dt went positive on fault clearing at time %g\n", i, (double)t));
262: }
263: }
264: idx = idx + 9;
265: }
266: } else {
267: idx = (event_list[i] - 2) / 2;
268: event_num = (event_list[i] - 2) % 2;
269: if (event_num == 0) { /* Max VR */
270: if (!VRatmax[idx]) {
271: VRatmax[idx] = 1;
272: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: hit upper limit at time %g\n", idx, (double)t));
273: } else {
274: VRatmax[idx] = 0;
275: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: freeing variable as dVR_dt is negative at time %g\n", idx, (double)t));
276: }
277: } else {
278: if (!VRatmin[idx]) {
279: VRatmin[idx] = 1;
280: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: hit lower limit at time %g\n", idx, (double)t));
281: } else {
282: VRatmin[idx] = 0;
283: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: freeing variable as dVR_dt is positive at time %g\n", idx, (double)t));
284: }
285: }
286: }
287: }
289: PetscCall(VecRestoreArray(Xgen, &xgen));
290: PetscCall(VecRestoreArray(Xnet, &xnet));
292: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
293: PetscFunctionReturn(PETSC_SUCCESS);
294: }
296: /* Converts from machine frame (dq) to network (phase a real,imag) reference frame */
297: PetscErrorCode dq2ri(PetscScalar Fd, PetscScalar Fq, PetscScalar delta, PetscScalar *Fr, PetscScalar *Fi)
298: {
299: PetscFunctionBegin;
300: *Fr = Fd * PetscSinScalar(delta) + Fq * PetscCosScalar(delta);
301: *Fi = -Fd * PetscCosScalar(delta) + Fq * PetscSinScalar(delta);
302: PetscFunctionReturn(PETSC_SUCCESS);
303: }
305: /* Converts from network frame ([phase a real,imag) to machine (dq) reference frame */
306: PetscErrorCode ri2dq(PetscScalar Fr, PetscScalar Fi, PetscScalar delta, PetscScalar *Fd, PetscScalar *Fq)
307: {
308: PetscFunctionBegin;
309: *Fd = Fr * PetscSinScalar(delta) - Fi * PetscCosScalar(delta);
310: *Fq = Fr * PetscCosScalar(delta) + Fi * PetscSinScalar(delta);
311: PetscFunctionReturn(PETSC_SUCCESS);
312: }
314: /* Saves the solution at each time to a matrix */
315: PetscErrorCode SaveSolution(TS ts)
316: {
317: Userctx *user;
318: Vec X;
319: const PetscScalar *x;
320: PetscScalar *mat;
321: PetscInt idx;
322: PetscReal t;
324: PetscFunctionBegin;
325: PetscCall(TSGetApplicationContext(ts, &user));
326: PetscCall(TSGetTime(ts, &t));
327: PetscCall(TSGetSolution(ts, &X));
328: idx = user->stepnum * (user->neqs_pgrid + 1);
329: PetscCall(MatDenseGetArray(user->Sol, &mat));
330: PetscCall(VecGetArrayRead(X, &x));
331: mat[idx] = t;
332: PetscCall(PetscArraycpy(mat + idx + 1, x, user->neqs_pgrid));
333: PetscCall(MatDenseRestoreArray(user->Sol, &mat));
334: PetscCall(VecRestoreArrayRead(X, &x));
335: user->stepnum++;
336: PetscFunctionReturn(PETSC_SUCCESS);
337: }
/*
   Sets the initial conditions for the DAE from the power-flow solution
   stored in user->V0: the network voltages are copied over directly, and
   the generator and exciter states are computed from the steady-state
   relations (see Sauer and Pai, Chapter 7).
*/
PetscErrorCode SetInitialGuess(Vec X, Userctx *user)
{
  Vec                Xgen, Xnet;
  PetscScalar       *xgen;
  const PetscScalar *xnet;
  PetscInt           i, idx = 0;
  PetscScalar        Vr, Vi, IGr, IGi, Vm, Vm2;
  PetscScalar        Eqp, Edp, delta;
  PetscScalar        Efd, RF, VR; /* Exciter variables */
  PetscScalar        Id, Iq;      /* Generator dq axis currents */
  PetscScalar        theta, Vd, Vq, SE;

  PetscFunctionBegin;
  /* Derived generator constants: M = 2H/w_s and damping D = 0.1*M */
  M[0] = 2 * H[0] / w_s;
  M[1] = 2 * H[1] / w_s;
  M[2] = 2 * H[2] / w_s;
  D[0] = 0.1 * M[0];
  D[1] = 0.1 * M[1];
  D[2] = 0.1 * M[2];

  PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));

  /* Network subsystem initialization */
  PetscCall(VecCopy(user->V0, Xnet));

  /* Generator subsystem initialization */
  PetscCall(VecGetArrayWrite(Xgen, &xgen));
  PetscCall(VecGetArrayRead(Xnet, &xnet));

  for (i = 0; i < ngen; i++) {
    Vr  = xnet[2 * gbus[i]];     /* Real part of generator terminal voltage */
    Vi  = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
    Vm  = PetscSqrtScalar(Vr * Vr + Vi * Vi);
    Vm2 = Vm * Vm;
    /* Generator current injection from the scheduled P,Q: IG = conj(S/V) */
    IGr = (Vr * PG[i] + Vi * QG[i]) / Vm2;
    IGi = (Vi * PG[i] - Vr * QG[i]) / Vm2;

    delta = PetscAtan2Real(Vi + Xq[i] * IGr, Vr - Xq[i] * IGi); /* Machine angle */

    theta = PETSC_PI / 2.0 - delta;

    Id = IGr * PetscCosScalar(theta) - IGi * PetscSinScalar(theta); /* d-axis stator current */
    Iq = IGr * PetscSinScalar(theta) + IGi * PetscCosScalar(theta); /* q-axis stator current */

    Vd = Vr * PetscCosScalar(theta) - Vi * PetscSinScalar(theta);
    Vq = Vr * PetscSinScalar(theta) + Vi * PetscCosScalar(theta);

    Edp = Vd + Rs[i] * Id - Xqp[i] * Iq; /* d-axis transient EMF */
    Eqp = Vq + Rs[i] * Iq + Xdp[i] * Id; /* q-axis transient EMF */

    /* At steady state the mechanical torque balances the generator real power */
    TM[i] = PG[i];

    /* The generator variables are ordered as [Eqp,Edp,delta,w,Id,Iq] */
    xgen[idx]     = Eqp;
    xgen[idx + 1] = Edp;
    xgen[idx + 2] = delta;
    xgen[idx + 3] = w_s; /* Initial speed is the synchronous speed */

    idx = idx + 4;

    xgen[idx]     = Id;
    xgen[idx + 1] = Iq;

    idx = idx + 2;

    /* Exciter: field voltage, saturation, regulator output, stabilizer state */
    Efd = Eqp + (Xd[i] - Xdp[i]) * Id;
    SE  = k1[i] * PetscExpScalar(k2[i] * Efd);
    VR  = KE[i] * Efd + SE;
    RF  = KF[i] * Efd / TF[i];

    xgen[idx]     = Efd;
    xgen[idx + 1] = RF;
    xgen[idx + 2] = VR;

    Vref[i] = Vm + (VR / KA[i]);

    /* VR starts strictly inside its limits */
    VRatmin[i] = VRatmax[i] = 0;

    idx = idx + 3;
  }
  PetscCall(VecRestoreArrayWrite(Xgen, &xgen));
  PetscCall(VecRestoreArrayRead(Xnet, &xnet));

  /* PetscCall(VecView(Xgen,0)); */
  PetscCall(DMCompositeGather(user->dmpgrid, INSERT_VALUES, X, Xgen, Xnet));
  PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
   Computes the combined residual F = [f(x,y); g(x,y)]: the generator and
   exciter differential equations, the algebraic stator-current equations,
   and the network current-balance equations (Y*V plus generator and load
   current injections).
*/
PetscErrorCode ResidualFunction(Vec X, Vec F, Userctx *user)
{
  Vec                Xgen, Xnet, Fgen, Fnet;
  const PetscScalar *xgen, *xnet;
  PetscScalar       *fgen, *fnet;
  PetscInt           i, idx = 0;
  PetscScalar        Vr, Vi, Vm, Vm2;
  PetscScalar        Eqp, Edp, delta, w; /* Generator variables */
  PetscScalar        Efd, RF, VR;        /* Exciter variables */
  PetscScalar        Id, Iq;             /* Generator dq axis currents */
  PetscScalar        Vd, Vq, SE;
  PetscScalar        IGr, IGi, IDr, IDi;
  PetscScalar        Zdq_inv[4], det;
  PetscScalar        PD, QD, Vm0, *v0;

  PetscFunctionBegin;
  PetscCall(VecZeroEntries(F));
  PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
  PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Fgen, &Fnet));
  PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
  PetscCall(DMCompositeScatter(user->dmpgrid, F, Fgen, Fnet));

  /* Network current balance residual IG + Y*V + IL = 0. Only YV is added here.
     The generator current injection, IG, and load current injection, ID are added later
  */
  /* Note that the values in Ybus are stored assuming the imaginary current balance
     equation is ordered first followed by real current balance equation for each bus.
     Thus imaginary current contribution goes in location 2*i, and
     real current contribution in 2*i+1
  */
  PetscCall(MatMult(user->Ybus, Xnet, Fnet));

  PetscCall(VecGetArrayRead(Xgen, &xgen));
  PetscCall(VecGetArrayRead(Xnet, &xnet));
  PetscCall(VecGetArrayWrite(Fgen, &fgen));
  PetscCall(VecGetArrayWrite(Fnet, &fnet));

  /* Generator subsystem */
  for (i = 0; i < ngen; i++) {
    /* Unpack the 9 per-generator states: [Eqp,Edp,delta,w,Id,Iq,Efd,RF,VR] */
    Eqp   = xgen[idx];
    Edp   = xgen[idx + 1];
    delta = xgen[idx + 2];
    w     = xgen[idx + 3];
    Id    = xgen[idx + 4];
    Iq    = xgen[idx + 5];
    Efd   = xgen[idx + 6];
    RF    = xgen[idx + 7];
    VR    = xgen[idx + 8];

    /* Generator differential equations */
    fgen[idx]     = (-Eqp - (Xd[i] - Xdp[i]) * Id + Efd) / Td0p[i];
    fgen[idx + 1] = (-Edp + (Xq[i] - Xqp[i]) * Iq) / Tq0p[i];
    fgen[idx + 2] = w - w_s;
    fgen[idx + 3] = (TM[i] - Edp * Id - Eqp * Iq - (Xqp[i] - Xdp[i]) * Id * Iq - D[i] * (w - w_s)) / M[i];

    Vr = xnet[2 * gbus[i]];     /* Real part of generator terminal voltage */
    Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */

    PetscCall(ri2dq(Vr, Vi, delta, &Vd, &Vq));
    /* Algebraic equations for stator currents */
    det = Rs[i] * Rs[i] + Xdp[i] * Xqp[i];

    Zdq_inv[0] = Rs[i] / det;
    Zdq_inv[1] = Xqp[i] / det;
    Zdq_inv[2] = -Xdp[i] / det;
    Zdq_inv[3] = Rs[i] / det;

    fgen[idx + 4] = Zdq_inv[0] * (-Edp + Vd) + Zdq_inv[1] * (-Eqp + Vq) + Id;
    fgen[idx + 5] = Zdq_inv[2] * (-Edp + Vd) + Zdq_inv[3] * (-Eqp + Vq) + Iq;

    /* Add generator current injection to network */
    PetscCall(dq2ri(Id, Iq, delta, &IGr, &IGi));

    fnet[2 * gbus[i]] -= IGi;
    fnet[2 * gbus[i] + 1] -= IGr;

    Vm = PetscSqrtScalar(Vd * Vd + Vq * Vq);

    /* Saturation function for the exciter */
    SE = k1[i] * PetscExpScalar(k2[i] * Efd);

    /* Exciter differential equations */
    fgen[idx + 6] = (-KE[i] * Efd - SE + VR) / TE[i];
    fgen[idx + 7] = (-RF + KF[i] * Efd / TF[i]) / TF[i];
    /* Non-windup limiter: while clamped the residual pins VR at its limit */
    if (VRatmax[i]) fgen[idx + 8] = VR - VRMAX[i];
    else if (VRatmin[i]) fgen[idx + 8] = VRMIN[i] - VR;
    else fgen[idx + 8] = (-VR + KA[i] * RF - KA[i] * KF[i] * Efd / TF[i] + KA[i] * (Vref[i] - Vm)) / TA[i];

    idx = idx + 9;
  }

  /* Load subsystem: composite load model evaluated relative to the initial
     voltage magnitude (see the load-constants comment above) */
  PetscCall(VecGetArray(user->V0, &v0));
  for (i = 0; i < nload; i++) {
    Vr  = xnet[2 * lbus[i]];     /* Real part of load bus voltage */
    Vi  = xnet[2 * lbus[i] + 1]; /* Imaginary part of the load bus voltage */
    Vm  = PetscSqrtScalar(Vr * Vr + Vi * Vi);
    Vm2 = Vm * Vm;
    Vm0 = PetscSqrtScalar(v0[2 * lbus[i]] * v0[2 * lbus[i]] + v0[2 * lbus[i] + 1] * v0[2 * lbus[i] + 1]);
    PD = QD = 0.0;
    for (PetscInt k = 0; k < ld_nsegsp[i]; k++) PD += ld_alphap[k] * PD0[i] * PetscPowScalar(Vm / Vm0, ld_betap[k]);
    for (PetscInt k = 0; k < ld_nsegsq[i]; k++) QD += ld_alphaq[k] * QD0[i] * PetscPowScalar(Vm / Vm0, ld_betaq[k]);

    /* Load currents */
    IDr = (PD * Vr + QD * Vi) / Vm2;
    IDi = (-QD * Vr + PD * Vi) / Vm2;

    fnet[2 * lbus[i]] += IDi;
    fnet[2 * lbus[i] + 1] += IDr;
  }
  PetscCall(VecRestoreArray(user->V0, &v0));

  PetscCall(VecRestoreArrayRead(Xgen, &xgen));
  PetscCall(VecRestoreArrayRead(Xnet, &xnet));
  PetscCall(VecRestoreArrayWrite(Fgen, &fgen));
  PetscCall(VecRestoreArrayWrite(Fnet, &fnet));

  PetscCall(DMCompositeGather(user->dmpgrid, INSERT_VALUES, F, Fgen, Fnet));
  PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
  PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Fgen, &Fnet));
  PetscFunctionReturn(PETSC_SUCCESS);
}
551: /* f(x,y)
552: g(x,y)
553: */
554: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec X, Vec F, PetscCtx ctx)
555: {
556: Userctx *user = (Userctx *)ctx;
558: PetscFunctionBegin;
559: user->t = t;
560: PetscCall(ResidualFunction(X, F, user));
561: PetscFunctionReturn(PETSC_SUCCESS);
562: }
564: /* f(x,y) - \dot{x}
565: g(x,y) = 0
566: */
567: PetscErrorCode IFunction(TS ts, PetscReal t, Vec X, Vec Xdot, Vec F, PetscCtx ctx)
568: {
569: PetscScalar *f;
570: const PetscScalar *xdot;
571: PetscInt i;
573: PetscFunctionBegin;
574: PetscCall(RHSFunction(ts, t, X, F, ctx));
575: PetscCall(VecScale(F, -1.0));
576: PetscCall(VecGetArray(F, &f));
577: PetscCall(VecGetArrayRead(Xdot, &xdot));
578: for (i = 0; i < ngen; i++) {
579: f[9 * i] += xdot[9 * i];
580: f[9 * i + 1] += xdot[9 * i + 1];
581: f[9 * i + 2] += xdot[9 * i + 2];
582: f[9 * i + 3] += xdot[9 * i + 3];
583: f[9 * i + 6] += xdot[9 * i + 6];
584: f[9 * i + 7] += xdot[9 * i + 7];
585: f[9 * i + 8] += xdot[9 * i + 8];
586: }
587: PetscCall(VecRestoreArray(F, &f));
588: PetscCall(VecRestoreArrayRead(Xdot, &xdot));
589: PetscFunctionReturn(PETSC_SUCCESS);
590: }
592: /* This function is used for solving the algebraic system only during fault on and
593: off times. It computes the entire F and then zeros out the part corresponding to
594: differential equations
595: F = [0;g(y)];
596: */
597: PetscErrorCode AlgFunction(SNES snes, Vec X, Vec F, PetscCtx ctx)
598: {
599: Userctx *user = (Userctx *)ctx;
600: PetscScalar *f;
601: PetscInt i;
603: PetscFunctionBegin;
604: PetscCall(ResidualFunction(X, F, user));
605: PetscCall(VecGetArray(F, &f));
606: for (i = 0; i < ngen; i++) {
607: f[9 * i] = 0;
608: f[9 * i + 1] = 0;
609: f[9 * i + 2] = 0;
610: f[9 * i + 3] = 0;
611: f[9 * i + 6] = 0;
612: f[9 * i + 7] = 0;
613: f[9 * i + 8] = 0;
614: }
615: PetscCall(VecRestoreArray(F, &f));
616: PetscFunctionReturn(PETSC_SUCCESS);
617: }
619: PetscErrorCode PostStage(TS ts, PetscReal t, PetscInt i, Vec *X)
620: {
621: Userctx *user;
623: PetscFunctionBegin;
624: PetscCall(TSGetApplicationContext(ts, &user));
625: PetscCall(SNESSolve(user->snes_alg, NULL, X[i]));
626: PetscFunctionReturn(PETSC_SUCCESS);
627: }
629: PetscErrorCode PostEvaluate(TS ts)
630: {
631: Userctx *user;
632: Vec X;
634: PetscFunctionBegin;
635: PetscCall(TSGetApplicationContext(ts, &user));
636: PetscCall(TSGetSolution(ts, &X));
637: PetscCall(SNESSolve(user->snes_alg, NULL, X));
638: PetscFunctionReturn(PETSC_SUCCESS);
639: }
641: PetscErrorCode PreallocateJacobian(Mat J, Userctx *user)
642: {
643: PetscInt *d_nnz;
644: PetscInt i, idx = 0, start = 0;
645: PetscInt ncols;
647: PetscFunctionBegin;
648: PetscCall(PetscMalloc1(user->neqs_pgrid, &d_nnz));
649: for (i = 0; i < user->neqs_pgrid; i++) d_nnz[i] = 0;
650: /* Generator subsystem */
651: for (i = 0; i < ngen; i++) {
652: d_nnz[idx] += 3;
653: d_nnz[idx + 1] += 2;
654: d_nnz[idx + 2] += 2;
655: d_nnz[idx + 3] += 5;
656: d_nnz[idx + 4] += 6;
657: d_nnz[idx + 5] += 6;
659: d_nnz[user->neqs_gen + 2 * gbus[i]] += 3;
660: d_nnz[user->neqs_gen + 2 * gbus[i] + 1] += 3;
662: d_nnz[idx + 6] += 2;
663: d_nnz[idx + 7] += 2;
664: d_nnz[idx + 8] += 5;
666: idx = idx + 9;
667: }
668: start = user->neqs_gen;
670: for (i = 0; i < nbus; i++) {
671: PetscCall(MatGetRow(user->Ybus, 2 * i, &ncols, NULL, NULL));
672: d_nnz[start + 2 * i] += ncols;
673: d_nnz[start + 2 * i + 1] += ncols;
674: PetscCall(MatRestoreRow(user->Ybus, 2 * i, &ncols, NULL, NULL));
675: }
676: PetscCall(MatSeqAIJSetPreallocation(J, 0, d_nnz));
677: PetscCall(PetscFree(d_nnz));
678: PetscFunctionReturn(PETSC_SUCCESS);
679: }
681: /*
682: J = [df_dx, df_dy
683: dg_dx, dg_dy]
684: */
685: PetscErrorCode ResidualJacobian(Vec X, Mat J, Mat B, PetscCtx ctx)
686: {
687: Userctx *user = (Userctx *)ctx;
688: Vec Xgen, Xnet;
689: const PetscScalar *xgen, *xnet;
690: PetscInt i, idx = 0;
691: PetscScalar Vr, Vi, Vm, Vm2;
692: PetscScalar Eqp, Edp, delta; /* Generator variables */
693: PetscScalar Efd;
694: PetscScalar Id, Iq; /* Generator dq axis currents */
695: PetscScalar Vd, Vq;
696: PetscScalar val[10];
697: PetscInt row[2], col[10];
698: PetscInt net_start = user->neqs_gen;
699: PetscScalar Zdq_inv[4], det;
700: PetscScalar dVd_dVr, dVd_dVi, dVq_dVr, dVq_dVi, dVd_ddelta, dVq_ddelta;
701: PetscScalar dIGr_ddelta, dIGi_ddelta, dIGr_dId, dIGr_dIq, dIGi_dId, dIGi_dIq;
702: PetscScalar dSE_dEfd;
703: PetscScalar dVm_dVd, dVm_dVq, dVm_dVr, dVm_dVi;
704: PetscInt ncols;
705: const PetscInt *cols;
706: const PetscScalar *yvals;
707: PetscScalar PD, QD, Vm0, Vm4;
708: const PetscScalar *v0;
709: PetscScalar dPD_dVr, dPD_dVi, dQD_dVr, dQD_dVi;
710: PetscScalar dIDr_dVr, dIDr_dVi, dIDi_dVr, dIDi_dVi;
712: PetscFunctionBegin;
713: PetscCall(MatZeroEntries(B));
714: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
715: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
717: PetscCall(VecGetArrayRead(Xgen, &xgen));
718: PetscCall(VecGetArrayRead(Xnet, &xnet));
720: /* Generator subsystem */
721: for (i = 0; i < ngen; i++) {
722: Eqp = xgen[idx];
723: Edp = xgen[idx + 1];
724: delta = xgen[idx + 2];
725: Id = xgen[idx + 4];
726: Iq = xgen[idx + 5];
727: Efd = xgen[idx + 6];
729: /* fgen[idx] = (-Eqp - (Xd[i] - Xdp[i])*Id + Efd)/Td0p[i]; */
730: row[0] = idx;
731: col[0] = idx;
732: col[1] = idx + 4;
733: col[2] = idx + 6;
734: val[0] = -1 / Td0p[i];
735: val[1] = -(Xd[i] - Xdp[i]) / Td0p[i];
736: val[2] = 1 / Td0p[i];
738: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
740: /* fgen[idx+1] = (-Edp + (Xq[i] - Xqp[i])*Iq)/Tq0p[i]; */
741: row[0] = idx + 1;
742: col[0] = idx + 1;
743: col[1] = idx + 5;
744: val[0] = -1 / Tq0p[i];
745: val[1] = (Xq[i] - Xqp[i]) / Tq0p[i];
746: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
748: /* fgen[idx+2] = w - w_s; */
749: row[0] = idx + 2;
750: col[0] = idx + 2;
751: col[1] = idx + 3;
752: val[0] = 0;
753: val[1] = 1;
754: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
756: /* fgen[idx+3] = (TM[i] - Edp*Id - Eqp*Iq - (Xqp[i] - Xdp[i])*Id*Iq - D[i]*(w - w_s))/M[i]; */
757: row[0] = idx + 3;
758: col[0] = idx;
759: col[1] = idx + 1;
760: col[2] = idx + 3;
761: col[3] = idx + 4;
762: col[4] = idx + 5;
763: val[0] = -Iq / M[i];
764: val[1] = -Id / M[i];
765: val[2] = -D[i] / M[i];
766: val[3] = (-Edp - (Xqp[i] - Xdp[i]) * Iq) / M[i];
767: val[4] = (-Eqp - (Xqp[i] - Xdp[i]) * Id) / M[i];
768: PetscCall(MatSetValues(J, 1, row, 5, col, val, INSERT_VALUES));
770: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
771: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
772: PetscCall(ri2dq(Vr, Vi, delta, &Vd, &Vq));
774: det = Rs[i] * Rs[i] + Xdp[i] * Xqp[i];
776: Zdq_inv[0] = Rs[i] / det;
777: Zdq_inv[1] = Xqp[i] / det;
778: Zdq_inv[2] = -Xdp[i] / det;
779: Zdq_inv[3] = Rs[i] / det;
781: dVd_dVr = PetscSinScalar(delta);
782: dVd_dVi = -PetscCosScalar(delta);
783: dVq_dVr = PetscCosScalar(delta);
784: dVq_dVi = PetscSinScalar(delta);
785: dVd_ddelta = Vr * PetscCosScalar(delta) + Vi * PetscSinScalar(delta);
786: dVq_ddelta = -Vr * PetscSinScalar(delta) + Vi * PetscCosScalar(delta);
788: /* fgen[idx+4] = Zdq_inv[0]*(-Edp + Vd) + Zdq_inv[1]*(-Eqp + Vq) + Id; */
789: row[0] = idx + 4;
790: col[0] = idx;
791: col[1] = idx + 1;
792: col[2] = idx + 2;
793: val[0] = -Zdq_inv[1];
794: val[1] = -Zdq_inv[0];
795: val[2] = Zdq_inv[0] * dVd_ddelta + Zdq_inv[1] * dVq_ddelta;
796: col[3] = idx + 4;
797: col[4] = net_start + 2 * gbus[i];
798: col[5] = net_start + 2 * gbus[i] + 1;
799: val[3] = 1;
800: val[4] = Zdq_inv[0] * dVd_dVr + Zdq_inv[1] * dVq_dVr;
801: val[5] = Zdq_inv[0] * dVd_dVi + Zdq_inv[1] * dVq_dVi;
802: PetscCall(MatSetValues(J, 1, row, 6, col, val, INSERT_VALUES));
804: /* fgen[idx+5] = Zdq_inv[2]*(-Edp + Vd) + Zdq_inv[3]*(-Eqp + Vq) + Iq; */
805: row[0] = idx + 5;
806: col[0] = idx;
807: col[1] = idx + 1;
808: col[2] = idx + 2;
809: val[0] = -Zdq_inv[3];
810: val[1] = -Zdq_inv[2];
811: val[2] = Zdq_inv[2] * dVd_ddelta + Zdq_inv[3] * dVq_ddelta;
812: col[3] = idx + 5;
813: col[4] = net_start + 2 * gbus[i];
814: col[5] = net_start + 2 * gbus[i] + 1;
815: val[3] = 1;
816: val[4] = Zdq_inv[2] * dVd_dVr + Zdq_inv[3] * dVq_dVr;
817: val[5] = Zdq_inv[2] * dVd_dVi + Zdq_inv[3] * dVq_dVi;
818: PetscCall(MatSetValues(J, 1, row, 6, col, val, INSERT_VALUES));
820: dIGr_ddelta = Id * PetscCosScalar(delta) - Iq * PetscSinScalar(delta);
821: dIGi_ddelta = Id * PetscSinScalar(delta) + Iq * PetscCosScalar(delta);
822: dIGr_dId = PetscSinScalar(delta);
823: dIGr_dIq = PetscCosScalar(delta);
824: dIGi_dId = -PetscCosScalar(delta);
825: dIGi_dIq = PetscSinScalar(delta);
827: /* fnet[2*gbus[i]] -= IGi; */
828: row[0] = net_start + 2 * gbus[i];
829: col[0] = idx + 2;
830: col[1] = idx + 4;
831: col[2] = idx + 5;
832: val[0] = -dIGi_ddelta;
833: val[1] = -dIGi_dId;
834: val[2] = -dIGi_dIq;
835: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
837: /* fnet[2*gbus[i]+1] -= IGr; */
838: row[0] = net_start + 2 * gbus[i] + 1;
839: col[0] = idx + 2;
840: col[1] = idx + 4;
841: col[2] = idx + 5;
842: val[0] = -dIGr_ddelta;
843: val[1] = -dIGr_dId;
844: val[2] = -dIGr_dIq;
845: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
847: Vm = PetscSqrtScalar(Vd * Vd + Vq * Vq);
849: /* fgen[idx+6] = (-KE[i]*Efd - SE + VR)/TE[i]; */
850: /* SE = k1[i]*PetscExpScalar(k2[i]*Efd); */
852: dSE_dEfd = k1[i] * k2[i] * PetscExpScalar(k2[i] * Efd);
854: row[0] = idx + 6;
855: col[0] = idx + 6;
856: col[1] = idx + 8;
857: val[0] = (-KE[i] - dSE_dEfd) / TE[i];
858: val[1] = 1 / TE[i];
859: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
861: /* Exciter differential equations */
863: /* fgen[idx+7] = (-RF + KF[i]*Efd/TF[i])/TF[i]; */
864: row[0] = idx + 7;
865: col[0] = idx + 6;
866: col[1] = idx + 7;
867: val[0] = (KF[i] / TF[i]) / TF[i];
868: val[1] = -1 / TF[i];
869: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
871: /* fgen[idx+8] = (-VR + KA[i]*RF - KA[i]*KF[i]*Efd/TF[i] + KA[i]*(Vref[i] - Vm))/TA[i]; */
872: /* Vm = (Vd^2 + Vq^2)^0.5; */
874: row[0] = idx + 8;
875: if (VRatmax[i]) {
876: col[0] = idx + 8;
877: val[0] = 1.0;
878: PetscCall(MatSetValues(J, 1, row, 1, col, val, INSERT_VALUES));
879: } else if (VRatmin[i]) {
880: col[0] = idx + 8;
881: val[0] = -1.0;
882: PetscCall(MatSetValues(J, 1, row, 1, col, val, INSERT_VALUES));
883: } else {
884: dVm_dVd = Vd / Vm;
885: dVm_dVq = Vq / Vm;
886: dVm_dVr = dVm_dVd * dVd_dVr + dVm_dVq * dVq_dVr;
887: dVm_dVi = dVm_dVd * dVd_dVi + dVm_dVq * dVq_dVi;
888: row[0] = idx + 8;
889: col[0] = idx + 6;
890: col[1] = idx + 7;
891: col[2] = idx + 8;
892: val[0] = -(KA[i] * KF[i] / TF[i]) / TA[i];
893: val[1] = KA[i] / TA[i];
894: val[2] = -1 / TA[i];
895: col[3] = net_start + 2 * gbus[i];
896: col[4] = net_start + 2 * gbus[i] + 1;
897: val[3] = -KA[i] * dVm_dVr / TA[i];
898: val[4] = -KA[i] * dVm_dVi / TA[i];
899: PetscCall(MatSetValues(J, 1, row, 5, col, val, INSERT_VALUES));
900: }
901: idx = idx + 9;
902: }
904: for (i = 0; i < nbus; i++) {
905: PetscCall(MatGetRow(user->Ybus, 2 * i, &ncols, &cols, &yvals));
906: row[0] = net_start + 2 * i;
907: for (PetscInt k = 0; k < ncols; k++) {
908: col[k] = net_start + cols[k];
909: val[k] = yvals[k];
910: }
911: PetscCall(MatSetValues(J, 1, row, ncols, col, val, INSERT_VALUES));
912: PetscCall(MatRestoreRow(user->Ybus, 2 * i, &ncols, &cols, &yvals));
914: PetscCall(MatGetRow(user->Ybus, 2 * i + 1, &ncols, &cols, &yvals));
915: row[0] = net_start + 2 * i + 1;
916: for (PetscInt k = 0; k < ncols; k++) {
917: col[k] = net_start + cols[k];
918: val[k] = yvals[k];
919: }
920: PetscCall(MatSetValues(J, 1, row, ncols, col, val, INSERT_VALUES));
921: PetscCall(MatRestoreRow(user->Ybus, 2 * i + 1, &ncols, &cols, &yvals));
922: }
924: PetscCall(MatAssemblyBegin(J, MAT_FLUSH_ASSEMBLY));
925: PetscCall(MatAssemblyEnd(J, MAT_FLUSH_ASSEMBLY));
927: PetscCall(VecGetArrayRead(user->V0, &v0));
928: for (i = 0; i < nload; i++) {
929: Vr = xnet[2 * lbus[i]]; /* Real part of load bus voltage */
930: Vi = xnet[2 * lbus[i] + 1]; /* Imaginary part of the load bus voltage */
931: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
932: Vm2 = Vm * Vm;
933: Vm4 = Vm2 * Vm2;
934: Vm0 = PetscSqrtScalar(v0[2 * lbus[i]] * v0[2 * lbus[i]] + v0[2 * lbus[i] + 1] * v0[2 * lbus[i] + 1]);
935: PD = QD = 0.0;
936: dPD_dVr = dPD_dVi = dQD_dVr = dQD_dVi = 0.0;
937: for (PetscInt k = 0; k < ld_nsegsp[i]; k++) {
938: PD += ld_alphap[k] * PD0[i] * PetscPowScalar(Vm / Vm0, ld_betap[k]);
939: dPD_dVr += ld_alphap[k] * ld_betap[k] * PD0[i] * PetscPowScalar(1 / Vm0, ld_betap[k]) * Vr * PetscPowScalar(Vm, ld_betap[k] - 2);
940: dPD_dVi += ld_alphap[k] * ld_betap[k] * PD0[i] * PetscPowScalar(1 / Vm0, ld_betap[k]) * Vi * PetscPowScalar(Vm, ld_betap[k] - 2);
941: }
942: for (PetscInt k = 0; k < ld_nsegsq[i]; k++) {
943: QD += ld_alphaq[k] * QD0[i] * PetscPowScalar(Vm / Vm0, ld_betaq[k]);
944: dQD_dVr += ld_alphaq[k] * ld_betaq[k] * QD0[i] * PetscPowScalar(1 / Vm0, ld_betaq[k]) * Vr * PetscPowScalar(Vm, ld_betaq[k] - 2);
945: dQD_dVi += ld_alphaq[k] * ld_betaq[k] * QD0[i] * PetscPowScalar(1 / Vm0, ld_betaq[k]) * Vi * PetscPowScalar(Vm, ld_betaq[k] - 2);
946: }
948: /* IDr = (PD*Vr + QD*Vi)/Vm2; */
949: /* IDi = (-QD*Vr + PD*Vi)/Vm2; */
951: dIDr_dVr = (dPD_dVr * Vr + dQD_dVr * Vi + PD) / Vm2 - ((PD * Vr + QD * Vi) * 2 * Vr) / Vm4;
952: dIDr_dVi = (dPD_dVi * Vr + dQD_dVi * Vi + QD) / Vm2 - ((PD * Vr + QD * Vi) * 2 * Vi) / Vm4;
954: dIDi_dVr = (-dQD_dVr * Vr + dPD_dVr * Vi - QD) / Vm2 - ((-QD * Vr + PD * Vi) * 2 * Vr) / Vm4;
955: dIDi_dVi = (-dQD_dVi * Vr + dPD_dVi * Vi + PD) / Vm2 - ((-QD * Vr + PD * Vi) * 2 * Vi) / Vm4;
957: /* fnet[2*lbus[i]] += IDi; */
958: row[0] = net_start + 2 * lbus[i];
959: col[0] = net_start + 2 * lbus[i];
960: col[1] = net_start + 2 * lbus[i] + 1;
961: val[0] = dIDi_dVr;
962: val[1] = dIDi_dVi;
963: PetscCall(MatSetValues(J, 1, row, 2, col, val, ADD_VALUES));
964: /* fnet[2*lbus[i]+1] += IDr; */
965: row[0] = net_start + 2 * lbus[i] + 1;
966: col[0] = net_start + 2 * lbus[i];
967: col[1] = net_start + 2 * lbus[i] + 1;
968: val[0] = dIDr_dVr;
969: val[1] = dIDr_dVi;
970: PetscCall(MatSetValues(J, 1, row, 2, col, val, ADD_VALUES));
971: }
972: PetscCall(VecRestoreArrayRead(user->V0, &v0));
974: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
975: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
977: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
979: PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
980: PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
981: PetscFunctionReturn(PETSC_SUCCESS);
982: }
984: /*
985: J = [I, 0
986: dg_dx, dg_dy]
987: */
988: PetscErrorCode AlgJacobian(SNES snes, Vec X, Mat A, Mat B, PetscCtx ctx)
989: {
990: Userctx *user = (Userctx *)ctx;
992: PetscFunctionBegin;
993: PetscCall(ResidualJacobian(X, A, B, ctx));
994: PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
995: PetscCall(MatZeroRowsIS(A, user->is_diff, 1.0, NULL, NULL));
996: PetscFunctionReturn(PETSC_SUCCESS);
997: }
999: /*
1000: J = [-df_dx, -df_dy
1001: dg_dx, dg_dy]
1002: */
1004: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec X, Mat A, Mat B, PetscCtx ctx)
1005: {
1006: Userctx *user = (Userctx *)ctx;
1008: PetscFunctionBegin;
1009: user->t = t;
1011: PetscCall(ResidualJacobian(X, A, B, user));
1012: PetscFunctionReturn(PETSC_SUCCESS);
1013: }
1015: /*
1016: J = [df_dx-aI, df_dy
1017: dg_dx, dg_dy]
1018: */
1020: PetscErrorCode IJacobian(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat A, Mat B, Userctx *user)
1021: {
1022: PetscScalar atmp = (PetscScalar)a;
1023: PetscInt i, row;
1025: PetscFunctionBegin;
1026: user->t = t;
1028: PetscCall(RHSJacobian(ts, t, X, A, B, user));
1029: PetscCall(MatScale(B, -1.0));
1030: for (i = 0; i < ngen; i++) {
1031: row = 9 * i;
1032: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1033: row = 9 * i + 1;
1034: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1035: row = 9 * i + 2;
1036: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1037: row = 9 * i + 3;
1038: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1039: row = 9 * i + 6;
1040: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1041: row = 9 * i + 7;
1042: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1043: row = 9 * i + 8;
1044: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1045: }
1046: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
1047: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
1048: PetscFunctionReturn(PETSC_SUCCESS);
1049: }
1051: int main(int argc, char **argv)
1052: {
1053: TS ts;
1054: SNES snes_alg;
1055: PetscMPIInt size;
1056: Userctx user;
1057: PetscViewer Xview, Ybusview, viewer;
1058: Vec X, F_alg;
1059: Mat J, A;
1060: PetscInt i, idx, *idx2;
1061: Vec Xdot;
1062: PetscScalar *x, *mat, *amat;
1063: const PetscScalar *rmat;
1064: Vec vatol;
1065: PetscInt *direction;
1066: PetscBool *terminate;
1067: const PetscInt *idx3;
1068: PetscScalar *vatoli;
1070: PetscFunctionBeginUser;
1071: PetscCall(PetscInitialize(&argc, &argv, "petscoptions", help));
1072: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
1073: PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Only for sequential runs");
1075: user.neqs_gen = 9 * ngen; /* # eqs. for generator subsystem */
1076: user.neqs_net = 2 * nbus; /* # eqs. for network subsystem */
1077: user.neqs_pgrid = user.neqs_gen + user.neqs_net;
1079: /* Create indices for differential and algebraic equations */
1081: PetscCall(PetscMalloc1(7 * ngen, &idx2));
1082: for (i = 0; i < ngen; i++) {
1083: idx2[7 * i] = 9 * i;
1084: idx2[7 * i + 1] = 9 * i + 1;
1085: idx2[7 * i + 2] = 9 * i + 2;
1086: idx2[7 * i + 3] = 9 * i + 3;
1087: idx2[7 * i + 4] = 9 * i + 6;
1088: idx2[7 * i + 5] = 9 * i + 7;
1089: idx2[7 * i + 6] = 9 * i + 8;
1090: }
1091: PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, 7 * ngen, idx2, PETSC_COPY_VALUES, &user.is_diff));
1092: PetscCall(ISComplement(user.is_diff, 0, user.neqs_pgrid, &user.is_alg));
1093: PetscCall(PetscFree(idx2));
1095: /* Read initial voltage vector and Ybus */
1096: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "X.bin", FILE_MODE_READ, &Xview));
1097: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "Ybus.bin", FILE_MODE_READ, &Ybusview));
1099: PetscCall(VecCreate(PETSC_COMM_WORLD, &user.V0));
1100: PetscCall(VecSetSizes(user.V0, PETSC_DECIDE, user.neqs_net));
1101: PetscCall(VecLoad(user.V0, Xview));
1103: PetscCall(MatCreate(PETSC_COMM_WORLD, &user.Ybus));
1104: PetscCall(MatSetSizes(user.Ybus, PETSC_DECIDE, PETSC_DECIDE, user.neqs_net, user.neqs_net));
1105: PetscCall(MatSetType(user.Ybus, MATBAIJ));
1106: /* PetscCall(MatSetBlockSize(user.Ybus,2)); */
1107: PetscCall(MatLoad(user.Ybus, Ybusview));
1109: /* Set run time options */
1110: PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Transient stability fault options", "");
1111: {
1112: user.tfaulton = 1.0;
1113: user.tfaultoff = 1.2;
1114: user.Rfault = 0.0001;
1115: user.setisdiff = PETSC_FALSE;
1116: user.semiexplicit = PETSC_FALSE;
1117: user.faultbus = 8;
1118: PetscCall(PetscOptionsReal("-tfaulton", "", "", user.tfaulton, &user.tfaulton, NULL));
1119: PetscCall(PetscOptionsReal("-tfaultoff", "", "", user.tfaultoff, &user.tfaultoff, NULL));
1120: PetscCall(PetscOptionsInt("-faultbus", "", "", user.faultbus, &user.faultbus, NULL));
1121: user.t0 = 0.0;
1122: user.tmax = 5.0;
1123: PetscCall(PetscOptionsReal("-t0", "", "", user.t0, &user.t0, NULL));
1124: PetscCall(PetscOptionsReal("-tmax", "", "", user.tmax, &user.tmax, NULL));
1125: PetscCall(PetscOptionsBool("-setisdiff", "", "", user.setisdiff, &user.setisdiff, NULL));
1126: PetscCall(PetscOptionsBool("-dae_semiexplicit", "", "", user.semiexplicit, &user.semiexplicit, NULL));
1127: }
1128: PetscOptionsEnd();
1130: PetscCall(PetscViewerDestroy(&Xview));
1131: PetscCall(PetscViewerDestroy(&Ybusview));
1133: /* Create DMs for generator and network subsystems */
1134: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, user.neqs_gen, 1, 1, NULL, &user.dmgen));
1135: PetscCall(DMSetOptionsPrefix(user.dmgen, "dmgen_"));
1136: PetscCall(DMSetFromOptions(user.dmgen));
1137: PetscCall(DMSetUp(user.dmgen));
1138: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, user.neqs_net, 1, 1, NULL, &user.dmnet));
1139: PetscCall(DMSetOptionsPrefix(user.dmnet, "dmnet_"));
1140: PetscCall(DMSetFromOptions(user.dmnet));
1141: PetscCall(DMSetUp(user.dmnet));
1142: /* Create a composite DM packer and add the two DMs */
1143: PetscCall(DMCompositeCreate(PETSC_COMM_WORLD, &user.dmpgrid));
1144: PetscCall(DMSetOptionsPrefix(user.dmpgrid, "pgrid_"));
1145: PetscCall(DMCompositeAddDM(user.dmpgrid, user.dmgen));
1146: PetscCall(DMCompositeAddDM(user.dmpgrid, user.dmnet));
1148: PetscCall(DMCreateGlobalVector(user.dmpgrid, &X));
1150: PetscCall(MatCreate(PETSC_COMM_WORLD, &J));
1151: PetscCall(MatSetSizes(J, PETSC_DECIDE, PETSC_DECIDE, user.neqs_pgrid, user.neqs_pgrid));
1152: PetscCall(MatSetFromOptions(J));
1153: PetscCall(PreallocateJacobian(J, &user));
1155: /* Create matrix to save solutions at each time step */
1156: user.stepnum = 0;
1158: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, user.neqs_pgrid + 1, 1002, NULL, &user.Sol));
1159: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1160: Create timestepping solver context
1161: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
1162: PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
1163: PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
1164: if (user.semiexplicit) {
1165: PetscCall(TSSetType(ts, TSRK));
1166: PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &user));
1167: PetscCall(TSSetRHSJacobian(ts, J, J, RHSJacobian, &user));
1168: } else {
1169: PetscCall(TSSetType(ts, TSCN));
1170: PetscCall(TSSetEquationType(ts, TS_EQ_DAE_IMPLICIT_INDEX1));
1171: PetscCall(TSSetIFunction(ts, NULL, (TSIFunctionFn *)IFunction, &user));
1172: PetscCall(TSSetIJacobian(ts, J, J, (TSIJacobianFn *)IJacobian, &user));
1173: }
1174: PetscCall(TSSetApplicationContext(ts, &user));
1176: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1177: Set initial conditions
1178: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
1179: PetscCall(SetInitialGuess(X, &user));
1180: /* Just to set up the Jacobian structure */
1182: PetscCall(VecDuplicate(X, &Xdot));
1183: PetscCall(IJacobian(ts, 0.0, X, Xdot, 0.0, J, J, &user));
1184: PetscCall(VecDestroy(&Xdot));
1186: /* Save initial solution */
1188: idx = user.stepnum * (user.neqs_pgrid + 1);
1189: PetscCall(MatDenseGetArray(user.Sol, &mat));
1190: PetscCall(VecGetArray(X, &x));
1192: mat[idx] = 0.0;
1194: PetscCall(PetscArraycpy(mat + idx + 1, x, user.neqs_pgrid));
1195: PetscCall(MatDenseRestoreArray(user.Sol, &mat));
1196: PetscCall(VecRestoreArray(X, &x));
1197: user.stepnum++;
1199: PetscCall(TSSetMaxTime(ts, user.tmax));
1200: PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
1201: PetscCall(TSSetTimeStep(ts, 0.01));
1202: PetscCall(TSSetFromOptions(ts));
1203: PetscCall(TSSetPostStep(ts, SaveSolution));
1204: PetscCall(TSSetSolution(ts, X));
1206: PetscCall(PetscMalloc1(2 * ngen + 2, &direction));
1207: PetscCall(PetscMalloc1(2 * ngen + 2, &terminate));
1208: direction[0] = direction[1] = 1;
1209: terminate[0] = terminate[1] = PETSC_FALSE;
1210: for (i = 0; i < ngen; i++) {
1211: direction[2 + 2 * i] = -1;
1212: direction[2 + 2 * i + 1] = 1;
1213: terminate[2 + 2 * i] = terminate[2 + 2 * i + 1] = PETSC_FALSE;
1214: }
1216: PetscCall(TSSetEventHandler(ts, 2 * ngen + 2, direction, terminate, EventFunction, PostEventFunction, (void *)&user));
1218: if (user.semiexplicit) {
1219: /* Use a semi-explicit approach with the time-stepping done by an explicit method and the
1220: algrebraic part solved via PostStage and PostEvaluate callbacks
1221: */
1222: PetscCall(TSSetType(ts, TSRK));
1223: PetscCall(TSSetPostStage(ts, PostStage));
1224: PetscCall(TSSetPostEvaluate(ts, PostEvaluate));
1225: }
1227: if (user.setisdiff) {
1228: /* Create vector of absolute tolerances and set the algebraic part to infinity */
1229: PetscCall(VecDuplicate(X, &vatol));
1230: PetscCall(VecSet(vatol, 100000.0));
1231: PetscCall(VecGetArray(vatol, &vatoli));
1232: PetscCall(ISGetIndices(user.is_diff, &idx3));
1233: for (PetscInt k = 0; k < 7 * ngen; k++) vatoli[idx3[k]] = 1e-2;
1234: PetscCall(VecRestoreArray(vatol, &vatoli));
1235: }
1237: /* Create the nonlinear solver for solving the algebraic system */
1238: /* Note that although the algebraic system needs to be solved only for
1239: Idq and V, we reuse the entire system including xgen. The xgen
1240: variables are held constant by setting their residuals to 0 and
1241: putting a 1 on the Jacobian diagonal for xgen rows
1242: */
1244: PetscCall(VecDuplicate(X, &F_alg));
1245: PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes_alg));
1246: PetscCall(SNESSetFunction(snes_alg, F_alg, AlgFunction, &user));
1247: PetscCall(SNESSetJacobian(snes_alg, J, J, AlgJacobian, &user));
1248: PetscCall(SNESSetFromOptions(snes_alg));
1250: user.snes_alg = snes_alg;
1252: /* Solve */
1253: PetscCall(TSSolve(ts, X));
1255: PetscCall(MatAssemblyBegin(user.Sol, MAT_FINAL_ASSEMBLY));
1256: PetscCall(MatAssemblyEnd(user.Sol, MAT_FINAL_ASSEMBLY));
1258: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, user.neqs_pgrid + 1, user.stepnum, NULL, &A));
1259: PetscCall(MatDenseGetArrayRead(user.Sol, &rmat));
1260: PetscCall(MatDenseGetArray(A, &amat));
1261: PetscCall(PetscArraycpy(amat, rmat, user.stepnum * (user.neqs_pgrid + 1)));
1262: PetscCall(MatDenseRestoreArray(A, &amat));
1263: PetscCall(MatDenseRestoreArrayRead(user.Sol, &rmat));
1264: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, "out.bin", FILE_MODE_WRITE, &viewer));
1265: PetscCall(MatView(A, viewer));
1266: PetscCall(PetscViewerDestroy(&viewer));
1267: PetscCall(MatDestroy(&A));
1269: PetscCall(PetscFree(direction));
1270: PetscCall(PetscFree(terminate));
1271: PetscCall(SNESDestroy(&snes_alg));
1272: PetscCall(VecDestroy(&F_alg));
1273: PetscCall(MatDestroy(&J));
1274: PetscCall(MatDestroy(&user.Ybus));
1275: PetscCall(MatDestroy(&user.Sol));
1276: PetscCall(VecDestroy(&X));
1277: PetscCall(VecDestroy(&user.V0));
1278: PetscCall(DMDestroy(&user.dmgen));
1279: PetscCall(DMDestroy(&user.dmnet));
1280: PetscCall(DMDestroy(&user.dmpgrid));
1281: PetscCall(ISDestroy(&user.is_diff));
1282: PetscCall(ISDestroy(&user.is_alg));
1283: PetscCall(TSDestroy(&ts));
1284: if (user.setisdiff) PetscCall(VecDestroy(&vatol));
1285: PetscCall(PetscFinalize());
1286: return 0;
1287: }
1289: /*TEST
1291: build:
1292: requires: double !complex !defined(PETSC_USE_64BIT_INDICES)
1294: test:
1295: suffix: implicit
1296: args: -ts_monitor -snes_monitor_short
1297: localrunfiles: petscoptions X.bin Ybus.bin
1299: test:
1300: suffix: semiexplicit
1301: args: -ts_monitor -dae_semiexplicit -snes_error_if_not_converged -ts_rk_type 2a
1302: localrunfiles: petscoptions X.bin Ybus.bin
1304: test:
1305: suffix: steprestart
1306: # needs ARKIMEX methods with all implicit stages since the mass matrix is not the identity
1307: args: -ts_monitor -snes_monitor_short -ts_type arkimex -ts_arkimex_type prssp2
1308: localrunfiles: petscoptions X.bin Ybus.bin
1310: TEST*/