Actual source code: ex9bus.c
1: static char help[] = "Power grid stability analysis of WECC 9 bus system.\n\
2: This example is based on the 9-bus (node) example given in the book Power\n\
3: Systems Dynamics and Stability (Chapter 7) by P. Sauer and M. A. Pai.\n\
4: The power grid in this example consists of 9 buses (nodes), 3 generators,\n\
5: 3 loads, and 9 transmission lines. The network equations are written\n\
6: in current balance form using rectangular coordinates.\n\n";
8: /*
9: The equations for the stability analysis are described by the DAE
11: \dot{x} = f(x,y,t)
12: 0 = g(x,y,t)
14: where the generators are described by differential equations, while the algebraic
15: constraints define the network equations.
17: Each generator is described by a 4th order model (four differential equations) for its electrical
18: and mechanical dynamics. Each generator also has an exciter system modeled by 3rd order
19: differential equations describing the exciter, voltage regulator, and feedback stabilizer
20: mechanism.
22: The network equations are described by nodal current balance equations.
23: I(x,y) - Y*V = 0
25: where:
26: I(x,y) is the current injected from generators and loads.
27: Y is the admittance matrix, and
28: V is the voltage vector
29: */
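/*
   In this example the differential variables x are, for each generator, [Eqp, Edp, delta, w]
   plus the exciter states [Efd, RF, VR] (7 per generator), while the algebraic variables y are
   the generator stator currents [Id, Iq] (2 per generator) and the real and imaginary parts of
   the bus voltages (2 per bus). The index sets is_diff and is_alg in main() encode this split.
*/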
31: /*
32: Include "petscts.h" so that we can use TS solvers. Note that this
33: file automatically includes:
34: petscsys.h - base PETSc routines petscvec.h - vectors
35: petscmat.h - matrices
36: petscis.h - index sets petscksp.h - Krylov subspace methods
37: petscviewer.h - viewers petscpc.h - preconditioners
38: petscksp.h - linear solvers
39: */
41: #include <petscts.h>
42: #include <petscdm.h>
43: #include <petscdmda.h>
44: #include <petscdmcomposite.h>
46: #define freq 60 /* Nominal system frequency (Hz) */
47: #define w_s (2 * PETSC_PI * freq) /* Synchronous speed (rad/s) */
49: /* Sizes and indices */
50: const PetscInt nbus = 9; /* Number of network buses */
51: const PetscInt ngen = 3; /* Number of generators */
52: const PetscInt nload = 3; /* Number of loads */
53: const PetscInt gbus[3] = {0, 1, 2}; /* Buses at which generators are incident */
54: const PetscInt lbus[3] = {4, 5, 7}; /* Buses at which loads are incident */
56: /* Generator real and reactive powers (found via loadflow) */
57: const PetscScalar PG[3] = {0.716786142395021, 1.630000000000000, 0.850000000000000};
58: const PetscScalar QG[3] = {0.270702180178785, 0.066120127797275, -0.108402221791588};
59: /* Generator constants */
60: const PetscScalar H[3] = {23.64, 6.4, 3.01}; /* Inertia constant */
61: const PetscScalar Rs[3] = {0.0, 0.0, 0.0}; /* Stator Resistance */
62: const PetscScalar Xd[3] = {0.146, 0.8958, 1.3125}; /* d-axis reactance */
63: const PetscScalar Xdp[3] = {0.0608, 0.1198, 0.1813}; /* d-axis transient reactance */
64: const PetscScalar Xq[3] = {0.4360, 0.8645, 1.2578}; /* q-axis reactance; Xq for generator 1 is set to 0.4360 instead of the 0.0969 given in the text */
65: const PetscScalar Xqp[3] = {0.0969, 0.1969, 0.25}; /* q-axis transient reactance */
66: const PetscScalar Td0p[3] = {8.96, 6.0, 5.89}; /* d-axis open circuit time constant */
67: const PetscScalar Tq0p[3] = {0.31, 0.535, 0.6}; /* q-axis open circuit time constant */
68: PetscScalar M[3]; /* M = 2*H/w_s */
69: PetscScalar D[3]; /* D = 0.1*M */
71: PetscScalar TM[3]; /* Mechanical Torque */
72: /* Exciter system constants */
73: const PetscScalar KA[3] = {20.0, 20.0, 20.0}; /* Voltage regulator gain constant */
74: const PetscScalar TA[3] = {0.2, 0.2, 0.2}; /* Voltage regulator time constant */
75: const PetscScalar KE[3] = {1.0, 1.0, 1.0}; /* Exciter gain constant */
76: const PetscScalar TE[3] = {0.314, 0.314, 0.314}; /* Exciter time constant */
77: const PetscScalar KF[3] = {0.063, 0.063, 0.063}; /* Feedback stabilizer gain constant */
78: const PetscScalar TF[3] = {0.35, 0.35, 0.35}; /* Feedback stabilizer time constant */
79: const PetscScalar k1[3] = {0.0039, 0.0039, 0.0039};
80: const PetscScalar k2[3] = {1.555, 1.555, 1.555}; /* k1 and k2 for calculating the saturation function SE = k1*exp(k2*Efd) */
81: const PetscScalar VRMIN[3] = {-4.0, -4.0, -4.0};
82: const PetscScalar VRMAX[3] = {7.0, 7.0, 7.0};
83: PetscInt VRatmin[3];
84: PetscInt VRatmax[3];
86: PetscScalar Vref[3];
87: /* Load constants
88: We use a composite load model that describes the real and reactive power drawn by each load at each time instant as follows
89: P(t) = \sum_{i=0}^{ld_nsegsp-1} ld_alphap[i]*P_D0*(V_m(t)/V_m0)^{ld_betap[i]}
90: Q(t) = \sum_{i=0}^{ld_nsegsq-1} ld_alphaq[i]*Q_D0*(V_m(t)/V_m0)^{ld_betaq[i]}
91: where
92: ld_nsegsp,ld_nsegsq - Number of individual load models for the real and reactive power loads
93: ld_alphap,ld_alphaq - Percentage contributions (weights) of the loads
94: P_D0 - Real power load
95: Q_D0 - Reactive power load
96: V_m(t) - Voltage magnitude at time t
97: V_m0 - Voltage magnitude at t = 0
98: ld_betap, ld_betaq - exponents describing the load model for real and reactive part
100: Note: All loads have the same characteristic currently.
101: */
102: const PetscScalar PD0[3] = {1.25, 0.9, 1.0};
103: const PetscScalar QD0[3] = {0.5, 0.3, 0.35};
104: const PetscInt ld_nsegsp[3] = {3, 3, 3};
105: const PetscScalar ld_alphap[3] = {1.0, 0.0, 0.0};
106: const PetscScalar ld_betap[3] = {2.0, 1.0, 0.0};
107: const PetscInt ld_nsegsq[3] = {3, 3, 3};
108: const PetscScalar ld_alphaq[3] = {1.0, 0.0, 0.0};
109: const PetscScalar ld_betaq[3] = {2.0, 1.0, 0.0};
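/* With ld_alphap = {1,0,0} and ld_betap = {2,1,0} (and likewise ld_alphaq, ld_betaq), only the
   quadratic term is active, so each load behaves as a constant impedance:
   P(t) = PD0*(Vm(t)/Vm0)^2 and Q(t) = QD0*(Vm(t)/Vm0)^2 */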
111: typedef struct {
112: DM dmgen, dmnet; /* DMs to manage generator and network subsystem */
113: DM dmpgrid; /* Composite DM to manage the entire power grid */
114: Mat Ybus; /* Network admittance matrix */
115: Vec V0; /* Initial voltage vector (Power flow solution) */
116: PetscReal tfaulton, tfaultoff; /* Fault on and off times */
117: PetscInt faultbus; /* Fault bus */
118: PetscScalar Rfault;
119: PetscReal t0, tmax;
120: PetscInt neqs_gen, neqs_net, neqs_pgrid;
121: Mat Sol; /* Matrix to save solution at each time step */
122: PetscInt stepnum;
123: PetscReal t;
124: SNES snes_alg;
125: IS is_diff; /* indices for differential equations */
126: IS is_alg; /* indices for algebraic equations */
127: PetscBool setisdiff; /* TS computes truncation error based only on the differential variables */
128: PetscBool semiexplicit; /* If the flag is set then a semi-explicit method using TSRK is used */
129: } Userctx;
131: /*
132: The first two events are for fault on and off, respectively. The following events are
133: to check the min/max limits on the state variable VR. A non-windup limiter is used for
134: the VR limits.
135: */
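/* The event vector has 2*ngen + 2 entries: fvalue[0] and fvalue[1] cross zero at the fault-on
   and fault-off times; for generator i, fvalue[2+2*i] monitors the VRMAX limit and
   fvalue[2+2*i+1] the VRMIN limit (or, once a limit is active, the sign of dVR/dt used to
   release the variable). */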
136: PetscErrorCode EventFunction(TS ts, PetscReal t, Vec X, PetscReal *fvalue, void *ctx)
137: {
138: Userctx *user = (Userctx *)ctx;
139: Vec Xgen, Xnet;
140: PetscInt i, idx = 0;
141: const PetscScalar *xgen, *xnet;
142: PetscScalar Efd, RF, VR, Vr, Vi, Vm;
144: PetscFunctionBegin;
145: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
146: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
148: PetscCall(VecGetArrayRead(Xgen, &xgen));
149: PetscCall(VecGetArrayRead(Xnet, &xnet));
151: /* Event for fault-on time */
152: fvalue[0] = t - user->tfaulton;
153: /* Event for fault-off time */
154: fvalue[1] = t - user->tfaultoff;
156: for (i = 0; i < ngen; i++) {
157: Efd = xgen[idx + 6];
158: RF = xgen[idx + 7];
159: VR = xgen[idx + 8];
161: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
162: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
163: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
165: if (!VRatmax[i]) {
166: fvalue[2 + 2 * i] = PetscRealPart(VRMAX[i] - VR);
167: } else {
168: fvalue[2 + 2 * i] = PetscRealPart((VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i]);
169: }
170: if (!VRatmin[i]) {
171: fvalue[2 + 2 * i + 1] = PetscRealPart(VRMIN[i] - VR);
172: } else {
173: fvalue[2 + 2 * i + 1] = PetscRealPart((VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i]);
174: }
175: idx = idx + 9;
176: }
177: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
178: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
180: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
181: PetscFunctionReturn(PETSC_SUCCESS);
182: }
184: PetscErrorCode PostEventFunction(TS ts, PetscInt nevents, PetscInt event_list[], PetscReal t, Vec X, PetscBool forwardsolve, void *ctx)
185: {
186: Userctx *user = (Userctx *)ctx;
187: Vec Xgen, Xnet;
188: PetscScalar *xgen, *xnet;
189: PetscInt row_loc, col_loc;
190: PetscScalar val;
191: PetscInt i, idx = 0, event_num;
192: PetscScalar fvalue;
193: PetscScalar Efd, RF, VR;
194: PetscScalar Vr, Vi, Vm;
196: PetscFunctionBegin;
197: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
198: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
200: PetscCall(VecGetArray(Xgen, &xgen));
201: PetscCall(VecGetArray(Xnet, &xnet));
203: for (i = 0; i < nevents; i++) {
204: if (event_list[i] == 0) {
205: /* Apply disturbance - resistive fault at user->faultbus */
206: /* This is done by adding a shunt conductance 1/Rfault to the diagonal block of the
207: faulted bus in the Ybus matrix */
208: row_loc = 2 * user->faultbus;
209: col_loc = 2 * user->faultbus + 1; /* Location for G */
210: val = 1 / user->Rfault;
211: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
212: row_loc = 2 * user->faultbus + 1;
213: col_loc = 2 * user->faultbus; /* Location for G */
214: val = 1 / user->Rfault;
215: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
217: PetscCall(MatAssemblyBegin(user->Ybus, MAT_FINAL_ASSEMBLY));
218: PetscCall(MatAssemblyEnd(user->Ybus, MAT_FINAL_ASSEMBLY));
220: /* Solve the algebraic equations */
221: PetscCall(SNESSolve(user->snes_alg, NULL, X));
222: } else if (event_list[i] == 1) {
223: /* Remove the fault */
224: row_loc = 2 * user->faultbus;
225: col_loc = 2 * user->faultbus + 1;
226: val = -1 / user->Rfault;
227: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
228: row_loc = 2 * user->faultbus + 1;
229: col_loc = 2 * user->faultbus;
230: val = -1 / user->Rfault;
231: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
233: PetscCall(MatAssemblyBegin(user->Ybus, MAT_FINAL_ASSEMBLY));
234: PetscCall(MatAssemblyEnd(user->Ybus, MAT_FINAL_ASSEMBLY));
236: /* Solve the algebraic equations */
237: PetscCall(SNESSolve(user->snes_alg, NULL, X));
239: /* Check the VR derivatives and reset flags if needed */
240: for (i = 0; i < ngen; i++) {
241: Efd = xgen[idx + 6];
242: RF = xgen[idx + 7];
243: VR = xgen[idx + 8];
245: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
246: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
247: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
249: if (VRatmax[i]) {
250: fvalue = (VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i];
251: if (fvalue < 0) {
252: VRatmax[i] = 0;
253: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: dVR_dt went negative on fault clearing at time %g\n", i, (double)t));
254: }
255: }
256: if (VRatmin[i]) {
257: fvalue = (VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i];
259: if (fvalue > 0) {
260: VRatmin[i] = 0;
261: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: dVR_dt went positive on fault clearing at time %g\n", i, (double)t));
262: }
263: }
264: idx = idx + 9;
265: }
266: } else {
267: idx = (event_list[i] - 2) / 2;
268: event_num = (event_list[i] - 2) % 2;
269: if (event_num == 0) { /* Max VR */
270: if (!VRatmax[idx]) {
271: VRatmax[idx] = 1;
272: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: hit upper limit at time %g\n", idx, (double)t));
273: } else {
274: VRatmax[idx] = 0;
275: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: freeing variable as dVR_dt is negative at time %g\n", idx, (double)t));
276: }
277: } else {
278: if (!VRatmin[idx]) {
279: VRatmin[idx] = 1;
280: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: hit lower limit at time %g\n", idx, (double)t));
281: } else {
282: VRatmin[idx] = 0;
283: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: freeing variable as dVR_dt is positive at time %g\n", idx, (double)t));
284: }
285: }
286: }
287: }
289: PetscCall(VecRestoreArray(Xgen, &xgen));
290: PetscCall(VecRestoreArray(Xnet, &xnet));
292: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
293: PetscFunctionReturn(PETSC_SUCCESS);
294: }
296: /* Converts from machine frame (dq) to network (phase a real,imag) reference frame */
297: PetscErrorCode dq2ri(PetscScalar Fd, PetscScalar Fq, PetscScalar delta, PetscScalar *Fr, PetscScalar *Fi)
298: {
299: PetscFunctionBegin;
300: *Fr = Fd * PetscSinScalar(delta) + Fq * PetscCosScalar(delta);
301: *Fi = -Fd * PetscCosScalar(delta) + Fq * PetscSinScalar(delta);
302: PetscFunctionReturn(PETSC_SUCCESS);
303: }
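/* In matrix form the conversion is
     [Fr]   [ sin(delta)  cos(delta)] [Fd]
     [Fi] = [-cos(delta)  sin(delta)] [Fq]
   and ri2dq below applies the inverse (transpose) of this rotation. */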
305: /* Converts from network frame (phase a real,imag) to machine (dq) reference frame */
306: PetscErrorCode ri2dq(PetscScalar Fr, PetscScalar Fi, PetscScalar delta, PetscScalar *Fd, PetscScalar *Fq)
307: {
308: PetscFunctionBegin;
309: *Fd = Fr * PetscSinScalar(delta) - Fi * PetscCosScalar(delta);
310: *Fq = Fr * PetscCosScalar(delta) + Fi * PetscSinScalar(delta);
311: PetscFunctionReturn(PETSC_SUCCESS);
312: }
314: /* Saves the solution at each time to a matrix */
315: PetscErrorCode SaveSolution(TS ts)
316: {
317: Userctx *user;
318: Vec X;
319: const PetscScalar *x;
320: PetscScalar *mat;
321: PetscInt idx;
322: PetscReal t;
324: PetscFunctionBegin;
325: PetscCall(TSGetApplicationContext(ts, &user));
326: PetscCall(TSGetTime(ts, &t));
327: PetscCall(TSGetSolution(ts, &X));
328: idx = user->stepnum * (user->neqs_pgrid + 1);
329: PetscCall(MatDenseGetArray(user->Sol, &mat));
330: PetscCall(VecGetArrayRead(X, &x));
331: mat[idx] = t;
332: PetscCall(PetscArraycpy(mat + idx + 1, x, user->neqs_pgrid));
333: PetscCall(MatDenseRestoreArray(user->Sol, &mat));
334: PetscCall(VecRestoreArrayRead(X, &x));
335: user->stepnum++;
336: PetscFunctionReturn(PETSC_SUCCESS);
337: }
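/* Computes the steady-state initial values of all variables from the power flow solution V0:
   the generator terminal currents from (PG,QG), the rotor angle delta, the dq-axis currents
   and voltages, the transient EMFs Edp and Eqp, the mechanical torque TM, and the exciter
   states Efd, RF, VR along with the reference voltage Vref, so that every differential
   equation starts at equilibrium. */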
339: PetscErrorCode SetInitialGuess(Vec X, Userctx *user)
340: {
341: Vec Xgen, Xnet;
342: PetscScalar *xgen;
343: const PetscScalar *xnet;
344: PetscInt i, idx = 0;
345: PetscScalar Vr, Vi, IGr, IGi, Vm, Vm2;
346: PetscScalar Eqp, Edp, delta;
347: PetscScalar Efd, RF, VR; /* Exciter variables */
348: PetscScalar Id, Iq; /* Generator dq axis currents */
349: PetscScalar theta, Vd, Vq, SE;
351: PetscFunctionBegin;
352: M[0] = 2 * H[0] / w_s;
353: M[1] = 2 * H[1] / w_s;
354: M[2] = 2 * H[2] / w_s;
355: D[0] = 0.1 * M[0];
356: D[1] = 0.1 * M[1];
357: D[2] = 0.1 * M[2];
359: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
361: /* Network subsystem initialization */
362: PetscCall(VecCopy(user->V0, Xnet));
364: /* Generator subsystem initialization */
365: PetscCall(VecGetArrayWrite(Xgen, &xgen));
366: PetscCall(VecGetArrayRead(Xnet, &xnet));
368: for (i = 0; i < ngen; i++) {
369: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
370: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
371: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
372: Vm2 = Vm * Vm;
373: IGr = (Vr * PG[i] + Vi * QG[i]) / Vm2;
374: IGi = (Vi * PG[i] - Vr * QG[i]) / Vm2;
376: delta = PetscAtan2Real(Vi + Xq[i] * IGr, Vr - Xq[i] * IGi); /* Machine angle */
378: theta = PETSC_PI / 2.0 - delta;
380: Id = IGr * PetscCosScalar(theta) - IGi * PetscSinScalar(theta); /* d-axis stator current */
381: Iq = IGr * PetscSinScalar(theta) + IGi * PetscCosScalar(theta); /* q-axis stator current */
383: Vd = Vr * PetscCosScalar(theta) - Vi * PetscSinScalar(theta);
384: Vq = Vr * PetscSinScalar(theta) + Vi * PetscCosScalar(theta);
386: Edp = Vd + Rs[i] * Id - Xqp[i] * Iq; /* d-axis transient EMF */
387: Eqp = Vq + Rs[i] * Iq + Xdp[i] * Id; /* q-axis transient EMF */
389: TM[i] = PG[i];
391: /* The generator variables are ordered as [Eqp,Edp,delta,w,Id,Iq] followed by the exciter variables [Efd,RF,VR] */
392: xgen[idx] = Eqp;
393: xgen[idx + 1] = Edp;
394: xgen[idx + 2] = delta;
395: xgen[idx + 3] = w_s;
397: idx = idx + 4;
399: xgen[idx] = Id;
400: xgen[idx + 1] = Iq;
402: idx = idx + 2;
404: /* Exciter */
405: Efd = Eqp + (Xd[i] - Xdp[i]) * Id;
406: SE = k1[i] * PetscExpScalar(k2[i] * Efd);
407: VR = KE[i] * Efd + SE;
408: RF = KF[i] * Efd / TF[i];
410: xgen[idx] = Efd;
411: xgen[idx + 1] = RF;
412: xgen[idx + 2] = VR;
414: Vref[i] = Vm + (VR / KA[i]);
416: VRatmin[i] = VRatmax[i] = 0;
418: idx = idx + 3;
419: }
420: PetscCall(VecRestoreArrayWrite(Xgen, &xgen));
421: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
423: /* PetscCall(VecView(Xgen,0)); */
424: PetscCall(DMCompositeGather(user->dmpgrid, INSERT_VALUES, X, Xgen, Xnet));
425: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
426: PetscFunctionReturn(PETSC_SUCCESS);
427: }
429: /* Computes F = [f(x,y);g(x,y)] */
430: PetscErrorCode ResidualFunction(Vec X, Vec F, Userctx *user)
431: {
432: Vec Xgen, Xnet, Fgen, Fnet;
433: const PetscScalar *xgen, *xnet;
434: PetscScalar *fgen, *fnet;
435: PetscInt i, idx = 0;
436: PetscScalar Vr, Vi, Vm, Vm2;
437: PetscScalar Eqp, Edp, delta, w; /* Generator variables */
438: PetscScalar Efd, RF, VR; /* Exciter variables */
439: PetscScalar Id, Iq; /* Generator dq axis currents */
440: PetscScalar Vd, Vq, SE;
441: PetscScalar IGr, IGi, IDr, IDi;
442: PetscScalar Zdq_inv[4], det;
443: PetscScalar PD, QD, Vm0, *v0;
444: PetscInt k;
446: PetscFunctionBegin;
447: PetscCall(VecZeroEntries(F));
448: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
449: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Fgen, &Fnet));
450: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
451: PetscCall(DMCompositeScatter(user->dmpgrid, F, Fgen, Fnet));
453: /* Network current balance residual IG + Y*V + IL = 0. Only Y*V is added here.
454: The generator current injection IG and the load current injection IL (ID in the code) are added later.
455: */
456: /* Note that the values in Ybus are stored assuming the imaginary current balance
457: equation is ordered first followed by real current balance equation for each bus.
458: Thus imaginary current contribution goes in location 2*i, and
459: real current contribution in 2*i+1
460: */
461: PetscCall(MatMult(user->Ybus, Xnet, Fnet));
463: PetscCall(VecGetArrayRead(Xgen, &xgen));
464: PetscCall(VecGetArrayRead(Xnet, &xnet));
465: PetscCall(VecGetArrayWrite(Fgen, &fgen));
466: PetscCall(VecGetArrayWrite(Fnet, &fnet));
468: /* Generator subsystem */
469: for (i = 0; i < ngen; i++) {
470: Eqp = xgen[idx];
471: Edp = xgen[idx + 1];
472: delta = xgen[idx + 2];
473: w = xgen[idx + 3];
474: Id = xgen[idx + 4];
475: Iq = xgen[idx + 5];
476: Efd = xgen[idx + 6];
477: RF = xgen[idx + 7];
478: VR = xgen[idx + 8];
480: /* Generator differential equations */
481: fgen[idx] = (-Eqp - (Xd[i] - Xdp[i]) * Id + Efd) / Td0p[i];
482: fgen[idx + 1] = (-Edp + (Xq[i] - Xqp[i]) * Iq) / Tq0p[i];
483: fgen[idx + 2] = w - w_s;
484: fgen[idx + 3] = (TM[i] - Edp * Id - Eqp * Iq - (Xqp[i] - Xdp[i]) * Id * Iq - D[i] * (w - w_s)) / M[i];
486: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
487: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
489: PetscCall(ri2dq(Vr, Vi, delta, &Vd, &Vq));
490: /* Algebraic equations for stator currents */
491: det = Rs[i] * Rs[i] + Xdp[i] * Xqp[i];
493: Zdq_inv[0] = Rs[i] / det;
494: Zdq_inv[1] = Xqp[i] / det;
495: Zdq_inv[2] = -Xdp[i] / det;
496: Zdq_inv[3] = Rs[i] / det;
498: fgen[idx + 4] = Zdq_inv[0] * (-Edp + Vd) + Zdq_inv[1] * (-Eqp + Vq) + Id;
499: fgen[idx + 5] = Zdq_inv[2] * (-Edp + Vd) + Zdq_inv[3] * (-Eqp + Vq) + Iq;
501: /* Add generator current injection to network */
502: PetscCall(dq2ri(Id, Iq, delta, &IGr, &IGi));
504: fnet[2 * gbus[i]] -= IGi;
505: fnet[2 * gbus[i] + 1] -= IGr;
507: Vm = PetscSqrtScalar(Vd * Vd + Vq * Vq);
509: SE = k1[i] * PetscExpScalar(k2[i] * Efd);
511: /* Exciter differential equations */
512: fgen[idx + 6] = (-KE[i] * Efd - SE + VR) / TE[i];
513: fgen[idx + 7] = (-RF + KF[i] * Efd / TF[i]) / TF[i];
514: if (VRatmax[i]) fgen[idx + 8] = VR - VRMAX[i];
515: else if (VRatmin[i]) fgen[idx + 8] = VRMIN[i] - VR;
516: else fgen[idx + 8] = (-VR + KA[i] * RF - KA[i] * KF[i] * Efd / TF[i] + KA[i] * (Vref[i] - Vm)) / TA[i];
518: idx = idx + 9;
519: }
521: PetscCall(VecGetArray(user->V0, &v0));
522: for (i = 0; i < nload; i++) {
523: Vr = xnet[2 * lbus[i]]; /* Real part of load bus voltage */
524: Vi = xnet[2 * lbus[i] + 1]; /* Imaginary part of the load bus voltage */
525: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
526: Vm2 = Vm * Vm;
527: Vm0 = PetscSqrtScalar(v0[2 * lbus[i]] * v0[2 * lbus[i]] + v0[2 * lbus[i] + 1] * v0[2 * lbus[i] + 1]);
528: PD = QD = 0.0;
529: for (k = 0; k < ld_nsegsp[i]; k++) PD += ld_alphap[k] * PD0[i] * PetscPowScalar(Vm / Vm0, ld_betap[k]);
530: for (k = 0; k < ld_nsegsq[i]; k++) QD += ld_alphaq[k] * QD0[i] * PetscPowScalar(Vm / Vm0, ld_betaq[k]);
532: /* Load currents */
533: IDr = (PD * Vr + QD * Vi) / Vm2;
534: IDi = (-QD * Vr + PD * Vi) / Vm2;
536: fnet[2 * lbus[i]] += IDi;
537: fnet[2 * lbus[i] + 1] += IDr;
538: }
539: PetscCall(VecRestoreArray(user->V0, &v0));
541: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
542: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
543: PetscCall(VecRestoreArrayWrite(Fgen, &fgen));
544: PetscCall(VecRestoreArrayWrite(Fnet, &fnet));
546: PetscCall(DMCompositeGather(user->dmpgrid, INSERT_VALUES, F, Fgen, Fnet));
547: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
548: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Fgen, &Fnet));
549: PetscFunctionReturn(PETSC_SUCCESS);
550: }
552: /* f(x,y)
553: g(x,y)
554: */
555: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec X, Vec F, void *ctx)
556: {
557: Userctx *user = (Userctx *)ctx;
559: PetscFunctionBegin;
560: user->t = t;
561: PetscCall(ResidualFunction(X, F, user));
562: PetscFunctionReturn(PETSC_SUCCESS);
563: }
565: /* F = [\dot{x} - f(x,y);
566:        -g(x,y)]
567: */
568: PetscErrorCode IFunction(TS ts, PetscReal t, Vec X, Vec Xdot, Vec F, void *ctx)
569: {
570: PetscScalar *f;
571: const PetscScalar *xdot;
572: PetscInt i;
574: PetscFunctionBegin;
575: PetscCall(RHSFunction(ts, t, X, F, ctx));
576: PetscCall(VecScale(F, -1.0));
577: PetscCall(VecGetArray(F, &f));
578: PetscCall(VecGetArrayRead(Xdot, &xdot));
579: for (i = 0; i < ngen; i++) {
580: f[9 * i] += xdot[9 * i];
581: f[9 * i + 1] += xdot[9 * i + 1];
582: f[9 * i + 2] += xdot[9 * i + 2];
583: f[9 * i + 3] += xdot[9 * i + 3];
584: f[9 * i + 6] += xdot[9 * i + 6];
585: f[9 * i + 7] += xdot[9 * i + 7];
586: f[9 * i + 8] += xdot[9 * i + 8];
587: }
588: PetscCall(VecRestoreArray(F, &f));
589: PetscCall(VecRestoreArrayRead(Xdot, &xdot));
590: PetscFunctionReturn(PETSC_SUCCESS);
591: }
593: /* This function solves the algebraic system during the fault on and off times (and,
594: for the semi-explicit approach, after each stage and step). It computes the entire F
595: and then zeros out the part corresponding to the differential equations, giving
596: F = [0;g(y)];
597: */
598: PetscErrorCode AlgFunction(SNES snes, Vec X, Vec F, void *ctx)
599: {
600: Userctx *user = (Userctx *)ctx;
601: PetscScalar *f;
602: PetscInt i;
604: PetscFunctionBegin;
605: PetscCall(ResidualFunction(X, F, user));
606: PetscCall(VecGetArray(F, &f));
607: for (i = 0; i < ngen; i++) {
608: f[9 * i] = 0;
609: f[9 * i + 1] = 0;
610: f[9 * i + 2] = 0;
611: f[9 * i + 3] = 0;
612: f[9 * i + 6] = 0;
613: f[9 * i + 7] = 0;
614: f[9 * i + 8] = 0;
615: }
616: PetscCall(VecRestoreArray(F, &f));
617: PetscFunctionReturn(PETSC_SUCCESS);
618: }
620: PetscErrorCode PostStage(TS ts, PetscReal t, PetscInt i, Vec *X)
621: {
622: Userctx *user;
624: PetscFunctionBegin;
625: PetscCall(TSGetApplicationContext(ts, &user));
626: PetscCall(SNESSolve(user->snes_alg, NULL, X[i]));
627: PetscFunctionReturn(PETSC_SUCCESS);
628: }
630: PetscErrorCode PostEvaluate(TS ts)
631: {
632: Userctx *user;
633: Vec X;
635: PetscFunctionBegin;
636: PetscCall(TSGetApplicationContext(ts, &user));
637: PetscCall(TSGetSolution(ts, &X));
638: PetscCall(SNESSolve(user->snes_alg, NULL, X));
639: PetscFunctionReturn(PETSC_SUCCESS);
640: }
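/* Sets the number of nonzeros for each Jacobian row: the counts for the generator and exciter
   rows mirror the columns filled in ResidualJacobian(), the two rows of each generator bus get
   3 extra entries for the generator current injection (delta, Id, Iq), and the network rows
   reuse the row counts of Ybus. */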
642: PetscErrorCode PreallocateJacobian(Mat J, Userctx *user)
643: {
644: PetscInt *d_nnz;
645: PetscInt i, idx = 0, start = 0;
646: PetscInt ncols;
648: PetscFunctionBegin;
649: PetscCall(PetscMalloc1(user->neqs_pgrid, &d_nnz));
650: for (i = 0; i < user->neqs_pgrid; i++) d_nnz[i] = 0;
651: /* Generator subsystem */
652: for (i = 0; i < ngen; i++) {
653: d_nnz[idx] += 3;
654: d_nnz[idx + 1] += 2;
655: d_nnz[idx + 2] += 2;
656: d_nnz[idx + 3] += 5;
657: d_nnz[idx + 4] += 6;
658: d_nnz[idx + 5] += 6;
660: d_nnz[user->neqs_gen + 2 * gbus[i]] += 3;
661: d_nnz[user->neqs_gen + 2 * gbus[i] + 1] += 3;
663: d_nnz[idx + 6] += 2;
664: d_nnz[idx + 7] += 2;
665: d_nnz[idx + 8] += 5;
667: idx = idx + 9;
668: }
669: start = user->neqs_gen;
671: for (i = 0; i < nbus; i++) {
672: PetscCall(MatGetRow(user->Ybus, 2 * i, &ncols, NULL, NULL));
673: d_nnz[start + 2 * i] += ncols;
674: d_nnz[start + 2 * i + 1] += ncols;
675: PetscCall(MatRestoreRow(user->Ybus, 2 * i, &ncols, NULL, NULL));
676: }
677: PetscCall(MatSeqAIJSetPreallocation(J, 0, d_nnz));
678: PetscCall(PetscFree(d_nnz));
679: PetscFunctionReturn(PETSC_SUCCESS);
680: }
682: /*
683: J = [df_dx, df_dy
684: dg_dx, dg_dy]
685: */
686: PetscErrorCode ResidualJacobian(Vec X, Mat J, Mat B, void *ctx)
687: {
688: Userctx *user = (Userctx *)ctx;
689: Vec Xgen, Xnet;
690: const PetscScalar *xgen, *xnet;
691: PetscInt i, idx = 0;
692: PetscScalar Vr, Vi, Vm, Vm2;
693: PetscScalar Eqp, Edp, delta; /* Generator variables */
694: PetscScalar Efd;
695: PetscScalar Id, Iq; /* Generator dq axis currents */
696: PetscScalar Vd, Vq;
697: PetscScalar val[10];
698: PetscInt row[2], col[10];
699: PetscInt net_start = user->neqs_gen;
700: PetscScalar Zdq_inv[4], det;
701: PetscScalar dVd_dVr, dVd_dVi, dVq_dVr, dVq_dVi, dVd_ddelta, dVq_ddelta;
702: PetscScalar dIGr_ddelta, dIGi_ddelta, dIGr_dId, dIGr_dIq, dIGi_dId, dIGi_dIq;
703: PetscScalar dSE_dEfd;
704: PetscScalar dVm_dVd, dVm_dVq, dVm_dVr, dVm_dVi;
705: PetscInt ncols;
706: const PetscInt *cols;
707: const PetscScalar *yvals;
708: PetscInt k;
709: PetscScalar PD, QD, Vm0, Vm4;
710: const PetscScalar *v0;
711: PetscScalar dPD_dVr, dPD_dVi, dQD_dVr, dQD_dVi;
712: PetscScalar dIDr_dVr, dIDr_dVi, dIDi_dVr, dIDi_dVi;
714: PetscFunctionBegin;
715: PetscCall(MatZeroEntries(B));
716: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
717: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
719: PetscCall(VecGetArrayRead(Xgen, &xgen));
720: PetscCall(VecGetArrayRead(Xnet, &xnet));
722: /* Generator subsystem */
723: for (i = 0; i < ngen; i++) {
724: Eqp = xgen[idx];
725: Edp = xgen[idx + 1];
726: delta = xgen[idx + 2];
727: Id = xgen[idx + 4];
728: Iq = xgen[idx + 5];
729: Efd = xgen[idx + 6];
731: /* fgen[idx] = (-Eqp - (Xd[i] - Xdp[i])*Id + Efd)/Td0p[i]; */
732: row[0] = idx;
733: col[0] = idx;
734: col[1] = idx + 4;
735: col[2] = idx + 6;
736: val[0] = -1 / Td0p[i];
737: val[1] = -(Xd[i] - Xdp[i]) / Td0p[i];
738: val[2] = 1 / Td0p[i];
740: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
742: /* fgen[idx+1] = (-Edp + (Xq[i] - Xqp[i])*Iq)/Tq0p[i]; */
743: row[0] = idx + 1;
744: col[0] = idx + 1;
745: col[1] = idx + 5;
746: val[0] = -1 / Tq0p[i];
747: val[1] = (Xq[i] - Xqp[i]) / Tq0p[i];
748: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
750: /* fgen[idx+2] = w - w_s; */
751: row[0] = idx + 2;
752: col[0] = idx + 2;
753: col[1] = idx + 3;
754: val[0] = 0;
755: val[1] = 1;
756: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
758: /* fgen[idx+3] = (TM[i] - Edp*Id - Eqp*Iq - (Xqp[i] - Xdp[i])*Id*Iq - D[i]*(w - w_s))/M[i]; */
759: row[0] = idx + 3;
760: col[0] = idx;
761: col[1] = idx + 1;
762: col[2] = idx + 3;
763: col[3] = idx + 4;
764: col[4] = idx + 5;
765: val[0] = -Iq / M[i];
766: val[1] = -Id / M[i];
767: val[2] = -D[i] / M[i];
768: val[3] = (-Edp - (Xqp[i] - Xdp[i]) * Iq) / M[i];
769: val[4] = (-Eqp - (Xqp[i] - Xdp[i]) * Id) / M[i];
770: PetscCall(MatSetValues(J, 1, row, 5, col, val, INSERT_VALUES));
772: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
773: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
774: PetscCall(ri2dq(Vr, Vi, delta, &Vd, &Vq));
776: det = Rs[i] * Rs[i] + Xdp[i] * Xqp[i];
778: Zdq_inv[0] = Rs[i] / det;
779: Zdq_inv[1] = Xqp[i] / det;
780: Zdq_inv[2] = -Xdp[i] / det;
781: Zdq_inv[3] = Rs[i] / det;
783: dVd_dVr = PetscSinScalar(delta);
784: dVd_dVi = -PetscCosScalar(delta);
785: dVq_dVr = PetscCosScalar(delta);
786: dVq_dVi = PetscSinScalar(delta);
787: dVd_ddelta = Vr * PetscCosScalar(delta) + Vi * PetscSinScalar(delta);
788: dVq_ddelta = -Vr * PetscSinScalar(delta) + Vi * PetscCosScalar(delta);
790: /* fgen[idx+4] = Zdq_inv[0]*(-Edp + Vd) + Zdq_inv[1]*(-Eqp + Vq) + Id; */
791: row[0] = idx + 4;
792: col[0] = idx;
793: col[1] = idx + 1;
794: col[2] = idx + 2;
795: val[0] = -Zdq_inv[1];
796: val[1] = -Zdq_inv[0];
797: val[2] = Zdq_inv[0] * dVd_ddelta + Zdq_inv[1] * dVq_ddelta;
798: col[3] = idx + 4;
799: col[4] = net_start + 2 * gbus[i];
800: col[5] = net_start + 2 * gbus[i] + 1;
801: val[3] = 1;
802: val[4] = Zdq_inv[0] * dVd_dVr + Zdq_inv[1] * dVq_dVr;
803: val[5] = Zdq_inv[0] * dVd_dVi + Zdq_inv[1] * dVq_dVi;
804: PetscCall(MatSetValues(J, 1, row, 6, col, val, INSERT_VALUES));
806: /* fgen[idx+5] = Zdq_inv[2]*(-Edp + Vd) + Zdq_inv[3]*(-Eqp + Vq) + Iq; */
807: row[0] = idx + 5;
808: col[0] = idx;
809: col[1] = idx + 1;
810: col[2] = idx + 2;
811: val[0] = -Zdq_inv[3];
812: val[1] = -Zdq_inv[2];
813: val[2] = Zdq_inv[2] * dVd_ddelta + Zdq_inv[3] * dVq_ddelta;
814: col[3] = idx + 5;
815: col[4] = net_start + 2 * gbus[i];
816: col[5] = net_start + 2 * gbus[i] + 1;
817: val[3] = 1;
818: val[4] = Zdq_inv[2] * dVd_dVr + Zdq_inv[3] * dVq_dVr;
819: val[5] = Zdq_inv[2] * dVd_dVi + Zdq_inv[3] * dVq_dVi;
820: PetscCall(MatSetValues(J, 1, row, 6, col, val, INSERT_VALUES));
822: dIGr_ddelta = Id * PetscCosScalar(delta) - Iq * PetscSinScalar(delta);
823: dIGi_ddelta = Id * PetscSinScalar(delta) + Iq * PetscCosScalar(delta);
824: dIGr_dId = PetscSinScalar(delta);
825: dIGr_dIq = PetscCosScalar(delta);
826: dIGi_dId = -PetscCosScalar(delta);
827: dIGi_dIq = PetscSinScalar(delta);
829: /* fnet[2*gbus[i]] -= IGi; */
830: row[0] = net_start + 2 * gbus[i];
831: col[0] = idx + 2;
832: col[1] = idx + 4;
833: col[2] = idx + 5;
834: val[0] = -dIGi_ddelta;
835: val[1] = -dIGi_dId;
836: val[2] = -dIGi_dIq;
837: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
839: /* fnet[2*gbus[i]+1] -= IGr; */
840: row[0] = net_start + 2 * gbus[i] + 1;
841: col[0] = idx + 2;
842: col[1] = idx + 4;
843: col[2] = idx + 5;
844: val[0] = -dIGr_ddelta;
845: val[1] = -dIGr_dId;
846: val[2] = -dIGr_dIq;
847: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
849: Vm = PetscSqrtScalar(Vd * Vd + Vq * Vq);
851: /* fgen[idx+6] = (-KE[i]*Efd - SE + VR)/TE[i]; */
852: /* SE = k1[i]*PetscExpScalar(k2[i]*Efd); */
854: dSE_dEfd = k1[i] * k2[i] * PetscExpScalar(k2[i] * Efd);
856: row[0] = idx + 6;
857: col[0] = idx + 6;
858: col[1] = idx + 8;
859: val[0] = (-KE[i] - dSE_dEfd) / TE[i];
860: val[1] = 1 / TE[i];
861: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
863: /* Exciter differential equations */
865: /* fgen[idx+7] = (-RF + KF[i]*Efd/TF[i])/TF[i]; */
866: row[0] = idx + 7;
867: col[0] = idx + 6;
868: col[1] = idx + 7;
869: val[0] = (KF[i] / TF[i]) / TF[i];
870: val[1] = -1 / TF[i];
871: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
873: /* fgen[idx+8] = (-VR + KA[i]*RF - KA[i]*KF[i]*Efd/TF[i] + KA[i]*(Vref[i] - Vm))/TA[i]; */
874: /* Vm = (Vd^2 + Vq^2)^0.5; */
876: row[0] = idx + 8;
877: if (VRatmax[i]) {
878: col[0] = idx + 8;
879: val[0] = 1.0;
880: PetscCall(MatSetValues(J, 1, row, 1, col, val, INSERT_VALUES));
881: } else if (VRatmin[i]) {
882: col[0] = idx + 8;
883: val[0] = -1.0;
884: PetscCall(MatSetValues(J, 1, row, 1, col, val, INSERT_VALUES));
885: } else {
886: dVm_dVd = Vd / Vm;
887: dVm_dVq = Vq / Vm;
888: dVm_dVr = dVm_dVd * dVd_dVr + dVm_dVq * dVq_dVr;
889: dVm_dVi = dVm_dVd * dVd_dVi + dVm_dVq * dVq_dVi;
890: row[0] = idx + 8;
891: col[0] = idx + 6;
892: col[1] = idx + 7;
893: col[2] = idx + 8;
894: val[0] = -(KA[i] * KF[i] / TF[i]) / TA[i];
895: val[1] = KA[i] / TA[i];
896: val[2] = -1 / TA[i];
897: col[3] = net_start + 2 * gbus[i];
898: col[4] = net_start + 2 * gbus[i] + 1;
899: val[3] = -KA[i] * dVm_dVr / TA[i];
900: val[4] = -KA[i] * dVm_dVi / TA[i];
901: PetscCall(MatSetValues(J, 1, row, 5, col, val, INSERT_VALUES));
902: }
903: idx = idx + 9;
904: }
906: for (i = 0; i < nbus; i++) {
907: PetscCall(MatGetRow(user->Ybus, 2 * i, &ncols, &cols, &yvals));
908: row[0] = net_start + 2 * i;
909: for (k = 0; k < ncols; k++) {
910: col[k] = net_start + cols[k];
911: val[k] = yvals[k];
912: }
913: PetscCall(MatSetValues(J, 1, row, ncols, col, val, INSERT_VALUES));
914: PetscCall(MatRestoreRow(user->Ybus, 2 * i, &ncols, &cols, &yvals));
916: PetscCall(MatGetRow(user->Ybus, 2 * i + 1, &ncols, &cols, &yvals));
917: row[0] = net_start + 2 * i + 1;
918: for (k = 0; k < ncols; k++) {
919: col[k] = net_start + cols[k];
920: val[k] = yvals[k];
921: }
922: PetscCall(MatSetValues(J, 1, row, ncols, col, val, INSERT_VALUES));
923: PetscCall(MatRestoreRow(user->Ybus, 2 * i + 1, &ncols, &cols, &yvals));
924: }
926: PetscCall(MatAssemblyBegin(J, MAT_FLUSH_ASSEMBLY));
927: PetscCall(MatAssemblyEnd(J, MAT_FLUSH_ASSEMBLY));
929: PetscCall(VecGetArrayRead(user->V0, &v0));
930: for (i = 0; i < nload; i++) {
931: Vr = xnet[2 * lbus[i]]; /* Real part of load bus voltage */
932: Vi = xnet[2 * lbus[i] + 1]; /* Imaginary part of the load bus voltage */
933: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
934: Vm2 = Vm * Vm;
935: Vm4 = Vm2 * Vm2;
936: Vm0 = PetscSqrtScalar(v0[2 * lbus[i]] * v0[2 * lbus[i]] + v0[2 * lbus[i] + 1] * v0[2 * lbus[i] + 1]);
937: PD = QD = 0.0;
938: dPD_dVr = dPD_dVi = dQD_dVr = dQD_dVi = 0.0;
939: for (k = 0; k < ld_nsegsp[i]; k++) {
940: PD += ld_alphap[k] * PD0[i] * PetscPowScalar(Vm / Vm0, ld_betap[k]);
941: dPD_dVr += ld_alphap[k] * ld_betap[k] * PD0[i] * PetscPowScalar(1 / Vm0, ld_betap[k]) * Vr * PetscPowScalar(Vm, ld_betap[k] - 2);
942: dPD_dVi += ld_alphap[k] * ld_betap[k] * PD0[i] * PetscPowScalar(1 / Vm0, ld_betap[k]) * Vi * PetscPowScalar(Vm, ld_betap[k] - 2);
943: }
944: for (k = 0; k < ld_nsegsq[i]; k++) {
945: QD += ld_alphaq[k] * QD0[i] * PetscPowScalar(Vm / Vm0, ld_betaq[k]);
946: dQD_dVr += ld_alphaq[k] * ld_betaq[k] * QD0[i] * PetscPowScalar(1 / Vm0, ld_betaq[k]) * Vr * PetscPowScalar(Vm, ld_betaq[k] - 2);
947: dQD_dVi += ld_alphaq[k] * ld_betaq[k] * QD0[i] * PetscPowScalar(1 / Vm0, ld_betaq[k]) * Vi * PetscPowScalar(Vm, ld_betaq[k] - 2);
948: }
950: /* IDr = (PD*Vr + QD*Vi)/Vm2; */
951: /* IDi = (-QD*Vr + PD*Vi)/Vm2; */
953: dIDr_dVr = (dPD_dVr * Vr + dQD_dVr * Vi + PD) / Vm2 - ((PD * Vr + QD * Vi) * 2 * Vr) / Vm4;
954: dIDr_dVi = (dPD_dVi * Vr + dQD_dVi * Vi + QD) / Vm2 - ((PD * Vr + QD * Vi) * 2 * Vi) / Vm4;
956: dIDi_dVr = (-dQD_dVr * Vr + dPD_dVr * Vi - QD) / Vm2 - ((-QD * Vr + PD * Vi) * 2 * Vr) / Vm4;
957: dIDi_dVi = (-dQD_dVi * Vr + dPD_dVi * Vi + PD) / Vm2 - ((-QD * Vr + PD * Vi) * 2 * Vi) / Vm4;
959: /* fnet[2*lbus[i]] += IDi; */
960: row[0] = net_start + 2 * lbus[i];
961: col[0] = net_start + 2 * lbus[i];
962: col[1] = net_start + 2 * lbus[i] + 1;
963: val[0] = dIDi_dVr;
964: val[1] = dIDi_dVi;
965: PetscCall(MatSetValues(J, 1, row, 2, col, val, ADD_VALUES));
966: /* fnet[2*lbus[i]+1] += IDr; */
967: row[0] = net_start + 2 * lbus[i] + 1;
968: col[0] = net_start + 2 * lbus[i];
969: col[1] = net_start + 2 * lbus[i] + 1;
970: val[0] = dIDr_dVr;
971: val[1] = dIDr_dVi;
972: PetscCall(MatSetValues(J, 1, row, 2, col, val, ADD_VALUES));
973: }
974: PetscCall(VecRestoreArrayRead(user->V0, &v0));
976: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
977: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
979: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
981: PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
982: PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
983: PetscFunctionReturn(PETSC_SUCCESS);
984: }
986: /*
987: J = [I, 0
988: dg_dx, dg_dy]
989: */
990: PetscErrorCode AlgJacobian(SNES snes, Vec X, Mat A, Mat B, void *ctx)
991: {
992: Userctx *user = (Userctx *)ctx;
994: PetscFunctionBegin;
995: PetscCall(ResidualJacobian(X, A, B, ctx));
996: PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
997: PetscCall(MatZeroRowsIS(A, user->is_diff, 1.0, NULL, NULL));
998: PetscFunctionReturn(PETSC_SUCCESS);
999: }
1001: /*
1002: J = [df_dx, df_dy
1003: dg_dx, dg_dy]
1004: */
1006: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec X, Mat A, Mat B, void *ctx)
1007: {
1008: Userctx *user = (Userctx *)ctx;
1010: PetscFunctionBegin;
1011: user->t = t;
1013: PetscCall(ResidualJacobian(X, A, B, user));
1014: PetscFunctionReturn(PETSC_SUCCESS);
1015: }
1017: /*
1018: J = [a*I - df_dx, -df_dy
1019: -dg_dx, -dg_dy]
1020: */
1022: PetscErrorCode IJacobian(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat A, Mat B, Userctx *user)
1023: {
1024: PetscScalar atmp = (PetscScalar)a;
1025: PetscInt i, row;
1027: PetscFunctionBegin;
1028: user->t = t;
1030: PetscCall(RHSJacobian(ts, t, X, A, B, user));
1031: PetscCall(MatScale(B, -1.0));
1032: for (i = 0; i < ngen; i++) {
1033: row = 9 * i;
1034: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1035: row = 9 * i + 1;
1036: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1037: row = 9 * i + 2;
1038: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1039: row = 9 * i + 3;
1040: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1041: row = 9 * i + 6;
1042: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1043: row = 9 * i + 7;
1044: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1045: row = 9 * i + 8;
1046: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1047: }
1048: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
1049: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
1050: PetscFunctionReturn(PETSC_SUCCESS);
1051: }
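/* main() reads the power flow solution (X.bin) and the admittance matrix (Ybus.bin), builds a
   composite DM for the generator and network subsystems, sets the steady-state initial
   conditions, selects either an implicit DAE integration (TSCN) or a semi-explicit one (TSRK
   with an algebraic SNES solved in PostStage/PostEvaluate), registers the fault and VR-limit
   events, and writes the saved trajectory to out.bin. */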
1053: int main(int argc, char **argv)
1054: {
1055: TS ts;
1056: SNES snes_alg;
1057: PetscMPIInt size;
1058: Userctx user;
1059: PetscViewer Xview, Ybusview, viewer;
1060: Vec X, F_alg;
1061: Mat J, A;
1062: PetscInt i, idx, *idx2;
1063: Vec Xdot;
1064: PetscScalar *x, *mat, *amat;
1065: const PetscScalar *rmat;
1066: Vec vatol;
1067: PetscInt *direction;
1068: PetscBool *terminate;
1069: const PetscInt *idx3;
1070: PetscScalar *vatoli;
1071: PetscInt k;
1073: PetscFunctionBeginUser;
1074: PetscCall(PetscInitialize(&argc, &argv, "petscoptions", help));
1075: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
1076: PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Only for sequential runs");
1078: user.neqs_gen = 9 * ngen; /* # eqs. for generator subsystem */
1079: user.neqs_net = 2 * nbus; /* # eqs. for network subsystem */
1080: user.neqs_pgrid = user.neqs_gen + user.neqs_net;
1082: /* Create indices for differential and algebraic equations */
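/* For each generator, the differential variables are local indices 0-3 and 6-8 of its
   9-variable block, i.e. [Eqp, Edp, delta, w] and [Efd, RF, VR]; indices 4-5 (Id, Iq) and all
   network variables are algebraic. */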
1084: PetscCall(PetscMalloc1(7 * ngen, &idx2));
1085: for (i = 0; i < ngen; i++) {
1086: idx2[7 * i] = 9 * i;
1087: idx2[7 * i + 1] = 9 * i + 1;
1088: idx2[7 * i + 2] = 9 * i + 2;
1089: idx2[7 * i + 3] = 9 * i + 3;
1090: idx2[7 * i + 4] = 9 * i + 6;
1091: idx2[7 * i + 5] = 9 * i + 7;
1092: idx2[7 * i + 6] = 9 * i + 8;
1093: }
1094: PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, 7 * ngen, idx2, PETSC_COPY_VALUES, &user.is_diff));
1095: PetscCall(ISComplement(user.is_diff, 0, user.neqs_pgrid, &user.is_alg));
1096: PetscCall(PetscFree(idx2));
1098: /* Read initial voltage vector and Ybus */
1099: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "X.bin", FILE_MODE_READ, &Xview));
1100: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "Ybus.bin", FILE_MODE_READ, &Ybusview));
1102: PetscCall(VecCreate(PETSC_COMM_WORLD, &user.V0));
1103: PetscCall(VecSetSizes(user.V0, PETSC_DECIDE, user.neqs_net));
1104: PetscCall(VecLoad(user.V0, Xview));
1106: PetscCall(MatCreate(PETSC_COMM_WORLD, &user.Ybus));
1107: PetscCall(MatSetSizes(user.Ybus, PETSC_DECIDE, PETSC_DECIDE, user.neqs_net, user.neqs_net));
1108: PetscCall(MatSetType(user.Ybus, MATBAIJ));
1109: /* PetscCall(MatSetBlockSize(user.Ybus,2)); */
1110: PetscCall(MatLoad(user.Ybus, Ybusview));
1112: /* Set run time options */
1113: PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Transient stability fault options", "");
1114: {
1115: user.tfaulton = 1.0;
1116: user.tfaultoff = 1.2;
1117: user.Rfault = 0.0001;
1118: user.setisdiff = PETSC_FALSE;
1119: user.semiexplicit = PETSC_FALSE;
1120: user.faultbus = 8;
1121: PetscCall(PetscOptionsReal("-tfaulton", "", "", user.tfaulton, &user.tfaulton, NULL));
1122: PetscCall(PetscOptionsReal("-tfaultoff", "", "", user.tfaultoff, &user.tfaultoff, NULL));
1123: PetscCall(PetscOptionsInt("-faultbus", "", "", user.faultbus, &user.faultbus, NULL));
1124: user.t0 = 0.0;
1125: user.tmax = 5.0;
1126: PetscCall(PetscOptionsReal("-t0", "", "", user.t0, &user.t0, NULL));
1127: PetscCall(PetscOptionsReal("-tmax", "", "", user.tmax, &user.tmax, NULL));
1128: PetscCall(PetscOptionsBool("-setisdiff", "", "", user.setisdiff, &user.setisdiff, NULL));
1129: PetscCall(PetscOptionsBool("-dae_semiexplicit", "", "", user.semiexplicit, &user.semiexplicit, NULL));
1130: }
1131: PetscOptionsEnd();
1133: PetscCall(PetscViewerDestroy(&Xview));
1134: PetscCall(PetscViewerDestroy(&Ybusview));
1136: /* Create DMs for generator and network subsystems */
1137: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, user.neqs_gen, 1, 1, NULL, &user.dmgen));
1138: PetscCall(DMSetOptionsPrefix(user.dmgen, "dmgen_"));
1139: PetscCall(DMSetFromOptions(user.dmgen));
1140: PetscCall(DMSetUp(user.dmgen));
1141: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, user.neqs_net, 1, 1, NULL, &user.dmnet));
1142: PetscCall(DMSetOptionsPrefix(user.dmnet, "dmnet_"));
1143: PetscCall(DMSetFromOptions(user.dmnet));
1144: PetscCall(DMSetUp(user.dmnet));
1145: /* Create a composite DM packer and add the two DMs */
1146: PetscCall(DMCompositeCreate(PETSC_COMM_WORLD, &user.dmpgrid));
1147: PetscCall(DMSetOptionsPrefix(user.dmpgrid, "pgrid_"));
1148: PetscCall(DMCompositeAddDM(user.dmpgrid, user.dmgen));
1149: PetscCall(DMCompositeAddDM(user.dmpgrid, user.dmnet));
1151: PetscCall(DMCreateGlobalVector(user.dmpgrid, &X));
1153: PetscCall(MatCreate(PETSC_COMM_WORLD, &J));
1154: PetscCall(MatSetSizes(J, PETSC_DECIDE, PETSC_DECIDE, user.neqs_pgrid, user.neqs_pgrid));
1155: PetscCall(MatSetFromOptions(J));
1156: PetscCall(PreallocateJacobian(J, &user));
1158: /* Create matrix to save solutions at each time step */
1159: user.stepnum = 0;
1161: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, user.neqs_pgrid + 1, 1002, NULL, &user.Sol));
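/* Each column of Sol holds [t; X(t)] for one saved time step (filled in SaveSolution); the
   matrix has room for up to 1002 saved steps. */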
1162: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1163: Create timestepping solver context
1164: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
1165: PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
1166: PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
1167: if (user.semiexplicit) {
1168: PetscCall(TSSetType(ts, TSRK));
1169: PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &user));
1170: PetscCall(TSSetRHSJacobian(ts, J, J, RHSJacobian, &user));
1171: } else {
1172: PetscCall(TSSetType(ts, TSCN));
1173: PetscCall(TSSetEquationType(ts, TS_EQ_DAE_IMPLICIT_INDEX1));
1174: PetscCall(TSSetIFunction(ts, NULL, (TSIFunctionFn *)IFunction, &user));
1175: PetscCall(TSSetIJacobian(ts, J, J, (TSIJacobianFn *)IJacobian, &user));
1176: }
1177: PetscCall(TSSetApplicationContext(ts, &user));
1179: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1180: Set initial conditions
1181: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
1182: PetscCall(SetInitialGuess(X, &user));
1183: /* Just to set up the Jacobian structure */
1185: PetscCall(VecDuplicate(X, &Xdot));
1186: PetscCall(IJacobian(ts, 0.0, X, Xdot, 0.0, J, J, &user));
1187: PetscCall(VecDestroy(&Xdot));
1189: /* Save initial solution */
1191: idx = user.stepnum * (user.neqs_pgrid + 1);
1192: PetscCall(MatDenseGetArray(user.Sol, &mat));
1193: PetscCall(VecGetArray(X, &x));
1195: mat[idx] = 0.0;
1197: PetscCall(PetscArraycpy(mat + idx + 1, x, user.neqs_pgrid));
1198: PetscCall(MatDenseRestoreArray(user.Sol, &mat));
1199: PetscCall(VecRestoreArray(X, &x));
1200: user.stepnum++;
1202: PetscCall(TSSetMaxTime(ts, user.tmax));
1203: PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
1204: PetscCall(TSSetTimeStep(ts, 0.01));
1205: PetscCall(TSSetFromOptions(ts));
1206: PetscCall(TSSetPostStep(ts, SaveSolution));
1207: PetscCall(TSSetSolution(ts, X));
1209: PetscCall(PetscMalloc1(2 * ngen + 2, &direction));
1210: PetscCall(PetscMalloc1(2 * ngen + 2, &terminate));
1211: direction[0] = direction[1] = 1;
1212: terminate[0] = terminate[1] = PETSC_FALSE;
1213: for (i = 0; i < ngen; i++) {
1214: direction[2 + 2 * i] = -1;
1215: direction[2 + 2 * i + 1] = 1;
1216: terminate[2 + 2 * i] = terminate[2 + 2 * i + 1] = PETSC_FALSE;
1217: }
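/* Events 0 and 1 fire as t - tfaulton and t - tfaultoff increase through zero (direction +1).
   For generator i, event 2+2*i uses direction -1 because VRMAX - VR decreases through zero as
   VR reaches its upper limit, while event 2+2*i+1 uses direction +1 because VRMIN - VR
   increases through zero as VR reaches its lower limit. */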
1219: PetscCall(TSSetEventHandler(ts, 2 * ngen + 2, direction, terminate, EventFunction, PostEventFunction, (void *)&user));
1221: if (user.semiexplicit) {
1222: /* Use a semi-explicit approach with the time-stepping done by an explicit method and the
1223: algebraic part solved via PostStage and PostEvaluate callbacks
1224: */
1225: PetscCall(TSSetType(ts, TSRK));
1226: PetscCall(TSSetPostStage(ts, PostStage));
1227: PetscCall(TSSetPostEvaluate(ts, PostEvaluate));
1228: }
1230: if (user.setisdiff) {
1231: /* Create vector of absolute tolerances and set the algebraic part to a large value so it is effectively ignored */
1232: PetscCall(VecDuplicate(X, &vatol));
1233: PetscCall(VecSet(vatol, 100000.0));
1234: PetscCall(VecGetArray(vatol, &vatoli));
1235: PetscCall(ISGetIndices(user.is_diff, &idx3));
1236: for (k = 0; k < 7 * ngen; k++) vatoli[idx3[k]] = 1e-2;
1237: PetscCall(VecRestoreArray(vatol, &vatoli));
1238: }
1240: /* Create the nonlinear solver for solving the algebraic system */
1241: /* Note that although the algebraic system needs to be solved only for
1242: Idq and V, we reuse the entire system including xgen. The xgen
1243: variables are held constant by setting their residuals to 0 and
1244: putting a 1 on the Jacobian diagonal for xgen rows
1245: */
1247: PetscCall(VecDuplicate(X, &F_alg));
1248: PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes_alg));
1249: PetscCall(SNESSetFunction(snes_alg, F_alg, AlgFunction, &user));
1250: PetscCall(SNESSetJacobian(snes_alg, J, J, AlgJacobian, &user));
1251: PetscCall(SNESSetFromOptions(snes_alg));
1253: user.snes_alg = snes_alg;
1255: /* Solve */
1256: PetscCall(TSSolve(ts, X));
1258: PetscCall(MatAssemblyBegin(user.Sol, MAT_FINAL_ASSEMBLY));
1259: PetscCall(MatAssemblyEnd(user.Sol, MAT_FINAL_ASSEMBLY));
1261: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, user.neqs_pgrid + 1, user.stepnum, NULL, &A));
1262: PetscCall(MatDenseGetArrayRead(user.Sol, &rmat));
1263: PetscCall(MatDenseGetArray(A, &amat));
1264: PetscCall(PetscArraycpy(amat, rmat, user.stepnum * (user.neqs_pgrid + 1)));
1265: PetscCall(MatDenseRestoreArray(A, &amat));
1266: PetscCall(MatDenseRestoreArrayRead(user.Sol, &rmat));
1267: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, "out.bin", FILE_MODE_WRITE, &viewer));
1268: PetscCall(MatView(A, viewer));
1269: PetscCall(PetscViewerDestroy(&viewer));
1270: PetscCall(MatDestroy(&A));
1272: PetscCall(PetscFree(direction));
1273: PetscCall(PetscFree(terminate));
1274: PetscCall(SNESDestroy(&snes_alg));
1275: PetscCall(VecDestroy(&F_alg));
1276: PetscCall(MatDestroy(&J));
1277: PetscCall(MatDestroy(&user.Ybus));
1278: PetscCall(MatDestroy(&user.Sol));
1279: PetscCall(VecDestroy(&X));
1280: PetscCall(VecDestroy(&user.V0));
1281: PetscCall(DMDestroy(&user.dmgen));
1282: PetscCall(DMDestroy(&user.dmnet));
1283: PetscCall(DMDestroy(&user.dmpgrid));
1284: PetscCall(ISDestroy(&user.is_diff));
1285: PetscCall(ISDestroy(&user.is_alg));
1286: PetscCall(TSDestroy(&ts));
1287: if (user.setisdiff) PetscCall(VecDestroy(&vatol));
1288: PetscCall(PetscFinalize());
1289: return 0;
1290: }
1292: /*TEST
1294: build:
1295: requires: double !complex !defined(PETSC_USE_64BIT_INDICES)
1297: test:
1298: suffix: implicit
1299: args: -ts_monitor -snes_monitor_short
1300: localrunfiles: petscoptions X.bin Ybus.bin
1302: test:
1303: suffix: semiexplicit
1304: args: -ts_monitor -dae_semiexplicit -snes_error_if_not_converged -ts_rk_type 2a
1305: localrunfiles: petscoptions X.bin Ybus.bin
1307: test:
1308: suffix: steprestart
1309: # needs ARKIMEX methods with all implicit stages since the mass matrix is not the identity
1310: args: -ts_monitor -snes_monitor_short -ts_type arkimex -ts_arkimex_type prssp2
1311: localrunfiles: petscoptions X.bin Ybus.bin
1313: TEST*/