Actual source code: ex9bus.c
2: static char help[] = "Power grid stability analysis of WECC 9 bus system.\n\
3: This example is based on the 9-bus (node) example given in the book Power\n\
4: Systems Dynamics and Stability (Chapter 7) by P. Sauer and M. A. Pai.\n\
5: The power grid in this example consists of 9 buses (nodes), 3 generators,\n\
6: 3 loads, and 9 transmission lines. The network equations are written\n\
7: in current balance form using rectangular coordinates.\n\n";
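/*
   Example usage (mirroring the tests listed at the end of this file): run the compiled
   executable, e.g.

       ./ex9bus -ts_monitor -snes_monitor_short

   The run directory must contain the power flow solution X.bin, the admittance matrix
   Ybus.bin, and the run-time options file 'petscoptions' that this example reads at startup.
*/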
9: /*
10: The equations for the stability analysis are described by the DAE
12: \dot{x} = f(x,y,t)
13: 0 = g(x,y,t)
15: where the generators are described by differential equations, while the algebraic
16: constraints define the network equations.
18: Each generator is modeled by a 4th order system of differential equations describing its
19: electrical and mechanical dynamics. Each generator also has an exciter system, modeled by 3rd order
20: differential equations describing the exciter, the voltage regulator, and the feedback stabilizer
21: mechanism.
23: The network equations are described by nodal current balance equations.
24: I(x,y) - Y*V = 0
26: where:
27: I(x,y) is the current injected from generators and loads.
28: Y is the admittance matrix, and
29: V is the voltage vector
30: */
32: /*
33: Include "petscts.h" so that we can use TS solvers. Note that this
34: file automatically includes:
35: petscsys.h    - base PETSc routines      petscvec.h - vectors
36: petscmat.h    - matrices
37: petscis.h     - index sets               petscksp.h - Krylov subspace methods
38: petscviewer.h - viewers                  petscpc.h  - preconditioners
39: petscksp.h    - linear solvers
40: */
42: #include <petscts.h>
43: #include <petscdm.h>
44: #include <petscdmda.h>
45: #include <petscdmcomposite.h>
47: #define freq 60
48: #define w_s (2 * PETSC_PI * freq)
50: /* Sizes and indices */
51: const PetscInt nbus = 9; /* Number of network buses */
52: const PetscInt ngen = 3; /* Number of generators */
53: const PetscInt nload = 3; /* Number of loads */
54: const PetscInt gbus[3] = {0, 1, 2}; /* Buses at which generators are incident */
55: const PetscInt lbus[3] = {4, 5, 7}; /* Buses at which loads are incident */
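/*
   Problem dimensions implied by the data above (see also the setup in main()):
     generator subsystem: 9 states per generator (Eqp,Edp,delta,w,Id,Iq,Efd,RF,VR) -> 9*3 = 27 equations
     network subsystem:   2 states per bus (real and imaginary voltage)            -> 2*9 = 18 equations
   giving a DAE of size 45.
*/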
57: /* Generator real and reactive powers (found via loadflow) */
58: const PetscScalar PG[3] = {0.716786142395021, 1.630000000000000, 0.850000000000000};
59: const PetscScalar QG[3] = {0.270702180178785, 0.066120127797275, -0.108402221791588};
60: /* Generator constants */
61: const PetscScalar H[3] = {23.64, 6.4, 3.01}; /* Inertia constant */
62: const PetscScalar Rs[3] = {0.0, 0.0, 0.0}; /* Stator Resistance */
63: const PetscScalar Xd[3] = {0.146, 0.8958, 1.3125}; /* d-axis reactance */
64: const PetscScalar Xdp[3] = {0.0608, 0.1198, 0.1813}; /* d-axis transient reactance */
65: const PetscScalar Xq[3] = {0.4360, 0.8645, 1.2578}; /* q-axis reactance Xq(1) set to 0.4360, value given in text 0.0969 */
66: const PetscScalar Xqp[3] = {0.0969, 0.1969, 0.25}; /* q-axis transient reactance */
67: const PetscScalar Td0p[3] = {8.96, 6.0, 5.89}; /* d-axis open circuit time constant */
68: const PetscScalar Tq0p[3] = {0.31, 0.535, 0.6}; /* q-axis open circuit time constant */
69: PetscScalar M[3]; /* M = 2*H/w_s */
70: PetscScalar D[3]; /* D = 0.1*M */
72: PetscScalar TM[3]; /* Mechanical Torque */
73: /* Exciter system constants */
74: const PetscScalar KA[3] = {20.0, 20.0, 20.0}; /* Voltage regulator gain constant */
75: const PetscScalar TA[3] = {0.2, 0.2, 0.2}; /* Voltage regulator time constant */
76: const PetscScalar KE[3] = {1.0, 1.0, 1.0}; /* Exciter gain constant */
77: const PetscScalar TE[3] = {0.314, 0.314, 0.314}; /* Exciter time constant */
78: const PetscScalar KF[3] = {0.063, 0.063, 0.063}; /* Feedback stabilizer gain constant */
79: const PetscScalar TF[3] = {0.35, 0.35, 0.35}; /* Feedback stabilizer time constant */
80: const PetscScalar k1[3] = {0.0039, 0.0039, 0.0039};
81: const PetscScalar k2[3] = {1.555, 1.555, 1.555}; /* k1 and k2 for calculating the saturation function SE = k1*exp(k2*Efd) */
82: const PetscScalar VRMIN[3] = {-4.0, -4.0, -4.0};
83: const PetscScalar VRMAX[3] = {7.0, 7.0, 7.0};
84: PetscInt VRatmin[3];
85: PetscInt VRatmax[3];
87: PetscScalar Vref[3];
88: /* Load constants
89: We use a composite load model that describes the real and reactive power drawn by each load at every time instant as follows
90: P(t) = \sum\limits_{i=0}^{ld_nsegsp} ld_alphap_i*P_D0*(\frac{V_m(t)}{V_m0})^{ld_betap_i}
91: Q(t) = \sum\limits_{i=0}^{ld_nsegsq} ld_alphaq_i*Q_D0*(\frac{V_m(t)}{V_m0})^{ld_betaq_i}
92: where
93: ld_nsegsp,ld_nsegsq - Number of individual load models for the real and reactive power loads
94: ld_alphap,ld_alphaq - Percentage contributions (weights) of the loads
95: P_D0 - Real power load
96: Q_D0 - Reactive power load
97: V_m(t) - Voltage magnitude at time t
98: V_m0 - Voltage magnitude at t = 0
99: ld_betap, ld_betaq - Exponents describing the load model for the real and reactive parts
101: Note: All loads currently have the same characteristics.
102: */
103: const PetscScalar PD0[3] = {1.25, 0.9, 1.0};
104: const PetscScalar QD0[3] = {0.5, 0.3, 0.35};
105: const PetscInt ld_nsegsp[3] = {3, 3, 3};
106: const PetscScalar ld_alphap[3] = {1.0, 0.0, 0.0};
107: const PetscScalar ld_betap[3] = {2.0, 1.0, 0.0};
108: const PetscInt ld_nsegsq[3] = {3, 3, 3};
109: const PetscScalar ld_alphaq[3] = {1.0, 0.0, 0.0};
110: const PetscScalar ld_betaq[3] = {2.0, 1.0, 0.0};
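/*
   Worked example: with the weights and exponents above (ld_alphap = {1.0,0.0,0.0},
   ld_betap = {2.0,1.0,0.0}) the composite model reduces to a single constant-impedance
   segment, i.e. for the real power at load i

       P(t) = 1.0 * PD0[i] * (V_m(t)/V_m0)^2

   and analogously for the reactive power; the constant-current (exponent 1) and
   constant-power (exponent 0) segments carry zero weight here.
*/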
112: typedef struct {
113: DM dmgen, dmnet; /* DMs to manage generator and network subsystem */
114: DM dmpgrid; /* Composite DM to manage the entire power grid */
115: Mat Ybus; /* Network admittance matrix */
116: Vec V0; /* Initial voltage vector (Power flow solution) */
117: PetscReal tfaulton, tfaultoff; /* Fault on and off times */
118: PetscInt faultbus; /* Fault bus */
119: PetscScalar Rfault;
120: PetscReal t0, tmax;
121: PetscInt neqs_gen, neqs_net, neqs_pgrid;
122: Mat Sol; /* Matrix to save solution at each time step */
123: PetscInt stepnum;
124: PetscReal t;
125: SNES snes_alg;
126: IS is_diff; /* indices for differential equations */
127: IS is_alg; /* indices for algebraic equations */
128: PetscBool setisdiff; /* TS computes truncation error based only on the differential variables */
129: PetscBool semiexplicit; /* If this flag is set then a semi-explicit approach is used, with the time stepping done by TSRK */
130: } Userctx;
132: /*
133: The first two events are for the fault-on and fault-off times, respectively. The remaining events
134: check the min/max limits on the state variable VR. A non-windup limiter is used for
135: the VR limits.
136: */
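/*
   Layout of the event function array (2*ngen + 2 entries in total):
     fvalue[0]             = t - tfaulton    (fault-on time)
     fvalue[1]             = t - tfaultoff   (fault-off time)
     fvalue[2 + 2*i]       upper-limit (VRMAX) check for generator i
     fvalue[2 + 2*i + 1]   lower-limit (VRMIN) check for generator i
   When VR of generator i is already held at a limit, the corresponding entry switches to a
   rate expression that is used to decide when the variable may be released from the limit.
*/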
137: PetscErrorCode EventFunction(TS ts, PetscReal t, Vec X, PetscScalar *fvalue, void *ctx)
138: {
139: Userctx *user = (Userctx *)ctx;
140: Vec Xgen, Xnet;
141: PetscInt i, idx = 0;
142: const PetscScalar *xgen, *xnet;
143: PetscScalar Efd, RF, VR, Vr, Vi, Vm;
145: PetscFunctionBegin;
147: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
148: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
150: PetscCall(VecGetArrayRead(Xgen, &xgen));
151: PetscCall(VecGetArrayRead(Xnet, &xnet));
153: /* Event for fault-on time */
154: fvalue[0] = t - user->tfaulton;
155: /* Event for fault-off time */
156: fvalue[1] = t - user->tfaultoff;
158: for (i = 0; i < ngen; i++) {
159: Efd = xgen[idx + 6];
160: RF = xgen[idx + 7];
161: VR = xgen[idx + 8];
163: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
164: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
165: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
167: if (!VRatmax[i]) {
168: fvalue[2 + 2 * i] = VRMAX[i] - VR;
169: } else {
170: fvalue[2 + 2 * i] = (VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i];
171: }
172: if (!VRatmin[i]) {
173: fvalue[2 + 2 * i + 1] = VRMIN[i] - VR;
174: } else {
175: fvalue[2 + 2 * i + 1] = (VR - KA[i] * RF + KA[i] * KF[i] * Efd / TF[i] - KA[i] * (Vref[i] - Vm)) / TA[i];
176: }
177: idx = idx + 9;
178: }
179: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
180: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
182: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
184: PetscFunctionReturn(PETSC_SUCCESS);
185: }
187: PetscErrorCode PostEventFunction(TS ts, PetscInt nevents, PetscInt event_list[], PetscReal t, Vec X, PetscBool forwardsolve, void *ctx)
188: {
189: Userctx *user = (Userctx *)ctx;
190: Vec Xgen, Xnet;
191: PetscScalar *xgen, *xnet;
192: PetscInt row_loc, col_loc;
193: PetscScalar val;
194: PetscInt i, j, idx = 0, event_num;
195: PetscScalar fvalue;
196: PetscScalar Efd, RF, VR;
197: PetscScalar Vr, Vi, Vm;
199: PetscFunctionBegin;
201: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
202: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
204: PetscCall(VecGetArray(Xgen, &xgen));
205: PetscCall(VecGetArray(Xnet, &xnet));
207: for (i = 0; i < nevents; i++) {
208: if (event_list[i] == 0) {
209: /* Apply disturbance - resistive fault at user->faultbus */
210: /* This is done by adding shunt conductance to the diagonal location
211: in the Ybus matrix */
212: row_loc = 2 * user->faultbus;
213: col_loc = 2 * user->faultbus + 1; /* Location for G */
214: val = 1 / user->Rfault;
215: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
216: row_loc = 2 * user->faultbus + 1;
217: col_loc = 2 * user->faultbus; /* Location for G */
218: val = 1 / user->Rfault;
219: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
221: PetscCall(MatAssemblyBegin(user->Ybus, MAT_FINAL_ASSEMBLY));
222: PetscCall(MatAssemblyEnd(user->Ybus, MAT_FINAL_ASSEMBLY));
224: /* Solve the algebraic equations */
225: PetscCall(SNESSolve(user->snes_alg, NULL, X));
226: } else if (event_list[i] == 1) {
227: /* Remove the fault */
228: row_loc = 2 * user->faultbus;
229: col_loc = 2 * user->faultbus + 1;
230: val = -1 / user->Rfault;
231: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
232: row_loc = 2 * user->faultbus + 1;
233: col_loc = 2 * user->faultbus;
234: val = -1 / user->Rfault;
235: PetscCall(MatSetValues(user->Ybus, 1, &row_loc, 1, &col_loc, &val, ADD_VALUES));
237: PetscCall(MatAssemblyBegin(user->Ybus, MAT_FINAL_ASSEMBLY));
238: PetscCall(MatAssemblyEnd(user->Ybus, MAT_FINAL_ASSEMBLY));
240: /* Solve the algebraic equations */
241: PetscCall(SNESSolve(user->snes_alg, NULL, X));
243: /* Check the VR derivatives and reset flags if needed */
244: for (j = 0; j < ngen; j++) { /* a separate index j is used so the outer loop counter i over event_list is not clobbered */
245: Efd = xgen[idx + 6];
246: RF = xgen[idx + 7];
247: VR = xgen[idx + 8];
249: Vr = xnet[2 * gbus[j]]; /* Real part of generator terminal voltage */
250: Vi = xnet[2 * gbus[j] + 1]; /* Imaginary part of the generator terminal voltage */
251: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
253: if (VRatmax[j]) {
254: fvalue = (VR - KA[j] * RF + KA[j] * KF[j] * Efd / TF[j] - KA[j] * (Vref[j] - Vm)) / TA[j];
255: if (fvalue < 0) {
256: VRatmax[j] = 0;
257: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: dVR_dt went negative on fault clearing at time %g\n", j, (double)t));
258: }
259: }
260: if (VRatmin[j]) {
261: fvalue = (VR - KA[j] * RF + KA[j] * KF[j] * Efd / TF[j] - KA[j] * (Vref[j] - Vm)) / TA[j];
263: if (fvalue > 0) {
264: VRatmin[j] = 0;
265: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: dVR_dt went positive on fault clearing at time %g\n", j, (double)t));
266: }
267: }
268: idx = idx + 9;
269: }
270: } else {
271: idx = (event_list[i] - 2) / 2;
272: event_num = (event_list[i] - 2) % 2;
273: if (event_num == 0) { /* Max VR */
274: if (!VRatmax[idx]) {
275: VRatmax[idx] = 1;
276: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: hit upper limit at time %g\n", idx, (double)t));
277: } else {
278: VRatmax[idx] = 0;
279: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: freeing variable as dVR_dt is negative at time %g\n", idx, (double)t));
280: }
281: } else {
282: if (!VRatmin[idx]) {
283: VRatmin[idx] = 1;
284: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: hit lower limit at time %g\n", idx, (double)t));
285: } else {
286: VRatmin[idx] = 0;
287: PetscCall(PetscPrintf(PETSC_COMM_SELF, "VR[%" PetscInt_FMT "]: freeing variable as dVR_dt is positive at time %g\n", idx, (double)t));
288: }
289: }
290: }
291: }
293: PetscCall(VecRestoreArray(Xgen, &xgen));
294: PetscCall(VecRestoreArray(Xnet, &xnet));
296: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
298: PetscFunctionReturn(PETSC_SUCCESS);
299: }
301: /* Converts from machine frame (dq) to network (phase a real,imag) reference frame */
302: PetscErrorCode dq2ri(PetscScalar Fd, PetscScalar Fq, PetscScalar delta, PetscScalar *Fr, PetscScalar *Fi)
303: {
304: PetscFunctionBegin;
305: *Fr = Fd * PetscSinScalar(delta) + Fq * PetscCosScalar(delta);
306: *Fi = -Fd * PetscCosScalar(delta) + Fq * PetscSinScalar(delta);
307: PetscFunctionReturn(PETSC_SUCCESS);
308: }
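/* In matrix form, dq2ri() above applies the rotation

     [Fr]   [ sin(delta)  cos(delta)] [Fd]
     [Fi] = [-cos(delta)  sin(delta)] [Fq]

   and ri2dq() below applies its transpose, which is also its inverse since the
   matrix is orthogonal. */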
310: /* Converts from network frame (phase a real,imag) to machine (dq) reference frame */
311: PetscErrorCode ri2dq(PetscScalar Fr, PetscScalar Fi, PetscScalar delta, PetscScalar *Fd, PetscScalar *Fq)
312: {
313: PetscFunctionBegin;
314: *Fd = Fr * PetscSinScalar(delta) - Fi * PetscCosScalar(delta);
315: *Fq = Fr * PetscCosScalar(delta) + Fi * PetscSinScalar(delta);
316: PetscFunctionReturn(PETSC_SUCCESS);
317: }
319: /* Saves the solution at each time to a matrix */
320: PetscErrorCode SaveSolution(TS ts)
321: {
322: Userctx *user;
323: Vec X;
324: const PetscScalar *x;
325: PetscScalar *mat;
326: PetscInt idx;
327: PetscReal t;
329: PetscFunctionBegin;
330: PetscCall(TSGetApplicationContext(ts, &user));
331: PetscCall(TSGetTime(ts, &t));
332: PetscCall(TSGetSolution(ts, &X));
333: idx = user->stepnum * (user->neqs_pgrid + 1);
334: PetscCall(MatDenseGetArray(user->Sol, &mat));
335: PetscCall(VecGetArrayRead(X, &x));
336: mat[idx] = t;
337: PetscCall(PetscArraycpy(mat + idx + 1, x, user->neqs_pgrid));
338: PetscCall(MatDenseRestoreArray(user->Sol, &mat));
339: PetscCall(VecRestoreArrayRead(X, &x));
340: user->stepnum++;
341: PetscFunctionReturn(PETSC_SUCCESS);
342: }
344: PetscErrorCode SetInitialGuess(Vec X, Userctx *user)
345: {
346: Vec Xgen, Xnet;
347: PetscScalar *xgen;
348: const PetscScalar *xnet;
349: PetscInt i, idx = 0;
350: PetscScalar Vr, Vi, IGr, IGi, Vm, Vm2;
351: PetscScalar Eqp, Edp, delta;
352: PetscScalar Efd, RF, VR; /* Exciter variables */
353: PetscScalar Id, Iq; /* Generator dq axis currents */
354: PetscScalar theta, Vd, Vq, SE;
356: PetscFunctionBegin;
357: M[0] = 2 * H[0] / w_s;
358: M[1] = 2 * H[1] / w_s;
359: M[2] = 2 * H[2] / w_s;
360: D[0] = 0.1 * M[0];
361: D[1] = 0.1 * M[1];
362: D[2] = 0.1 * M[2];
364: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
366: /* Network subsystem initialization */
367: PetscCall(VecCopy(user->V0, Xnet));
369: /* Generator subsystem initialization */
370: PetscCall(VecGetArrayWrite(Xgen, &xgen));
371: PetscCall(VecGetArrayRead(Xnet, &xnet));
373: for (i = 0; i < ngen; i++) {
374: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
375: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
376: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
377: Vm2 = Vm * Vm;
378: IGr = (Vr * PG[i] + Vi * QG[i]) / Vm2;
379: IGi = (Vi * PG[i] - Vr * QG[i]) / Vm2;
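/* With the Rs = 0 machine data used here, the rotor angle computed below is the angle of the
   internal voltage phasor E = V + j*Xq*I = (Vr - Xq*IGi) + j*(Vi + Xq*IGr), following the
   standard steady-state initialization procedure (cf. Sauer and Pai, Chapter 7). */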
381: delta = PetscAtan2Real(Vi + Xq[i] * IGr, Vr - Xq[i] * IGi); /* Machine angle */
383: theta = PETSC_PI / 2.0 - delta;
385: Id = IGr * PetscCosScalar(theta) - IGi * PetscSinScalar(theta); /* d-axis stator current */
386: Iq = IGr * PetscSinScalar(theta) + IGi * PetscCosScalar(theta); /* q-axis stator current */
388: Vd = Vr * PetscCosScalar(theta) - Vi * PetscSinScalar(theta);
389: Vq = Vr * PetscSinScalar(theta) + Vi * PetscCosScalar(theta);
391: Edp = Vd + Rs[i] * Id - Xqp[i] * Iq; /* d-axis transient EMF */
392: Eqp = Vq + Rs[i] * Iq + Xdp[i] * Id; /* q-axis transient EMF */
394: TM[i] = PG[i];
396: /* The first six generator variables are ordered as [Eqp,Edp,delta,w,Id,Iq]; the exciter variables [Efd,RF,VR] follow below */
397: xgen[idx] = Eqp;
398: xgen[idx + 1] = Edp;
399: xgen[idx + 2] = delta;
400: xgen[idx + 3] = w_s;
402: idx = idx + 4;
404: xgen[idx] = Id;
405: xgen[idx + 1] = Iq;
407: idx = idx + 2;
409: /* Exciter */
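/* The exciter states are initialized at steady state, i.e. by setting the exciter
   differential equations (see ResidualFunction) to zero:
     0 = (-KE*Efd - SE + VR)/TE                              =>  VR   = KE*Efd + SE
     0 = (-RF + KF*Efd/TF)/TF                                =>  RF   = KF*Efd/TF
     0 = (-VR + KA*RF - KA*KF*Efd/TF + KA*(Vref - Vm))/TA    =>  Vref = Vm + VR/KA
   where the RF terms in the last equation cancel once RF = KF*Efd/TF. */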
410: Efd = Eqp + (Xd[i] - Xdp[i]) * Id;
411: SE = k1[i] * PetscExpScalar(k2[i] * Efd);
412: VR = KE[i] * Efd + SE;
413: RF = KF[i] * Efd / TF[i];
415: xgen[idx] = Efd;
416: xgen[idx + 1] = RF;
417: xgen[idx + 2] = VR;
419: Vref[i] = Vm + (VR / KA[i]);
421: VRatmin[i] = VRatmax[i] = 0;
423: idx = idx + 3;
424: }
425: PetscCall(VecRestoreArrayWrite(Xgen, &xgen));
426: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
428: /* PetscCall(VecView(Xgen,0)); */
429: PetscCall(DMCompositeGather(user->dmpgrid, INSERT_VALUES, X, Xgen, Xnet));
430: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
431: PetscFunctionReturn(PETSC_SUCCESS);
432: }
434: /* Computes F = [f(x,y);g(x,y)] */
435: PetscErrorCode ResidualFunction(Vec X, Vec F, Userctx *user)
436: {
437: Vec Xgen, Xnet, Fgen, Fnet;
438: const PetscScalar *xgen, *xnet;
439: PetscScalar *fgen, *fnet;
440: PetscInt i, idx = 0;
441: PetscScalar Vr, Vi, Vm, Vm2;
442: PetscScalar Eqp, Edp, delta, w; /* Generator variables */
443: PetscScalar Efd, RF, VR; /* Exciter variables */
444: PetscScalar Id, Iq; /* Generator dq axis currents */
445: PetscScalar Vd, Vq, SE;
446: PetscScalar IGr, IGi, IDr, IDi;
447: PetscScalar Zdq_inv[4], det;
448: PetscScalar PD, QD, Vm0, *v0;
449: PetscInt k;
451: PetscFunctionBegin;
452: PetscCall(VecZeroEntries(F));
453: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
454: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Fgen, &Fnet));
455: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
456: PetscCall(DMCompositeScatter(user->dmpgrid, F, Fgen, Fnet));
458: /* Network current balance residual Y*V - IG + IL = 0. Only Y*V is added here.
459: The generator current injection IG is subtracted and the load current injection IL is added in the loops below
460: */
461: /* Note that the values in Ybus are stored assuming the imaginary current balance
462: equation is ordered first followed by real current balance equation for each bus.
463: Thus imaginary current contribution goes in location 2*i, and
464: real current contribution in 2*i+1
465: */
466: PetscCall(MatMult(user->Ybus, Xnet, Fnet));
468: PetscCall(VecGetArrayRead(Xgen, &xgen));
469: PetscCall(VecGetArrayRead(Xnet, &xnet));
470: PetscCall(VecGetArrayWrite(Fgen, &fgen));
471: PetscCall(VecGetArrayWrite(Fnet, &fnet));
473: /* Generator subsystem */
474: for (i = 0; i < ngen; i++) {
475: Eqp = xgen[idx];
476: Edp = xgen[idx + 1];
477: delta = xgen[idx + 2];
478: w = xgen[idx + 3];
479: Id = xgen[idx + 4];
480: Iq = xgen[idx + 5];
481: Efd = xgen[idx + 6];
482: RF = xgen[idx + 7];
483: VR = xgen[idx + 8];
485: /* Generator differential equations */
486: fgen[idx] = (-Eqp - (Xd[i] - Xdp[i]) * Id + Efd) / Td0p[i];
487: fgen[idx + 1] = (-Edp + (Xq[i] - Xqp[i]) * Iq) / Tq0p[i];
488: fgen[idx + 2] = w - w_s;
489: fgen[idx + 3] = (TM[i] - Edp * Id - Eqp * Iq - (Xqp[i] - Xdp[i]) * Id * Iq - D[i] * (w - w_s)) / M[i];
491: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
492: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
494: PetscCall(ri2dq(Vr, Vi, delta, &Vd, &Vq));
495: /* Algebraic equations for stator currents */
496: det = Rs[i] * Rs[i] + Xdp[i] * Xqp[i];
498: Zdq_inv[0] = Rs[i] / det;
499: Zdq_inv[1] = Xqp[i] / det;
500: Zdq_inv[2] = -Xdp[i] / det;
501: Zdq_inv[3] = Rs[i] / det;
503: fgen[idx + 4] = Zdq_inv[0] * (-Edp + Vd) + Zdq_inv[1] * (-Eqp + Vq) + Id;
504: fgen[idx + 5] = Zdq_inv[2] * (-Edp + Vd) + Zdq_inv[3] * (-Eqp + Vq) + Iq;
506: /* Add generator current injection to network */
507: PetscCall(dq2ri(Id, Iq, delta, &IGr, &IGi));
509: fnet[2 * gbus[i]] -= IGi;
510: fnet[2 * gbus[i] + 1] -= IGr;
512: Vm = PetscSqrtScalar(Vd * Vd + Vq * Vq);
514: SE = k1[i] * PetscExpScalar(k2[i] * Efd);
516: /* Exciter differential equations */
517: fgen[idx + 6] = (-KE[i] * Efd - SE + VR) / TE[i];
518: fgen[idx + 7] = (-RF + KF[i] * Efd / TF[i]) / TF[i];
519: if (VRatmax[i]) fgen[idx + 8] = VR - VRMAX[i];
520: else if (VRatmin[i]) fgen[idx + 8] = VRMIN[i] - VR;
521: else fgen[idx + 8] = (-VR + KA[i] * RF - KA[i] * KF[i] * Efd / TF[i] + KA[i] * (Vref[i] - Vm)) / TA[i];
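/* Non-windup limiter: while VR is held at a limit, its differential equation is replaced by
   the algebraic constraint VR - VRMAX = 0 (or VRMIN - VR = 0); the matching Jacobian row in
   ResidualJacobian() is then just +1 (or -1) on the diagonal. */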
523: idx = idx + 9;
524: }
526: PetscCall(VecGetArray(user->V0, &v0));
527: for (i = 0; i < nload; i++) {
528: Vr = xnet[2 * lbus[i]]; /* Real part of load bus voltage */
529: Vi = xnet[2 * lbus[i] + 1]; /* Imaginary part of the load bus voltage */
530: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
531: Vm2 = Vm * Vm;
532: Vm0 = PetscSqrtScalar(v0[2 * lbus[i]] * v0[2 * lbus[i]] + v0[2 * lbus[i] + 1] * v0[2 * lbus[i] + 1]);
533: PD = QD = 0.0;
534: for (k = 0; k < ld_nsegsp[i]; k++) PD += ld_alphap[k] * PD0[i] * PetscPowScalar((Vm / Vm0), ld_betap[k]);
535: for (k = 0; k < ld_nsegsq[i]; k++) QD += ld_alphaq[k] * QD0[i] * PetscPowScalar((Vm / Vm0), ld_betaq[k]);
537: /* Load currents */
538: IDr = (PD * Vr + QD * Vi) / Vm2;
539: IDi = (-QD * Vr + PD * Vi) / Vm2;
541: fnet[2 * lbus[i]] += IDi;
542: fnet[2 * lbus[i] + 1] += IDr;
543: }
544: PetscCall(VecRestoreArray(user->V0, &v0));
546: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
547: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
548: PetscCall(VecRestoreArrayWrite(Fgen, &fgen));
549: PetscCall(VecRestoreArrayWrite(Fnet, &fnet));
551: PetscCall(DMCompositeGather(user->dmpgrid, INSERT_VALUES, F, Fgen, Fnet));
552: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
553: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Fgen, &Fnet));
554: PetscFunctionReturn(PETSC_SUCCESS);
555: }
557: /* f(x,y)
558: g(x,y)
559: */
560: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec X, Vec F, void *ctx)
561: {
562: Userctx *user = (Userctx *)ctx;
564: PetscFunctionBegin;
565: user->t = t;
566: PetscCall(ResidualFunction(X, F, user));
567: PetscFunctionReturn(PETSC_SUCCESS);
568: }
570: /* f(x,y) - \dot{x}
571: g(x,y) = 0
572: */
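/*
   The residual assembled below is

       F = [ \dot{x} - f(x,y) ]   (differential rows)
           [        - g(x,y)  ]   (algebraic rows)

   For each generator, local positions 0-3 (Eqp,Edp,delta,w) and 6-8 (Efd,RF,VR) of its
   9-state block are differential and receive the \dot{x} contribution added in the loop;
   positions 4-5 (Id,Iq) and all network variables are algebraic and are left untouched.
*/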
573: PetscErrorCode IFunction(TS ts, PetscReal t, Vec X, Vec Xdot, Vec F, void *ctx)
574: {
575: PetscScalar *f;
576: const PetscScalar *xdot;
577: PetscInt i;
579: PetscFunctionBegin;
580: PetscCall(RHSFunction(ts, t, X, F, ctx));
581: PetscCall(VecScale(F, -1.0));
582: PetscCall(VecGetArray(F, &f));
583: PetscCall(VecGetArrayRead(Xdot, &xdot));
584: for (i = 0; i < ngen; i++) {
585: f[9 * i] += xdot[9 * i];
586: f[9 * i + 1] += xdot[9 * i + 1];
587: f[9 * i + 2] += xdot[9 * i + 2];
588: f[9 * i + 3] += xdot[9 * i + 3];
589: f[9 * i + 6] += xdot[9 * i + 6];
590: f[9 * i + 7] += xdot[9 * i + 7];
591: f[9 * i + 8] += xdot[9 * i + 8];
592: }
593: PetscCall(VecRestoreArray(F, &f));
594: PetscCall(VecRestoreArrayRead(Xdot, &xdot));
595: PetscFunctionReturn(PETSC_SUCCESS);
596: }
598: /* This function is used for solving the algebraic system only during fault on and
599: off times. It computes the entire F and then zeros out the part corresponding to
600: differential equations
601: F = [0;g(y)];
602: */
603: PetscErrorCode AlgFunction(SNES snes, Vec X, Vec F, void *ctx)
604: {
605: Userctx *user = (Userctx *)ctx;
606: PetscScalar *f;
607: PetscInt i;
609: PetscFunctionBegin;
610: PetscCall(ResidualFunction(X, F, user));
611: PetscCall(VecGetArray(F, &f));
612: for (i = 0; i < ngen; i++) {
613: f[9 * i] = 0;
614: f[9 * i + 1] = 0;
615: f[9 * i + 2] = 0;
616: f[9 * i + 3] = 0;
617: f[9 * i + 6] = 0;
618: f[9 * i + 7] = 0;
619: f[9 * i + 8] = 0;
620: }
621: PetscCall(VecRestoreArray(F, &f));
622: PetscFunctionReturn(PETSC_SUCCESS);
623: }
625: PetscErrorCode PostStage(TS ts, PetscReal t, PetscInt i, Vec *X)
626: {
627: Userctx *user;
629: PetscFunctionBegin;
630: PetscCall(TSGetApplicationContext(ts, &user));
631: PetscCall(SNESSolve(user->snes_alg, NULL, X[i]));
632: PetscFunctionReturn(PETSC_SUCCESS);
633: }
635: PetscErrorCode PostEvaluate(TS ts)
636: {
637: Userctx *user;
638: Vec X;
640: PetscFunctionBegin;
641: PetscCall(TSGetApplicationContext(ts, &user));
642: PetscCall(TSGetSolution(ts, &X));
643: PetscCall(SNESSolve(user->snes_alg, NULL, X));
644: PetscFunctionReturn(PETSC_SUCCESS);
645: }
647: PetscErrorCode PreallocateJacobian(Mat J, Userctx *user)
648: {
649: PetscInt *d_nnz;
650: PetscInt i, idx = 0, start = 0;
651: PetscInt ncols;
653: PetscFunctionBegin;
654: PetscCall(PetscMalloc1(user->neqs_pgrid, &d_nnz));
655: for (i = 0; i < user->neqs_pgrid; i++) d_nnz[i] = 0;
656: /* Generator subsystem */
657: for (i = 0; i < ngen; i++) {
658: d_nnz[idx] += 3;
659: d_nnz[idx + 1] += 2;
660: d_nnz[idx + 2] += 2;
661: d_nnz[idx + 3] += 5;
662: d_nnz[idx + 4] += 6;
663: d_nnz[idx + 5] += 6;
665: d_nnz[user->neqs_gen + 2 * gbus[i]] += 3;
666: d_nnz[user->neqs_gen + 2 * gbus[i] + 1] += 3;
668: d_nnz[idx + 6] += 2;
669: d_nnz[idx + 7] += 2;
670: d_nnz[idx + 8] += 5;
672: idx = idx + 9;
673: }
674: start = user->neqs_gen;
676: for (i = 0; i < nbus; i++) {
677: PetscCall(MatGetRow(user->Ybus, 2 * i, &ncols, NULL, NULL));
678: d_nnz[start + 2 * i] += ncols;
679: d_nnz[start + 2 * i + 1] += ncols;
680: PetscCall(MatRestoreRow(user->Ybus, 2 * i, &ncols, NULL, NULL));
681: }
682: PetscCall(MatSeqAIJSetPreallocation(J, 0, d_nnz));
683: PetscCall(PetscFree(d_nnz));
684: PetscFunctionReturn(PETSC_SUCCESS);
685: }
687: /*
688: J = [df_dx, df_dy
689: dg_dx, dg_dy]
690: */
691: PetscErrorCode ResidualJacobian(Vec X, Mat J, Mat B, void *ctx)
692: {
693: Userctx *user = (Userctx *)ctx;
694: Vec Xgen, Xnet;
695: const PetscScalar *xgen, *xnet;
696: PetscInt i, idx = 0;
697: PetscScalar Vr, Vi, Vm, Vm2;
698: PetscScalar Eqp, Edp, delta; /* Generator variables */
699: PetscScalar Efd;
700: PetscScalar Id, Iq; /* Generator dq axis currents */
701: PetscScalar Vd, Vq;
702: PetscScalar val[10];
703: PetscInt row[2], col[10];
704: PetscInt net_start = user->neqs_gen;
705: PetscScalar Zdq_inv[4], det;
706: PetscScalar dVd_dVr, dVd_dVi, dVq_dVr, dVq_dVi, dVd_ddelta, dVq_ddelta;
707: PetscScalar dIGr_ddelta, dIGi_ddelta, dIGr_dId, dIGr_dIq, dIGi_dId, dIGi_dIq;
708: PetscScalar dSE_dEfd;
709: PetscScalar dVm_dVd, dVm_dVq, dVm_dVr, dVm_dVi;
710: PetscInt ncols;
711: const PetscInt *cols;
712: const PetscScalar *yvals;
713: PetscInt k;
714: PetscScalar PD, QD, Vm0, Vm4;
715: const PetscScalar *v0;
716: PetscScalar dPD_dVr, dPD_dVi, dQD_dVr, dQD_dVi;
717: PetscScalar dIDr_dVr, dIDr_dVi, dIDi_dVr, dIDi_dVi;
719: PetscFunctionBegin;
720: PetscCall(MatZeroEntries(B));
721: PetscCall(DMCompositeGetLocalVectors(user->dmpgrid, &Xgen, &Xnet));
722: PetscCall(DMCompositeScatter(user->dmpgrid, X, Xgen, Xnet));
724: PetscCall(VecGetArrayRead(Xgen, &xgen));
725: PetscCall(VecGetArrayRead(Xnet, &xnet));
727: /* Generator subsystem */
728: for (i = 0; i < ngen; i++) {
729: Eqp = xgen[idx];
730: Edp = xgen[idx + 1];
731: delta = xgen[idx + 2];
732: Id = xgen[idx + 4];
733: Iq = xgen[idx + 5];
734: Efd = xgen[idx + 6];
736: /* fgen[idx] = (-Eqp - (Xd[i] - Xdp[i])*Id + Efd)/Td0p[i]; */
737: row[0] = idx;
738: col[0] = idx;
739: col[1] = idx + 4;
740: col[2] = idx + 6;
741: val[0] = -1 / Td0p[i];
742: val[1] = -(Xd[i] - Xdp[i]) / Td0p[i];
743: val[2] = 1 / Td0p[i];
745: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
747: /* fgen[idx+1] = (-Edp + (Xq[i] - Xqp[i])*Iq)/Tq0p[i]; */
748: row[0] = idx + 1;
749: col[0] = idx + 1;
750: col[1] = idx + 5;
751: val[0] = -1 / Tq0p[i];
752: val[1] = (Xq[i] - Xqp[i]) / Tq0p[i];
753: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
755: /* fgen[idx+2] = w - w_s; */
756: row[0] = idx + 2;
757: col[0] = idx + 2;
758: col[1] = idx + 3;
759: val[0] = 0;
760: val[1] = 1;
761: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
763: /* fgen[idx+3] = (TM[i] - Edp*Id - Eqp*Iq - (Xqp[i] - Xdp[i])*Id*Iq - D[i]*(w - w_s))/M[i]; */
764: row[0] = idx + 3;
765: col[0] = idx;
766: col[1] = idx + 1;
767: col[2] = idx + 3;
768: col[3] = idx + 4;
769: col[4] = idx + 5;
770: val[0] = -Iq / M[i];
771: val[1] = -Id / M[i];
772: val[2] = -D[i] / M[i];
773: val[3] = (-Edp - (Xqp[i] - Xdp[i]) * Iq) / M[i];
774: val[4] = (-Eqp - (Xqp[i] - Xdp[i]) * Id) / M[i];
775: PetscCall(MatSetValues(J, 1, row, 5, col, val, INSERT_VALUES));
777: Vr = xnet[2 * gbus[i]]; /* Real part of generator terminal voltage */
778: Vi = xnet[2 * gbus[i] + 1]; /* Imaginary part of the generator terminal voltage */
779: PetscCall(ri2dq(Vr, Vi, delta, &Vd, &Vq));
781: det = Rs[i] * Rs[i] + Xdp[i] * Xqp[i];
783: Zdq_inv[0] = Rs[i] / det;
784: Zdq_inv[1] = Xqp[i] / det;
785: Zdq_inv[2] = -Xdp[i] / det;
786: Zdq_inv[3] = Rs[i] / det;
788: dVd_dVr = PetscSinScalar(delta);
789: dVd_dVi = -PetscCosScalar(delta);
790: dVq_dVr = PetscCosScalar(delta);
791: dVq_dVi = PetscSinScalar(delta);
792: dVd_ddelta = Vr * PetscCosScalar(delta) + Vi * PetscSinScalar(delta);
793: dVq_ddelta = -Vr * PetscSinScalar(delta) + Vi * PetscCosScalar(delta);
795: /* fgen[idx+4] = Zdq_inv[0]*(-Edp + Vd) + Zdq_inv[1]*(-Eqp + Vq) + Id; */
796: row[0] = idx + 4;
797: col[0] = idx;
798: col[1] = idx + 1;
799: col[2] = idx + 2;
800: val[0] = -Zdq_inv[1];
801: val[1] = -Zdq_inv[0];
802: val[2] = Zdq_inv[0] * dVd_ddelta + Zdq_inv[1] * dVq_ddelta;
803: col[3] = idx + 4;
804: col[4] = net_start + 2 * gbus[i];
805: col[5] = net_start + 2 * gbus[i] + 1;
806: val[3] = 1;
807: val[4] = Zdq_inv[0] * dVd_dVr + Zdq_inv[1] * dVq_dVr;
808: val[5] = Zdq_inv[0] * dVd_dVi + Zdq_inv[1] * dVq_dVi;
809: PetscCall(MatSetValues(J, 1, row, 6, col, val, INSERT_VALUES));
811: /* fgen[idx+5] = Zdq_inv[2]*(-Edp + Vd) + Zdq_inv[3]*(-Eqp + Vq) + Iq; */
812: row[0] = idx + 5;
813: col[0] = idx;
814: col[1] = idx + 1;
815: col[2] = idx + 2;
816: val[0] = -Zdq_inv[3];
817: val[1] = -Zdq_inv[2];
818: val[2] = Zdq_inv[2] * dVd_ddelta + Zdq_inv[3] * dVq_ddelta;
819: col[3] = idx + 5;
820: col[4] = net_start + 2 * gbus[i];
821: col[5] = net_start + 2 * gbus[i] + 1;
822: val[3] = 1;
823: val[4] = Zdq_inv[2] * dVd_dVr + Zdq_inv[3] * dVq_dVr;
824: val[5] = Zdq_inv[2] * dVd_dVi + Zdq_inv[3] * dVq_dVi;
825: PetscCall(MatSetValues(J, 1, row, 6, col, val, INSERT_VALUES));
827: dIGr_ddelta = Id * PetscCosScalar(delta) - Iq * PetscSinScalar(delta);
828: dIGi_ddelta = Id * PetscSinScalar(delta) + Iq * PetscCosScalar(delta);
829: dIGr_dId = PetscSinScalar(delta);
830: dIGr_dIq = PetscCosScalar(delta);
831: dIGi_dId = -PetscCosScalar(delta);
832: dIGi_dIq = PetscSinScalar(delta);
834: /* fnet[2*gbus[i]] -= IGi; */
835: row[0] = net_start + 2 * gbus[i];
836: col[0] = idx + 2;
837: col[1] = idx + 4;
838: col[2] = idx + 5;
839: val[0] = -dIGi_ddelta;
840: val[1] = -dIGi_dId;
841: val[2] = -dIGi_dIq;
842: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
844: /* fnet[2*gbus[i]+1] -= IGr; */
845: row[0] = net_start + 2 * gbus[i] + 1;
846: col[0] = idx + 2;
847: col[1] = idx + 4;
848: col[2] = idx + 5;
849: val[0] = -dIGr_ddelta;
850: val[1] = -dIGr_dId;
851: val[2] = -dIGr_dIq;
852: PetscCall(MatSetValues(J, 1, row, 3, col, val, INSERT_VALUES));
854: Vm = PetscSqrtScalar(Vd * Vd + Vq * Vq);
856: /* fgen[idx+6] = (-KE[i]*Efd - SE + VR)/TE[i]; */
857: /* SE = k1[i]*PetscExpScalar(k2[i]*Efd); */
859: dSE_dEfd = k1[i] * k2[i] * PetscExpScalar(k2[i] * Efd);
861: row[0] = idx + 6;
862: col[0] = idx + 6;
863: col[1] = idx + 8;
864: val[0] = (-KE[i] - dSE_dEfd) / TE[i];
865: val[1] = 1 / TE[i];
866: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
868: /* Exciter differential equations */
870: /* fgen[idx+7] = (-RF + KF[i]*Efd/TF[i])/TF[i]; */
871: row[0] = idx + 7;
872: col[0] = idx + 6;
873: col[1] = idx + 7;
874: val[0] = (KF[i] / TF[i]) / TF[i];
875: val[1] = -1 / TF[i];
876: PetscCall(MatSetValues(J, 1, row, 2, col, val, INSERT_VALUES));
878: /* fgen[idx+8] = (-VR + KA[i]*RF - KA[i]*KF[i]*Efd/TF[i] + KA[i]*(Vref[i] - Vm))/TA[i]; */
879: /* Vm = (Vd^2 + Vq^2)^0.5; */
881: row[0] = idx + 8;
882: if (VRatmax[i]) {
883: col[0] = idx + 8;
884: val[0] = 1.0;
885: PetscCall(MatSetValues(J, 1, row, 1, col, val, INSERT_VALUES));
886: } else if (VRatmin[i]) {
887: col[0] = idx + 8;
888: val[0] = -1.0;
889: PetscCall(MatSetValues(J, 1, row, 1, col, val, INSERT_VALUES));
890: } else {
891: dVm_dVd = Vd / Vm;
892: dVm_dVq = Vq / Vm;
893: dVm_dVr = dVm_dVd * dVd_dVr + dVm_dVq * dVq_dVr;
894: dVm_dVi = dVm_dVd * dVd_dVi + dVm_dVq * dVq_dVi;
895: row[0] = idx + 8;
896: col[0] = idx + 6;
897: col[1] = idx + 7;
898: col[2] = idx + 8;
899: val[0] = -(KA[i] * KF[i] / TF[i]) / TA[i];
900: val[1] = KA[i] / TA[i];
901: val[2] = -1 / TA[i];
902: col[3] = net_start + 2 * gbus[i];
903: col[4] = net_start + 2 * gbus[i] + 1;
904: val[3] = -KA[i] * dVm_dVr / TA[i];
905: val[4] = -KA[i] * dVm_dVi / TA[i];
906: PetscCall(MatSetValues(J, 1, row, 5, col, val, INSERT_VALUES));
907: }
908: idx = idx + 9;
909: }
911: for (i = 0; i < nbus; i++) {
912: PetscCall(MatGetRow(user->Ybus, 2 * i, &ncols, &cols, &yvals));
913: row[0] = net_start + 2 * i;
914: for (k = 0; k < ncols; k++) {
915: col[k] = net_start + cols[k];
916: val[k] = yvals[k];
917: }
918: PetscCall(MatSetValues(J, 1, row, ncols, col, val, INSERT_VALUES));
919: PetscCall(MatRestoreRow(user->Ybus, 2 * i, &ncols, &cols, &yvals));
921: PetscCall(MatGetRow(user->Ybus, 2 * i + 1, &ncols, &cols, &yvals));
922: row[0] = net_start + 2 * i + 1;
923: for (k = 0; k < ncols; k++) {
924: col[k] = net_start + cols[k];
925: val[k] = yvals[k];
926: }
927: PetscCall(MatSetValues(J, 1, row, ncols, col, val, INSERT_VALUES));
928: PetscCall(MatRestoreRow(user->Ybus, 2 * i + 1, &ncols, &cols, &yvals));
929: }
931: PetscCall(MatAssemblyBegin(J, MAT_FLUSH_ASSEMBLY));
932: PetscCall(MatAssemblyEnd(J, MAT_FLUSH_ASSEMBLY));
934: PetscCall(VecGetArrayRead(user->V0, &v0));
935: for (i = 0; i < nload; i++) {
936: Vr = xnet[2 * lbus[i]]; /* Real part of load bus voltage */
937: Vi = xnet[2 * lbus[i] + 1]; /* Imaginary part of the load bus voltage */
938: Vm = PetscSqrtScalar(Vr * Vr + Vi * Vi);
939: Vm2 = Vm * Vm;
940: Vm4 = Vm2 * Vm2;
941: Vm0 = PetscSqrtScalar(v0[2 * lbus[i]] * v0[2 * lbus[i]] + v0[2 * lbus[i] + 1] * v0[2 * lbus[i] + 1]);
942: PD = QD = 0.0;
943: dPD_dVr = dPD_dVi = dQD_dVr = dQD_dVi = 0.0;
944: for (k = 0; k < ld_nsegsp[i]; k++) {
945: PD += ld_alphap[k] * PD0[i] * PetscPowScalar((Vm / Vm0), ld_betap[k]);
946: dPD_dVr += ld_alphap[k] * ld_betap[k] * PD0[i] * PetscPowScalar((1 / Vm0), ld_betap[k]) * Vr * PetscPowScalar(Vm, (ld_betap[k] - 2));
947: dPD_dVi += ld_alphap[k] * ld_betap[k] * PD0[i] * PetscPowScalar((1 / Vm0), ld_betap[k]) * Vi * PetscPowScalar(Vm, (ld_betap[k] - 2));
948: }
949: for (k = 0; k < ld_nsegsq[i]; k++) {
950: QD += ld_alphaq[k] * QD0[i] * PetscPowScalar((Vm / Vm0), ld_betaq[k]);
951: dQD_dVr += ld_alphaq[k] * ld_betaq[k] * QD0[i] * PetscPowScalar((1 / Vm0), ld_betaq[k]) * Vr * PetscPowScalar(Vm, (ld_betaq[k] - 2));
952: dQD_dVi += ld_alphaq[k] * ld_betaq[k] * QD0[i] * PetscPowScalar((1 / Vm0), ld_betaq[k]) * Vi * PetscPowScalar(Vm, (ld_betaq[k] - 2));
953: }
955: /* IDr = (PD*Vr + QD*Vi)/Vm2; */
956: /* IDi = (-QD*Vr + PD*Vi)/Vm2; */
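/* Applying the quotient rule to IDr = (PD*Vr + QD*Vi)/Vm2 with Vm2 = Vr^2 + Vi^2 gives

     dIDr/dVr = (dPD_dVr*Vr + dQD_dVr*Vi + PD)/Vm2 - (PD*Vr + QD*Vi)*2*Vr/Vm4

   and analogously for the remaining three partial derivatives computed below. */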
958: dIDr_dVr = (dPD_dVr * Vr + dQD_dVr * Vi + PD) / Vm2 - ((PD * Vr + QD * Vi) * 2 * Vr) / Vm4;
959: dIDr_dVi = (dPD_dVi * Vr + dQD_dVi * Vi + QD) / Vm2 - ((PD * Vr + QD * Vi) * 2 * Vi) / Vm4;
961: dIDi_dVr = (-dQD_dVr * Vr + dPD_dVr * Vi - QD) / Vm2 - ((-QD * Vr + PD * Vi) * 2 * Vr) / Vm4;
962: dIDi_dVi = (-dQD_dVi * Vr + dPD_dVi * Vi + PD) / Vm2 - ((-QD * Vr + PD * Vi) * 2 * Vi) / Vm4;
964: /* fnet[2*lbus[i]] += IDi; */
965: row[0] = net_start + 2 * lbus[i];
966: col[0] = net_start + 2 * lbus[i];
967: col[1] = net_start + 2 * lbus[i] + 1;
968: val[0] = dIDi_dVr;
969: val[1] = dIDi_dVi;
970: PetscCall(MatSetValues(J, 1, row, 2, col, val, ADD_VALUES));
971: /* fnet[2*lbus[i]+1] += IDr; */
972: row[0] = net_start + 2 * lbus[i] + 1;
973: col[0] = net_start + 2 * lbus[i];
974: col[1] = net_start + 2 * lbus[i] + 1;
975: val[0] = dIDr_dVr;
976: val[1] = dIDr_dVi;
977: PetscCall(MatSetValues(J, 1, row, 2, col, val, ADD_VALUES));
978: }
979: PetscCall(VecRestoreArrayRead(user->V0, &v0));
981: PetscCall(VecRestoreArrayRead(Xgen, &xgen));
982: PetscCall(VecRestoreArrayRead(Xnet, &xnet));
984: PetscCall(DMCompositeRestoreLocalVectors(user->dmpgrid, &Xgen, &Xnet));
986: PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
987: PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
988: PetscFunctionReturn(PETSC_SUCCESS);
989: }
991: /*
992: J = [I, 0
993: dg_dx, dg_dy]
994: */
995: PetscErrorCode AlgJacobian(SNES snes, Vec X, Mat A, Mat B, void *ctx)
996: {
997: Userctx *user = (Userctx *)ctx;
999: PetscFunctionBegin;
1000: PetscCall(ResidualJacobian(X, A, B, ctx));
1001: PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
1002: PetscCall(MatZeroRowsIS(A, user->is_diff, 1.0, NULL, NULL));
1003: PetscFunctionReturn(PETSC_SUCCESS);
1004: }
1006: /*
1007: J = [df_dx, df_dy
1008: dg_dx, dg_dy]
1009: */
1011: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec X, Mat A, Mat B, void *ctx)
1012: {
1013: Userctx *user = (Userctx *)ctx;
1015: PetscFunctionBegin;
1016: user->t = t;
1018: PetscCall(ResidualJacobian(X, A, B, user));
1020: PetscFunctionReturn(PETSC_SUCCESS);
1021: }
1023: /*
1024: J = [aI - df_dx, -df_dy
1025: -dg_dx, -dg_dy]
1026: */
1028: PetscErrorCode IJacobian(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat A, Mat B, Userctx *user)
1029: {
1030: PetscScalar atmp = (PetscScalar)a;
1031: PetscInt i, row;
1033: PetscFunctionBegin;
1034: user->t = t;
1036: PetscCall(RHSJacobian(ts, t, X, A, B, user));
1037: PetscCall(MatScale(B, -1.0));
1038: for (i = 0; i < ngen; i++) {
1039: row = 9 * i;
1040: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1041: row = 9 * i + 1;
1042: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1043: row = 9 * i + 2;
1044: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1045: row = 9 * i + 3;
1046: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1047: row = 9 * i + 6;
1048: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1049: row = 9 * i + 7;
1050: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1051: row = 9 * i + 8;
1052: PetscCall(MatSetValues(A, 1, &row, 1, &row, &atmp, ADD_VALUES));
1053: }
1054: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
1055: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
1056: PetscFunctionReturn(PETSC_SUCCESS);
1057: }
1059: int main(int argc, char **argv)
1060: {
1061: TS ts;
1062: SNES snes_alg;
1063: PetscMPIInt size;
1064: Userctx user;
1065: PetscViewer Xview, Ybusview, viewer;
1066: Vec X, F_alg;
1067: Mat J, A;
1068: PetscInt i, idx, *idx2;
1069: Vec Xdot;
1070: PetscScalar *x, *mat, *amat;
1071: const PetscScalar *rmat;
1072: Vec vatol;
1073: PetscInt *direction;
1074: PetscBool *terminate;
1075: const PetscInt *idx3;
1076: PetscScalar *vatoli;
1077: PetscInt k;
1079: PetscFunctionBeginUser;
1080: PetscCall(PetscInitialize(&argc, &argv, "petscoptions", help));
1081: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
1082: PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Only for sequential runs");
1084: user.neqs_gen = 9 * ngen; /* # eqs. for generator subsystem */
1085: user.neqs_net = 2 * nbus; /* # eqs. for network subsystem */
1086: user.neqs_pgrid = user.neqs_gen + user.neqs_net;
1088: /* Create indices for differential and algebraic equations */
1090: PetscCall(PetscMalloc1(7 * ngen, &idx2));
1091: for (i = 0; i < ngen; i++) {
1092: idx2[7 * i] = 9 * i;
1093: idx2[7 * i + 1] = 9 * i + 1;
1094: idx2[7 * i + 2] = 9 * i + 2;
1095: idx2[7 * i + 3] = 9 * i + 3;
1096: idx2[7 * i + 4] = 9 * i + 6;
1097: idx2[7 * i + 5] = 9 * i + 7;
1098: idx2[7 * i + 6] = 9 * i + 8;
1099: }
1100: PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, 7 * ngen, idx2, PETSC_COPY_VALUES, &user.is_diff));
1101: PetscCall(ISComplement(user.is_diff, 0, user.neqs_pgrid, &user.is_alg));
1102: PetscCall(PetscFree(idx2));
1104: /* Read initial voltage vector and Ybus */
1105: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "X.bin", FILE_MODE_READ, &Xview));
1106: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "Ybus.bin", FILE_MODE_READ, &Ybusview));
1108: PetscCall(VecCreate(PETSC_COMM_WORLD, &user.V0));
1109: PetscCall(VecSetSizes(user.V0, PETSC_DECIDE, user.neqs_net));
1110: PetscCall(VecLoad(user.V0, Xview));
1112: PetscCall(MatCreate(PETSC_COMM_WORLD, &user.Ybus));
1113: PetscCall(MatSetSizes(user.Ybus, PETSC_DECIDE, PETSC_DECIDE, user.neqs_net, user.neqs_net));
1114: PetscCall(MatSetType(user.Ybus, MATBAIJ));
1115: /* PetscCall(MatSetBlockSize(user.Ybus,2)); */
1116: PetscCall(MatLoad(user.Ybus, Ybusview));
1118: /* Set run time options */
1119: PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Transient stability fault options", "");
1120: {
1121: user.tfaulton = 1.0;
1122: user.tfaultoff = 1.2;
1123: user.Rfault = 0.0001;
1124: user.setisdiff = PETSC_FALSE;
1125: user.semiexplicit = PETSC_FALSE;
1126: user.faultbus = 8;
1127: PetscCall(PetscOptionsReal("-tfaulton", "", "", user.tfaulton, &user.tfaulton, NULL));
1128: PetscCall(PetscOptionsReal("-tfaultoff", "", "", user.tfaultoff, &user.tfaultoff, NULL));
1129: PetscCall(PetscOptionsInt("-faultbus", "", "", user.faultbus, &user.faultbus, NULL));
1130: user.t0 = 0.0;
1131: user.tmax = 5.0;
1132: PetscCall(PetscOptionsReal("-t0", "", "", user.t0, &user.t0, NULL));
1133: PetscCall(PetscOptionsReal("-tmax", "", "", user.tmax, &user.tmax, NULL));
1134: PetscCall(PetscOptionsBool("-setisdiff", "", "", user.setisdiff, &user.setisdiff, NULL));
1135: PetscCall(PetscOptionsBool("-dae_semiexplicit", "", "", user.semiexplicit, &user.semiexplicit, NULL));
1136: }
1137: PetscOptionsEnd();
1139: PetscCall(PetscViewerDestroy(&Xview));
1140: PetscCall(PetscViewerDestroy(&Ybusview));
1142: /* Create DMs for generator and network subsystems */
1143: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, user.neqs_gen, 1, 1, NULL, &user.dmgen));
1144: PetscCall(DMSetOptionsPrefix(user.dmgen, "dmgen_"));
1145: PetscCall(DMSetFromOptions(user.dmgen));
1146: PetscCall(DMSetUp(user.dmgen));
1147: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, user.neqs_net, 1, 1, NULL, &user.dmnet));
1148: PetscCall(DMSetOptionsPrefix(user.dmnet, "dmnet_"));
1149: PetscCall(DMSetFromOptions(user.dmnet));
1150: PetscCall(DMSetUp(user.dmnet));
1151: /* Create a composite DM packer and add the two DMs */
1152: PetscCall(DMCompositeCreate(PETSC_COMM_WORLD, &user.dmpgrid));
1153: PetscCall(DMSetOptionsPrefix(user.dmpgrid, "pgrid_"));
1154: PetscCall(DMCompositeAddDM(user.dmpgrid, user.dmgen));
1155: PetscCall(DMCompositeAddDM(user.dmpgrid, user.dmnet));
1157: PetscCall(DMCreateGlobalVector(user.dmpgrid, &X));
1159: PetscCall(MatCreate(PETSC_COMM_WORLD, &J));
1160: PetscCall(MatSetSizes(J, PETSC_DECIDE, PETSC_DECIDE, user.neqs_pgrid, user.neqs_pgrid));
1161: PetscCall(MatSetFromOptions(J));
1162: PetscCall(PreallocateJacobian(J, &user));
1164: /* Create matrix to save solutions at each time step */
1165: user.stepnum = 0;
1167: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, user.neqs_pgrid + 1, 1002, NULL, &user.Sol));
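/* Each saved time step occupies one column of the dense matrix user.Sol created above, laid out
   as [t; x] (see SaveSolution()); the 1002 columns are an upper bound on the number of steps
   that can be saved. */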
1168: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1169: Create timestepping solver context
1170: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
1171: PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
1172: PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
1173: if (user.semiexplicit) {
1174: PetscCall(TSSetType(ts, TSRK));
1175: PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &user));
1176: PetscCall(TSSetRHSJacobian(ts, J, J, RHSJacobian, &user));
1177: } else {
1178: PetscCall(TSSetType(ts, TSCN));
1179: PetscCall(TSSetEquationType(ts, TS_EQ_DAE_IMPLICIT_INDEX1));
1180: PetscCall(TSSetIFunction(ts, NULL, (TSIFunction)IFunction, &user));
1181: PetscCall(TSSetIJacobian(ts, J, J, (TSIJacobian)IJacobian, &user));
1182: }
1183: PetscCall(TSSetApplicationContext(ts, &user));
1185: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
1186: Set initial conditions
1187: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
1188: PetscCall(SetInitialGuess(X, &user));
1189: /* Just to set up the Jacobian structure */
1191: PetscCall(VecDuplicate(X, &Xdot));
1192: PetscCall(IJacobian(ts, 0.0, X, Xdot, 0.0, J, J, &user));
1193: PetscCall(VecDestroy(&Xdot));
1195: /* Save initial solution */
1197: idx = user.stepnum * (user.neqs_pgrid + 1);
1198: PetscCall(MatDenseGetArray(user.Sol, &mat));
1199: PetscCall(VecGetArray(X, &x));
1201: mat[idx] = 0.0;
1203: PetscCall(PetscArraycpy(mat + idx + 1, x, user.neqs_pgrid));
1204: PetscCall(MatDenseRestoreArray(user.Sol, &mat));
1205: PetscCall(VecRestoreArray(X, &x));
1206: user.stepnum++;
1208: PetscCall(TSSetMaxTime(ts, user.tmax));
1209: PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
1210: PetscCall(TSSetTimeStep(ts, 0.01));
1211: PetscCall(TSSetFromOptions(ts));
1212: PetscCall(TSSetPostStep(ts, SaveSolution));
1213: PetscCall(TSSetSolution(ts, X));
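/*
   Register 2*ngen + 2 events: the two fault events trigger on a positive zero crossing
   (direction = 1), the per-generator VRMAX checks on a negative crossing (direction = -1),
   and the VRMIN checks on a positive crossing (direction = 1); none of the events
   terminates the time integration.
*/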
1215: PetscCall(PetscMalloc1((2 * ngen + 2), &direction));
1216: PetscCall(PetscMalloc1((2 * ngen + 2), &terminate));
1217: direction[0] = direction[1] = 1;
1218: terminate[0] = terminate[1] = PETSC_FALSE;
1219: for (i = 0; i < ngen; i++) {
1220: direction[2 + 2 * i] = -1;
1221: direction[2 + 2 * i + 1] = 1;
1222: terminate[2 + 2 * i] = terminate[2 + 2 * i + 1] = PETSC_FALSE;
1223: }
1225: PetscCall(TSSetEventHandler(ts, 2 * ngen + 2, direction, terminate, EventFunction, PostEventFunction, (void *)&user));
1227: if (user.semiexplicit) {
1228: /* Use a semi-explicit approach with the time-stepping done by an explicit method and the
1229: algebraic part solved via PostStage and PostEvaluate callbacks
1230: */
1231: PetscCall(TSSetType(ts, TSRK));
1232: PetscCall(TSSetPostStage(ts, PostStage));
1233: PetscCall(TSSetPostEvaluate(ts, PostEvaluate));
1234: }
1236: if (user.setisdiff) {
1237: /* Create a vector of absolute tolerances; the algebraic variables get a very large tolerance and the differential variables a tight one */
1238: PetscCall(VecDuplicate(X, &vatol));
1239: PetscCall(VecSet(vatol, 100000.0));
1240: PetscCall(VecGetArray(vatol, &vatoli));
1241: PetscCall(ISGetIndices(user.is_diff, &idx3));
1242: for (k = 0; k < 7 * ngen; k++) vatoli[idx3[k]] = 1e-2;
1243: PetscCall(VecRestoreArray(vatol, &vatoli));
1244: }
1246: /* Create the nonlinear solver for solving the algebraic system */
1247: /* Note that although the algebraic system needs to be solved only for
1248: Idq and V, we reuse the entire system including xgen. The xgen
1249: variables are held constant by setting their residuals to 0 and
1250: putting a 1 on the Jacobian diagonal for xgen rows
1251: */
1253: PetscCall(VecDuplicate(X, &F_alg));
1254: PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes_alg));
1255: PetscCall(SNESSetFunction(snes_alg, F_alg, AlgFunction, &user));
1256: PetscCall(SNESSetJacobian(snes_alg, J, J, AlgJacobian, &user));
1257: PetscCall(SNESSetFromOptions(snes_alg));
1259: user.snes_alg = snes_alg;
1261: /* Solve */
1262: PetscCall(TSSolve(ts, X));
1264: PetscCall(MatAssemblyBegin(user.Sol, MAT_FINAL_ASSEMBLY));
1265: PetscCall(MatAssemblyEnd(user.Sol, MAT_FINAL_ASSEMBLY));
1267: PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, user.neqs_pgrid + 1, user.stepnum, NULL, &A));
1268: PetscCall(MatDenseGetArrayRead(user.Sol, &rmat));
1269: PetscCall(MatDenseGetArray(A, &amat));
1270: PetscCall(PetscArraycpy(amat, rmat, user.stepnum * (user.neqs_pgrid + 1)));
1271: PetscCall(MatDenseRestoreArray(A, &amat));
1272: PetscCall(MatDenseRestoreArrayRead(user.Sol, &rmat));
1273: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, "out.bin", FILE_MODE_WRITE, &viewer));
1274: PetscCall(MatView(A, viewer));
1275: PetscCall(PetscViewerDestroy(&viewer));
1276: PetscCall(MatDestroy(&A));
1278: PetscCall(PetscFree(direction));
1279: PetscCall(PetscFree(terminate));
1280: PetscCall(SNESDestroy(&snes_alg));
1281: PetscCall(VecDestroy(&F_alg));
1282: PetscCall(MatDestroy(&J));
1283: PetscCall(MatDestroy(&user.Ybus));
1284: PetscCall(MatDestroy(&user.Sol));
1285: PetscCall(VecDestroy(&X));
1286: PetscCall(VecDestroy(&user.V0));
1287: PetscCall(DMDestroy(&user.dmgen));
1288: PetscCall(DMDestroy(&user.dmnet));
1289: PetscCall(DMDestroy(&user.dmpgrid));
1290: PetscCall(ISDestroy(&user.is_diff));
1291: PetscCall(ISDestroy(&user.is_alg));
1292: PetscCall(TSDestroy(&ts));
1293: if (user.setisdiff) PetscCall(VecDestroy(&vatol));
1294: PetscCall(PetscFinalize());
1295: return 0;
1296: }
1298: /*TEST
1300: build:
1301: requires: double !complex !defined(PETSC_USE_64BIT_INDICES)
1303: test:
1304: suffix: implicit
1305: args: -ts_monitor -snes_monitor_short
1306: localrunfiles: petscoptions X.bin Ybus.bin
1308: test:
1309: suffix: semiexplicit
1310: args: -ts_monitor -snes_monitor_short -dae_semiexplicit -ts_rk_type 2a
1311: localrunfiles: petscoptions X.bin Ybus.bin
1313: test:
1314: suffix: steprestart
1315: # needs ARKIMEX methods with all implicit stages since the mass matrix is not the identity
1316: args: -ts_monitor -snes_monitor_short -ts_type arkimex -ts_arkimex_type prssp2
1317: localrunfiles: petscoptions X.bin Ybus.bin
1319: TEST*/