Actual source code: asils.c
#include <../src/tao/complementarity/impls/ssls/ssls.h>
/*
   Context for ASXLS
     -- active-set      - reduced matrices formed
                        - inherit properties of original system
     -- semismooth (S)  - function not differentiable
                        - merit function continuously differentiable
                        - Fischer-Burmeister reformulation of complementarity
                        - Billups composition for two finite bounds
     -- infeasible (I)  - iterates not guaranteed to remain within bounds
     -- feasible (F)    - iterates guaranteed to remain within bounds
     -- linesearch (LS) - Armijo rule on direction

   Many other reformulations are possible and combinations of
   feasible/infeasible and linesearch/trust region are possible.

   Basic theory
     Fischer-Burmeister reformulation is semismooth with a continuously
     differentiable merit function and strongly semismooth if F has
     Lipschitz continuous derivatives.
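
     For reference, the underlying scalar Fischer-Burmeister function is
       phi(a, b) = sqrt(a^2 + b^2) - a - b,
     which vanishes exactly when a >= 0, b >= 0, and a*b = 0.  Applied
     componentwise (with the Billups composition when both bounds are
     finite), it yields the nonsmooth system Phi(x) = 0 whose merit
     function psi(x) = 0.5*||Phi(x)||^2 is minimized below.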

     Every accumulation point generated by the algorithm is a stationary
     point for the merit function.  Stationary points of the merit function
     are solutions of the complementarity problem if
       a. the stationary point has a BD-regular subdifferential, or
       b. the Schur complement F'/F'_ff is a P_0-matrix where ff is the
          index set corresponding to the free variables.

     If one of the accumulation points has a BD-regular subdifferential then
       a. the entire sequence converges to this accumulation point at
          a local q-superlinear rate
       b. if in addition the reformulation is strongly semismooth near
          this accumulation point, then the algorithm converges at a
          local q-quadratic rate.

   The theory for the feasible version follows from the feasible descent
   algorithm framework.

   References:
+ * - Billups, "Algorithms for Complementarity Problems and Generalized
      Equations," Ph.D. thesis, University of Wisconsin-Madison, 1995.
. * - De Luca, Facchinei, Kanzow, "A Semismooth Equation Approach to the
      Solution of Nonlinear Complementarity Problems," Mathematical
      Programming, 75, 1996.
. * - Ferris, Kanzow, Munson, "Feasible Descent Algorithms for Mixed
      Complementarity Problems," Mathematical Programming, 86, 1999.
. * - Fischer, "A Special Newton-type Optimization Method," Optimization,
      24, 1992.
- * - Munson, Facchinei, Ferris, Fischer, Kanzow, "The Semismooth Algorithm
      for Large Scale Complementarity Problems," Technical Report,
      University of Wisconsin-Madison, 1999.
*/

static PetscErrorCode TaoSetUp_ASILS(Tao tao)
{
  TAO_SSLS *asls = (TAO_SSLS *)tao->data;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(tao->solution, &tao->gradient));
  PetscCall(VecDuplicate(tao->solution, &tao->stepdirection));
  PetscCall(VecDuplicate(tao->solution, &asls->ff));
  PetscCall(VecDuplicate(tao->solution, &asls->dpsi));
  PetscCall(VecDuplicate(tao->solution, &asls->da));
  PetscCall(VecDuplicate(tao->solution, &asls->db));
  PetscCall(VecDuplicate(tao->solution, &asls->t1));
  PetscCall(VecDuplicate(tao->solution, &asls->t2));
  asls->fixed    = NULL;
  asls->free     = NULL;
  asls->J_sub    = NULL;
  asls->Jpre_sub = NULL;
  asls->w        = NULL;
  asls->r1       = NULL;
  asls->r2       = NULL;
  asls->r3       = NULL;
  asls->dxfree   = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode Tao_ASLS_FunctionGradient(TaoLineSearch ls, Vec X, PetscReal *fcn, Vec G, void *ptr)
{
  Tao       tao  = (Tao)ptr;
  TAO_SSLS *asls = (TAO_SSLS *)tao->data;

  PetscFunctionBegin;
  PetscCall(TaoComputeConstraints(tao, X, tao->constraints));
  PetscCall(VecFischer(X, tao->constraints, tao->XL, tao->XU, asls->ff));
  PetscCall(VecNorm(asls->ff, NORM_2, &asls->merit));
  *fcn = 0.5 * asls->merit * asls->merit;
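
  /* The gradient of psi = 0.5*||Phi||^2 assembled below is
       grad psi = D_a Phi + J^T (D_b Phi),
     where the diagonal scalings da and db come from MatDFischer(). */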
  PetscCall(TaoComputeJacobian(tao, tao->solution, tao->jacobian, tao->jacobian_pre));
  PetscCall(MatDFischer(tao->jacobian, tao->solution, tao->constraints, tao->XL, tao->XU, asls->t1, asls->t2, asls->da, asls->db));
  PetscCall(VecPointwiseMult(asls->t1, asls->ff, asls->db));
  PetscCall(MatMultTranspose(tao->jacobian, asls->t1, G));
  PetscCall(VecPointwiseMult(asls->t1, asls->ff, asls->da));
  PetscCall(VecAXPY(G, 1.0, asls->t1));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoDestroy_ASILS(Tao tao)
{
  TAO_SSLS *ssls = (TAO_SSLS *)tao->data;

  PetscFunctionBegin;
  PetscCall(VecDestroy(&ssls->ff));
  PetscCall(VecDestroy(&ssls->dpsi));
  PetscCall(VecDestroy(&ssls->da));
  PetscCall(VecDestroy(&ssls->db));
  PetscCall(VecDestroy(&ssls->w));
  PetscCall(VecDestroy(&ssls->t1));
  PetscCall(VecDestroy(&ssls->t2));
  PetscCall(VecDestroy(&ssls->r1));
  PetscCall(VecDestroy(&ssls->r2));
  PetscCall(VecDestroy(&ssls->r3));
  PetscCall(VecDestroy(&ssls->dxfree));
  PetscCall(MatDestroy(&ssls->J_sub));
  PetscCall(MatDestroy(&ssls->Jpre_sub));
  PetscCall(ISDestroy(&ssls->fixed));
  PetscCall(ISDestroy(&ssls->free));
  PetscCall(KSPDestroy(&tao->ksp));
  PetscCall(PetscFree(tao->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoSolve_ASILS(Tao tao)
{
  TAO_SSLS                    *asls = (TAO_SSLS *)tao->data;
  PetscReal                    psi, ndpsi, normd, innerd, t = 0;
  PetscInt                     nf;
  TaoLineSearchConvergedReason ls_reason;

  PetscFunctionBegin;
  /* Assume that Setup has been called!
     Set the structure for the Jacobian and create a linear solver. */
  PetscCall(TaoComputeVariableBounds(tao));
  PetscCall(TaoLineSearchSetObjectiveAndGradientRoutine(tao->linesearch, Tao_ASLS_FunctionGradient, tao));
  PetscCall(TaoLineSearchSetObjectiveRoutine(tao->linesearch, Tao_SSLS_Function, tao));

  /* Calculate the function value and Fischer function value at the
     current iterate */
  PetscCall(TaoLineSearchComputeObjectiveAndGradient(tao->linesearch, tao->solution, &psi, asls->dpsi));
  PetscCall(VecNorm(asls->dpsi, NORM_2, &ndpsi));

  tao->reason = TAO_CONTINUE_ITERATING;
  while (1) {
    /* Check the termination criteria */
    PetscCall(PetscInfo(tao, "iter %" PetscInt_FMT ", merit: %g, ||dpsi||: %g\n", tao->niter, (double)asls->merit, (double)ndpsi));
    PetscCall(TaoLogConvergenceHistory(tao, asls->merit, ndpsi, 0.0, tao->ksp_its));
    PetscCall(TaoMonitor(tao, tao->niter, asls->merit, ndpsi, 0.0, t));
    PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
    if (TAO_CONTINUE_ITERATING != tao->reason) break;

    /* Call general purpose update function */
    PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
    tao->niter++;

    /* We are going to solve a linear system of equations.  We need to
       set the tolerances for the solve so that we maintain an asymptotic
       rate of convergence that is superlinear.
       Note: these tolerances are for the reduced system.  We really need
       to make sure that the full system satisfies the full-space conditions.

       This rule gives superlinear asymptotic convergence
       asls->atol = min(0.5, asls->merit*sqrt(asls->merit));
       asls->rtol = 0.0;

       This rule gives quadratic asymptotic convergence
       asls->atol = min(0.5, asls->merit*asls->merit);
       asls->rtol = 0.0;

       Calculate a free and fixed set of variables.  The fixed set of
       variables are those for which d_b is approximately equal to zero.
       The definition of approximately changes as we approach the solution
       to the problem.

       No one rule is guaranteed to work in all cases.  The following
       definition is based on the norm of the Jacobian matrix.  If the
       norm is large, the tolerance becomes smaller. */
    PetscCall(MatNorm(tao->jacobian, NORM_1, &asls->identifier));
    asls->identifier = PetscMin(asls->merit, 1e-2) / (1 + asls->identifier);

    PetscCall(VecSet(asls->t1, -asls->identifier));
    PetscCall(VecSet(asls->t2, asls->identifier));

    PetscCall(ISDestroy(&asls->fixed));
    PetscCall(ISDestroy(&asls->free));
    PetscCall(VecWhichBetweenOrEqual(asls->t1, asls->db, asls->t2, &asls->fixed));
    PetscCall(ISComplementVec(asls->fixed, asls->t1, &asls->free));
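    /* The partition just computed is fixed = { i : -identifier <= db_i <= identifier },
       with free as its complement. */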

    PetscCall(ISGetSize(asls->fixed, &nf));
    PetscCall(PetscInfo(tao, "Number of fixed variables: %" PetscInt_FMT "\n", nf));

    /* We now have our partition.  Now calculate the direction in the
       fixed variable space. */
    PetscCall(TaoVecGetSubVec(asls->ff, asls->fixed, tao->subset_type, 0.0, &asls->r1));
    PetscCall(TaoVecGetSubVec(asls->da, asls->fixed, tao->subset_type, 1.0, &asls->r2));
    PetscCall(VecPointwiseDivide(asls->r1, asls->r1, asls->r2));
    PetscCall(VecSet(tao->stepdirection, 0.0));
    PetscCall(VecISAXPY(tao->stepdirection, asls->fixed, 1.0, asls->r1));

    /* Our direction in the Fixed Variable Set is fixed.  Calculate the
       information needed for the step in the Free Variable Set.  To
       do this, we need to know the diagonal perturbation and the
       right hand side. */
    PetscCall(TaoVecGetSubVec(asls->da, asls->free, tao->subset_type, 0.0, &asls->r1));
    PetscCall(TaoVecGetSubVec(asls->ff, asls->free, tao->subset_type, 0.0, &asls->r2));
    PetscCall(TaoVecGetSubVec(asls->db, asls->free, tao->subset_type, 1.0, &asls->r3));
    PetscCall(VecPointwiseDivide(asls->r1, asls->r1, asls->r3));
    PetscCall(VecPointwiseDivide(asls->r2, asls->r2, asls->r3));

    /* r1 is the diagonal perturbation
       r2 is the right hand side
       r3 is no longer needed

       Now need to modify r2 for our direction choice in the fixed
       variable set:  calculate t1 = J*d, take the reduced vector
       of t1 and modify r2. */
    PetscCall(MatMult(tao->jacobian, tao->stepdirection, asls->t1));
    PetscCall(TaoVecGetSubVec(asls->t1, asls->free, tao->subset_type, 0.0, &asls->r3));
    PetscCall(VecAXPY(asls->r2, -1.0, asls->r3));
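
    /* The reduced Newton system assembled and solved below is
         (J_free,free + diag(r1)) dxfree = r2,
       the free-variable block of the Jacobian plus the diagonal perturbation,
       with r2 (already corrected for the fixed-variable step) as right-hand side. */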

    /* Calculate the reduced problem matrix and the direction */
    if (!asls->w && (tao->subset_type == TAO_SUBSET_MASK || tao->subset_type == TAO_SUBSET_MATRIXFREE)) PetscCall(VecDuplicate(tao->solution, &asls->w));
    PetscCall(TaoMatGetSubMat(tao->jacobian, asls->free, asls->w, tao->subset_type, &asls->J_sub));
    if (tao->jacobian != tao->jacobian_pre) {
      PetscCall(TaoMatGetSubMat(tao->jacobian_pre, asls->free, asls->w, tao->subset_type, &asls->Jpre_sub));
    } else {
      PetscCall(MatDestroy(&asls->Jpre_sub));
      asls->Jpre_sub = asls->J_sub;
      PetscCall(PetscObjectReference((PetscObject)(asls->Jpre_sub)));
    }
    PetscCall(MatDiagonalSet(asls->J_sub, asls->r1, ADD_VALUES));
    PetscCall(TaoVecGetSubVec(tao->stepdirection, asls->free, tao->subset_type, 0.0, &asls->dxfree));
    PetscCall(VecSet(asls->dxfree, 0.0));

    /* Calculate the reduced direction.  (Really negative of Newton
       direction.  Therefore, rest of the code uses -d.) */
    PetscCall(KSPReset(tao->ksp));
    PetscCall(KSPSetOperators(tao->ksp, asls->J_sub, asls->Jpre_sub));
    PetscCall(KSPSolve(tao->ksp, asls->r2, asls->dxfree));
    PetscCall(KSPGetIterationNumber(tao->ksp, &tao->ksp_its));
    tao->ksp_tot_its += tao->ksp_its;

    /* Add the direction in the free variables back into the real direction. */
    PetscCall(VecISAXPY(tao->stepdirection, asls->free, 1.0, asls->dxfree));

    /* Check the real direction for descent and if not, use the negative
       gradient direction. */
    PetscCall(VecNorm(tao->stepdirection, NORM_2, &normd));
    PetscCall(VecDot(tao->stepdirection, asls->dpsi, &innerd));
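
    /* The step applied later is -stepdirection, so descent requires
       innerd = <stepdirection, dpsi> > delta*||stepdirection||^rho;
       otherwise fall back to the gradient, giving a steepest descent step. */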
    if (innerd <= asls->delta * PetscPowReal(normd, asls->rho)) {
      PetscCall(PetscInfo(tao, "Gradient direction: %5.4e.\n", (double)innerd));
      PetscCall(PetscInfo(tao, "Iteration %" PetscInt_FMT ": newton direction not descent\n", tao->niter));
      PetscCall(VecCopy(asls->dpsi, tao->stepdirection));
      PetscCall(VecDot(asls->dpsi, tao->stepdirection, &innerd));
    }

    PetscCall(VecScale(tao->stepdirection, -1.0));
    innerd = -innerd;

    /* We now have a correct descent direction.  Apply a linesearch to
       find the new iterate. */
    PetscCall(TaoLineSearchSetInitialStepLength(tao->linesearch, 1.0));
    PetscCall(TaoLineSearchApply(tao->linesearch, tao->solution, &psi, asls->dpsi, tao->stepdirection, &t, &ls_reason));
    PetscCall(VecNorm(asls->dpsi, NORM_2, &ndpsi));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ---------------------------------------------------------- */
/*MC
   TAOASILS - Active-set infeasible linesearch algorithm for solving
   complementarity constraints

   Options Database Keys:
+ -tao_ssls_delta - descent test fraction
- -tao_ssls_rho - descent test power

   Level: beginner
M*/
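
/*
   Minimal usage sketch (illustrative, not part of the implementation above):
   a mixed complementarity problem is handed to this solver through the public
   Tao interface.  The routine RunASILS, the callbacks FormConstraints and
   FormJacobian, and the AppCtx structure are hypothetical placeholders for
   user-supplied code; the Tao calls themselves are the standard PETSc API.

     #include <petsctao.h>

     typedef struct {
       PetscInt n; // problem size or other user data
     } AppCtx;

     extern PetscErrorCode FormConstraints(Tao, Vec, Vec, void *);
     extern PetscErrorCode FormJacobian(Tao, Vec, Mat, Mat, void *);

     PetscErrorCode RunASILS(MPI_Comm comm, Vec x, Vec c, Mat J, Vec xl, Vec xu, AppCtx *user)
     {
       Tao tao;

       PetscFunctionBeginUser;
       PetscCall(TaoCreate(comm, &tao));
       PetscCall(TaoSetType(tao, TAOASILS));         // select this solver
       PetscCall(TaoSetSolution(tao, x));            // initial iterate / solution vector
       PetscCall(TaoSetVariableBounds(tao, xl, xu)); // bounds defining the complementarity conditions
       PetscCall(TaoSetConstraintsRoutine(tao, c, FormConstraints, user));
       PetscCall(TaoSetJacobianRoutine(tao, J, J, FormJacobian, user));
       PetscCall(TaoSetFromOptions(tao));            // picks up -tao_ssls_delta, -tao_ssls_rho, KSP options, ...
       PetscCall(TaoSolve(tao));
       PetscCall(TaoDestroy(&tao));
       PetscFunctionReturn(PETSC_SUCCESS);
     }
*/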
PETSC_EXTERN PetscErrorCode TaoCreate_ASILS(Tao tao)
{
  TAO_SSLS   *asls;
  const char *armijo_type = TAOLINESEARCHARMIJO;

  PetscFunctionBegin;
  PetscCall(PetscNew(&asls));
  tao->data                = (void *)asls;
  tao->ops->solve          = TaoSolve_ASILS;
  tao->ops->setup          = TaoSetUp_ASILS;
  tao->ops->view           = TaoView_SSLS;
  tao->ops->setfromoptions = TaoSetFromOptions_SSLS;
  tao->ops->destroy        = TaoDestroy_ASILS;
  tao->subset_type         = TAO_SUBSET_SUBVEC;
  asls->delta              = 1e-10;
  asls->rho                = 2.1;
  asls->fixed              = NULL;
  asls->free               = NULL;
  asls->J_sub              = NULL;
  asls->Jpre_sub           = NULL;
  asls->w                  = NULL;
  asls->r1                 = NULL;
  asls->r2                 = NULL;
  asls->r3                 = NULL;
  asls->t1                 = NULL;
  asls->t2                 = NULL;
  asls->dxfree             = NULL;

  asls->identifier = 1e-5;

  PetscCall(TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1));
  PetscCall(TaoLineSearchSetType(tao->linesearch, armijo_type));
  PetscCall(TaoLineSearchSetOptionsPrefix(tao->linesearch, tao->hdr.prefix));
  PetscCall(TaoLineSearchSetFromOptions(tao->linesearch));

  PetscCall(KSPCreate(((PetscObject)tao)->comm, &tao->ksp));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1));
  PetscCall(KSPSetOptionsPrefix(tao->ksp, tao->hdr.prefix));
  PetscCall(KSPSetFromOptions(tao->ksp));

  /* Override default settings (unless already changed) */
  if (!tao->max_it_changed) tao->max_it = 2000;
  if (!tao->max_funcs_changed) tao->max_funcs = 4000;
  if (!tao->gttol_changed) tao->gttol = 0;
  if (!tao->grtol_changed) tao->grtol = 0;
#if defined(PETSC_USE_REAL_SINGLE)
  if (!tao->gatol_changed) tao->gatol = 1.0e-6;
  if (!tao->fmin_changed) tao->fmin = 1.0e-4;
#else
  if (!tao->gatol_changed) tao->gatol = 1.0e-16;
  if (!tao->fmin_changed) tao->fmin = 1.0e-8;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}