Actual source code: admm.c
1: #include <../src/tao/constrained/impls/admm/admm.h>
2: #include <petsctao.h>
3: #include <petsc/private/petscimpl.h>
5: /* Updates terminating criteria
6: *
7: * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
8: *
9: * 2. Updates dual residual, d_k
10: *
11: * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^T y|| */
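/* A brief usage note (illustrative, with placeholder tolerance values): TaoSetUp_ADMM() copies any
 * user-changed tao->catol into catol_admm (which scales test 1) and tao->gatol into gatol_admm
 * (which scales test 3), so an application controls these tests through the standard Tao calls:
 *
 *   PetscCall(TaoSetConstraintTolerances(tao, 1e-6, PETSC_DEFAULT));
 *   PetscCall(TaoSetTolerances(tao, 1e-6, PETSC_DEFAULT, PETSC_DEFAULT));
 */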
13: static PetscBool cited = PETSC_FALSE;
14: static const char citation[] = "@misc{xu2017adaptive,\n"
15: " title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
16: " author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
17: " year={2017},\n"
18: " eprint={1704.02712},\n"
19: " archivePrefix={arXiv},\n"
20: " primaryClass={cs.CV}\n"
21: "} \n";
23: const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER", "REGULARIZER_SOFT_THRESH", "TaoADMMRegularizerType", "TAO_ADMM_", NULL};
24: const char *const TaoADMMUpdateTypes[] = {"UPDATE_BASIC", "UPDATE_ADAPTIVE", "UPDATE_ADAPTIVE_RELAXED", "TaoADMMUpdateType", "TAO_ADMM_", NULL};
25: const char *const TaoALMMTypes[] = {"CLASSIC", "PHR", "TaoALMMType", "TAO_ALMM_", NULL};
27: static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
28: {
29: TAO_ADMM *am = (TAO_ADMM *)tao->data;
30: PetscReal Axnorm, Bznorm, ATynorm, temp;
31: Vec tempJR, tempL;
32: Tao mis;
34: PetscFunctionBegin;
35: mis = am->subsolverX;
36: tempJR = am->workJacobianRight;
37: tempL = am->workLeft;
38: /* ATy */
39: PetscCall(TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre));
40: PetscCall(MatMultTranspose(mis->jacobian_equality, am->y, tempJR));
41: PetscCall(VecNorm(tempJR, NORM_2, &ATynorm));
42: /* dualres = mu * ||AT(Bz-Bzold)||_2 */
43: PetscCall(VecWAXPY(tempJR, -1., am->Bzold, am->Bz));
44: PetscCall(MatMultTranspose(mis->jacobian_equality, tempJR, tempL));
45: PetscCall(VecNorm(tempL, NORM_2, &am->dualres));
46: am->dualres *= am->mu;
48: /* ||Ax||_2, ||Bz||_2 */
49: PetscCall(VecNorm(am->Ax, NORM_2, &Axnorm));
50: PetscCall(VecNorm(am->Bz, NORM_2, &Bznorm));
52: /* Set catol to be catol_admm * max{||Ax||,||Bz||,||c||} *
53: * Set gatol to be gatol_admm * ||A^Ty|| *
54: * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */
55: temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm, am->const_norm));
56: PetscCall(TaoSetConstraintTolerances(tao, temp, PETSC_DEFAULT));
57: PetscCall(TaoSetTolerances(tao, am->gatol_admm * ATynorm, PETSC_DEFAULT, PETSC_DEFAULT));
58: PetscFunctionReturn(PETSC_SUCCESS);
59: }
61: /* Penalty Update for Adaptive ADMM. */
62: static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
63: {
64: TAO_ADMM *am = (TAO_ADMM *)tao->data;
65: PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
66: PetscBool hflag, gflag;
67: Vec tempJR, tempJR2;
69: PetscFunctionBegin;
70: tempJR = am->workJacobianRight;
71: tempJR2 = am->workJacobianRight2;
72: hflag = PETSC_FALSE;
73: gflag = PETSC_FALSE;
74: a_k = -1;
75: b_k = -1;
77: PetscCall(VecWAXPY(tempJR, -1., am->Axold, am->Ax));
78: PetscCall(VecWAXPY(tempJR2, -1., am->yhatold, am->yhat));
79: PetscCall(VecNorm(tempJR, NORM_2, &Axdiff_norm));
80: PetscCall(VecNorm(tempJR2, NORM_2, &yhatdiff_norm));
81: PetscCall(VecDot(tempJR, tempJR2, &Axyhat));
83: PetscCall(VecWAXPY(tempJR, -1., am->Bz0, am->Bz));
84: PetscCall(VecWAXPY(tempJR2, -1., am->y, am->y0));
85: PetscCall(VecNorm(tempJR, NORM_2, &Bzdiff_norm));
86: PetscCall(VecNorm(tempJR2, NORM_2, &ydiff_norm));
87: PetscCall(VecDot(tempJR, tempJR2, &Bzy));
89: if (Axyhat > am->orthval * Axdiff_norm * yhatdiff_norm + am->mueps) {
90: hflag = PETSC_TRUE;
91: a_sd = PetscSqr(yhatdiff_norm) / Axyhat; /* alphaSD */
92: a_mg = Axyhat / PetscSqr(Axdiff_norm); /* alphaMG */
93: a_k = (a_mg / a_sd) > 0.5 ? a_mg : a_sd - 0.5 * a_mg;
94: }
95: if (Bzy > am->orthval * Bzdiff_norm * ydiff_norm + am->mueps) {
96: gflag = PETSC_TRUE;
97: b_sd = PetscSqr(ydiff_norm) / Bzy; /* betaSD */
98: b_mg = Bzy / PetscSqr(Bzdiff_norm); /* betaMG */
99: b_k = (b_mg / b_sd) > 0.5 ? b_mg : b_sd - 0.5 * b_mg;
100: }
101: am->muold = am->mu;
102: if (gflag && hflag) {
103: am->mu = PetscSqrtReal(a_k * b_k);
104: } else if (hflag) {
105: am->mu = a_k;
106: } else if (gflag) {
107: am->mu = b_k;
108: }
109: if (am->mu > am->muold) am->mu = am->muold;
110: if (am->mu < am->mumin) am->mu = am->mumin;
111: PetscFunctionReturn(PETSC_SUCCESS);
112: }
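/* Restating compactly what the routine above computes (following the adaptive relaxed ADMM
 * reference cited at the top of this file): with dAx = Ax - Axold and dyhat = yhat - yhatold,
 *   alpha_SD = ||dyhat||^2 / <dAx, dyhat>,   alpha_MG = <dAx, dyhat> / ||dAx||^2,
 *   a_k = alpha_MG                 if alpha_MG / alpha_SD > 1/2,
 *       = alpha_SD - alpha_MG / 2  otherwise,
 * and analogously b_k from (Bz - Bz0, y0 - y). Each estimate is used only when the corresponding
 * correlation test <.,.> > orthval * ||.|| * ||.|| + mueps passes; the new mu is sqrt(a_k * b_k)
 * (or a_k, or b_k, if only one test passes), safeguarded by mu <= muold and mu >= mumin. */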
114: static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
115: {
116: TAO_ADMM *am = (TAO_ADMM *)tao->data;
118: PetscFunctionBegin;
119: am->regswitch = type;
120: PetscFunctionReturn(PETSC_SUCCESS);
121: }
123: static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
124: {
125: TAO_ADMM *am = (TAO_ADMM *)tao->data;
127: PetscFunctionBegin;
128: *type = am->regswitch;
129: PetscFunctionReturn(PETSC_SUCCESS);
130: }
132: static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
133: {
134: TAO_ADMM *am = (TAO_ADMM *)tao->data;
136: PetscFunctionBegin;
137: am->update = type;
138: PetscFunctionReturn(PETSC_SUCCESS);
139: }
141: static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
142: {
143: TAO_ADMM *am = (TAO_ADMM *)tao->data;
145: PetscFunctionBegin;
146: *type = am->update;
147: PetscFunctionReturn(PETSC_SUCCESS);
148: }
150: /* This routine updates the Jacobians with the new x, z vectors,
151: * then updates the Ax and Bz vectors, and computes the updated residual vector */
152: static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
153: {
154: TAO_ADMM *am = (TAO_ADMM *)tao->data;
155: Tao mis, reg;
157: PetscFunctionBegin;
158: mis = am->subsolverX;
159: reg = am->subsolverZ;
160: PetscCall(TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre));
161: PetscCall(MatMult(mis->jacobian_equality, x, Ax));
162: PetscCall(TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre));
163: PetscCall(MatMult(reg->jacobian_equality, z, Bz));
165: PetscCall(VecWAXPY(residual, 1., Bz, Ax));
166: if (am->constraint != NULL) PetscCall(VecAXPY(residual, -1., am->constraint));
167: PetscFunctionReturn(PETSC_SUCCESS);
168: }
170: /* Adds the augmented Lagrangian terms to the given routine.
171: * For subsolverX, the routine needs to be ComputeObjectiveAndGradient;
172: * separate Objective and Gradient routines are not supported. */
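/* For reference, the x-subproblem objective assembled below is the augmented Lagrangian
 * restricted to x,
 *   L_mu(x) = f(x) + y^T (Ax + Bz - c) + (mu/2) ||Ax + Bz - c||_2^2,
 * whose gradient is  grad f(x) + A^T y + mu * A^T (Ax + Bz - c),  matching the terms added below. */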
173: static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
174: {
175: Tao parent = (Tao)ptr;
176: TAO_ADMM *am = (TAO_ADMM *)parent->data;
177: PetscReal temp, temp2;
178: Vec tempJR;
180: PetscFunctionBegin;
181: tempJR = am->workJacobianRight;
182: PetscCall(ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
183: PetscCall((*am->ops->misfitobjgrad)(am->subsolverX, x, f, g, am->misfitobjgradP));
185: am->last_misfit_val = *f;
186: /* Objective: add y^T(Ax+Bz-c) + (mu/2)*||Ax+Bz-c||_2^2 */
187: PetscCall(VecTDot(am->residual, am->y, &temp));
188: PetscCall(VecTDot(am->residual, am->residual, &temp2));
189: *f += temp + (am->mu / 2) * temp2;
191: /* Gradient: add mu*A^T(Ax+Bz-c) + A^T y */
192: PetscCall(MatMultTranspose(tao->jacobian_equality, am->residual, tempJR));
193: PetscCall(VecAXPY(g, am->mu, tempJR));
194: PetscCall(MatMultTranspose(tao->jacobian_equality, am->y, tempJR));
195: PetscCall(VecAXPY(g, 1., tempJR));
196: PetscFunctionReturn(PETSC_SUCCESS);
197: }
199: /* Adds the augmented Lagrangian terms to the given routine.
200: * For subsolverZ, the routine needs to be ComputeObjectiveAndGradient;
201: * separate Objective and Gradient routines are not supported. */
202: static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
203: {
204: Tao parent = (Tao)ptr;
205: TAO_ADMM *am = (TAO_ADMM *)parent->data;
206: PetscReal temp, temp2;
207: Vec tempJR;
209: PetscFunctionBegin;
210: tempJR = am->workJacobianRight;
211: PetscCall(ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual));
212: PetscCall((*am->ops->regobjgrad)(am->subsolverZ, z, f, g, am->regobjgradP));
213: am->last_reg_val = *f;
214: /* Objective: add y^T(Ax+Bz-c) + (mu/2)*||Ax+Bz-c||_2^2 */
215: PetscCall(VecTDot(am->residual, am->y, &temp));
216: PetscCall(VecTDot(am->residual, am->residual, &temp2));
217: *f += temp + (am->mu / 2) * temp2;
219: /* Gradient: add mu*B^T(Ax+Bz-c) + B^T y */
220: PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->residual, tempJR));
221: PetscCall(VecAXPY(g, am->mu, tempJR));
222: PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->y, tempJR));
223: PetscCall(VecAXPY(g, 1., tempJR));
224: PetscFunctionReturn(PETSC_SUCCESS);
225: }
227: /* Computes the epsilon-padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps) */
228: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
229: {
230: TAO_ADMM *am = (TAO_ADMM *)tao->data;
231: PetscInt N;
233: PetscFunctionBegin;
234: PetscCall(VecGetSize(am->workLeft, &N));
235: PetscCall(VecPointwiseMult(am->workLeft, x, x));
236: PetscCall(VecShift(am->workLeft, am->l1epsilon * am->l1epsilon));
237: PetscCall(VecSqrtAbs(am->workLeft));
238: PetscCall(VecSum(am->workLeft, norm));
239: *norm += N * am->l1epsilon;
240: *norm *= am->lambda;
241: PetscFunctionReturn(PETSC_SUCCESS);
242: }
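/* A small illustrative check: as eps -> 0 this smoothed norm approaches lambda * ||x||_1;
 * for example, with lambda = 1, eps = 0, and x = (3, -4) it evaluates to 3 + 4 = 7. */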
244: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
245: {
246: TAO_ADMM *am = (TAO_ADMM *)ptr;
248: PetscFunctionBegin;
249: switch (am->update) {
250: case (TAO_ADMM_UPDATE_BASIC):
251: break;
252: case (TAO_ADMM_UPDATE_ADAPTIVE):
253: case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
254: if (H && (am->muold != am->mu)) {
255: if (!Identity) {
256: PetscCall(MatAXPY(H, am->mu - am->muold, Constraint, DIFFERENT_NONZERO_PATTERN));
257: } else {
258: PetscCall(MatShift(H, am->mu - am->muold));
259: }
260: }
261: break;
262: }
263: PetscFunctionReturn(PETSC_SUCCESS);
264: }
266: /* Updates Hessian - adds second derivative of augmented Lagrangian
267: * H \gets H + \rho*A^T A
268: Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus no-op
269: For ADAPTIVE and ADAPTIVE_RELAXED,
270: H \gets H + (\rho - \rho_old)*A^T A
271: Here, we assume that A is a linear constraint, i.e., it does not change.
272: Thus, for both ADAPTIVE and ADAPTIVE_RELAXED, the A^T A matrix is pre-set (except for the A=I (NULL) case); see TaoSetUp_ADMM */
273: static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
274: {
275: Tao parent = (Tao)ptr;
276: TAO_ADMM *am = (TAO_ADMM *)parent->data;
278: PetscFunctionBegin;
279: if (am->Hxchange) {
280: /* Case where Hessian gets updated with respect to x vector input. */
281: PetscCall((*am->ops->misfithess)(am->subsolverX, x, H, Hpre, am->misfithessP));
282: PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
283: } else if (am->Hxbool) {
284: /* Hessian doesn't get updated; H(x) is constant. */
285: /* Update the augmented Lagrangian term only once per subsolver TaoSolve call */
286: PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
287: am->Hxbool = PETSC_FALSE;
288: }
289: PetscFunctionReturn(PETSC_SUCCESS);
290: }
292: /* Same as SubHessianUpdate, except for B matrix instead of A matrix */
293: static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
294: {
295: Tao parent = (Tao)ptr;
296: TAO_ADMM *am = (TAO_ADMM *)parent->data;
298: PetscFunctionBegin;
300: if (am->Hzchange) {
301: /* Case where Hessian gets updated with respect to z vector input. */
302: PetscCall((*am->ops->reghess)(am->subsolverZ, z, H, Hpre, am->reghessP));
303: PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
304: } else if (am->Hzbool) {
305: /* Hessian doesn't get updated; H(z) is constant. */
306: /* Update the augmented Lagrangian term only once per subsolver TaoSolve call */
307: PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
308: am->Hzbool = PETSC_FALSE;
309: }
310: PetscFunctionReturn(PETSC_SUCCESS);
311: }
313: /* Shell Matrix routine for the A matrix.
314: * This gets used when the user passes NULL for
315: * TaoSetJacobianEqualityRoutine(tao, NULL, NULL, ...)
316: * Essentially sets A=I */
317: static PetscErrorCode JacobianIdentity(Mat mat, Vec in, Vec out)
318: {
319: PetscFunctionBegin;
320: PetscCall(VecCopy(in, out));
321: PetscFunctionReturn(PETSC_SUCCESS);
322: }
324: /* Shell Matrix routine for the B matrix.
325: * This gets used when the user passes NULL for
326: * TaoADMMSetRegularizerConstraintJacobian(tao, NULL, NULL, ...)
327: * Sets B=-I */
328: static PetscErrorCode JacobianIdentityB(Mat mat, Vec in, Vec out)
329: {
330: PetscFunctionBegin;
331: PetscCall(VecCopy(in, out));
332: PetscCall(VecScale(out, -1.));
333: PetscFunctionReturn(PETSC_SUCCESS);
334: }
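/* Usage note (illustrative; A, B, and c stand for application-provided Mat/Vec objects): leaving
 * both constraint Jacobians unset yields the consensus splitting A = I, B = -I, c = 0, i.e. the
 * constraint x - z = 0, which is what the soft-threshold regularizer assumes. To use explicit
 * constraint matrices instead, one would call
 *
 *   PetscCall(TaoADMMSetMisfitConstraintJacobian(tao, A, A, NULL, NULL));
 *   PetscCall(TaoADMMSetRegularizerConstraintJacobian(tao, B, B, NULL, NULL));
 *   PetscCall(TaoADMMSetConstraintVectorRHS(tao, c));
 */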
336: /* Solve min f(x) + g(z) s.t. Ax + Bz = c */
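/* The loop below implements the standard ADMM iteration: at outer iteration k,
 *   x^{k+1} = argmin_x f(x) + y^T (Ax + Bz^k - c) + (mu/2)||Ax + Bz^k - c||^2        (subsolverX)
 *   z^{k+1} = argmin_z g(z) + y^T (Ax^{k+1} + Bz - c) + (mu/2)||Ax^{k+1} + Bz - c||^2
 *             (subsolverZ, or a closed-form soft-threshold when the L1 regularizer is selected)
 *   y^{k+1} = y^k + mu (Ax^{k+1} + Bz^{k+1} - c),
 * where, for the adaptive update types, mu is re-estimated by AdaptiveADMMPenaltyUpdate()
 * when tao->niter % am->T == 1. */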
337: static PetscErrorCode TaoSolve_ADMM(Tao tao)
338: {
339: TAO_ADMM *am = (TAO_ADMM *)tao->data;
340: PetscInt N;
341: PetscReal reg_func;
342: PetscBool is_reg_shell;
343: Vec tempL;
345: PetscFunctionBegin;
346: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
347: PetscCheck(am->subsolverX->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetMisfitConstraintJacobian() first");
348: PetscCheck(am->subsolverZ->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetRegularizerConstraintJacobian() first");
349: if (am->constraint != NULL) PetscCall(VecNorm(am->constraint, NORM_2, &am->const_norm));
350: }
351: tempL = am->workLeft;
352: PetscCall(VecGetSize(tempL, &N));
354: if (am->Hx && am->ops->misfithess) PetscCall(TaoSetHessian(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao));
356: if (!am->zJI) {
357: /* Currently, B is assumed to be linear, i.e., it does not get updated */
358: PetscCall(MatTransposeMatMult(am->JB, am->JB, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->BTB)));
359: }
360: if (!am->xJI) {
361: /* Currently, A is assumed to be linear, i.e., it does not get updated */
362: PetscCall(MatTransposeMatMult(am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->ATA)));
363: }
365: is_reg_shell = PETSC_FALSE;
367: PetscCall(PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell));
369: if (!is_reg_shell) {
370: switch (am->regswitch) {
371: case (TAO_ADMM_REGULARIZER_USER):
372: break;
373: case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
374: /* Soft Threshold. */
375: break;
376: }
377: if (am->ops->regobjgrad) PetscCall(TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao));
378: if (am->Hz && am->ops->reghess) PetscCall(TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao));
379: }
381: switch (am->update) {
382: case TAO_ADMM_UPDATE_BASIC:
383: if (am->subsolverX->hessian) {
384: /* In the basic case, the Hessian does not get updated w.r.t. the spectral penalty during iterations.
385: * Here, when A is given (i.e., !am->xJI), add mu*A^T A to the Hessian; otherwise (A = I) shift it by mu */
386: if (!am->xJI) {
387: PetscCall(MatAXPY(am->subsolverX->hessian, am->mu, am->ATA, DIFFERENT_NONZERO_PATTERN));
388: } else {
389: PetscCall(MatShift(am->subsolverX->hessian, am->mu));
390: }
391: }
392: if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
393: if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
394: PetscCall(MatAXPY(am->subsolverZ->hessian, am->mu, am->BTB, DIFFERENT_NONZERO_PATTERN));
395: } else {
396: PetscCall(MatShift(am->subsolverZ->hessian, am->mu));
397: }
398: }
399: break;
400: case TAO_ADMM_UPDATE_ADAPTIVE:
401: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
402: break;
403: }
405: PetscCall(PetscCitationsRegister(citation, &cited));
406: tao->reason = TAO_CONTINUE_ITERATING;
408: while (tao->reason == TAO_CONTINUE_ITERATING) {
409: PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
410: PetscCall(VecCopy(am->Bz, am->Bzold));
412: /* x update */
413: PetscCall(TaoSolve(am->subsolverX));
414: PetscCall(TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre));
415: PetscCall(MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution, am->Ax));
417: am->Hxbool = PETSC_TRUE;
419: /* z update */
420: switch (am->regswitch) {
421: case TAO_ADMM_REGULARIZER_USER:
422: PetscCall(TaoSolve(am->subsolverZ));
423: break;
424: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
425: /* L1 assumes the A, B Jacobians are identity n x n matrices */
426: PetscCall(VecWAXPY(am->workJacobianRight, 1 / am->mu, am->y, am->Ax));
427: PetscCall(TaoSoftThreshold(am->workJacobianRight, -am->lambda / am->mu, am->lambda / am->mu, am->subsolverZ->solution));
428: break;
429: }
430: am->Hzbool = PETSC_TRUE;
431: /* Returns Ax + Bz - c with updated Ax,Bz vectors */
432: PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
433: /* Dual variable update: y = yold + mu*(Ax+Bz-c) */
434: PetscCall(VecWAXPY(am->y, am->mu, am->residual, am->yold));
436: /* stopping tolerance update */
437: PetscCall(TaoADMMToleranceUpdate(tao));
439: /* Updating Spectral Penalty */
440: switch (am->update) {
441: case TAO_ADMM_UPDATE_BASIC:
442: am->muold = am->mu;
443: break;
444: case TAO_ADMM_UPDATE_ADAPTIVE:
445: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
446: if (tao->niter == 0) {
447: PetscCall(VecCopy(am->y, am->y0));
448: PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
449: if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
450: PetscCall(VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold));
451: PetscCall(VecCopy(am->Ax, am->Axold));
452: PetscCall(VecCopy(am->Bz, am->Bz0));
453: am->muold = am->mu;
454: } else if (tao->niter % am->T == 1) {
455: /* we have computed Bzold in a previous iteration, and we computed Ax above */
456: PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
457: if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
458: PetscCall(VecWAXPY(am->yhat, -am->mu, am->residual, am->yold));
459: PetscCall(AdaptiveADMMPenaltyUpdate(tao));
460: PetscCall(VecCopy(am->Ax, am->Axold));
461: PetscCall(VecCopy(am->Bz, am->Bz0));
462: PetscCall(VecCopy(am->yhat, am->yhatold));
463: PetscCall(VecCopy(am->y, am->y0));
464: } else {
465: am->muold = am->mu;
466: }
467: break;
468: default:
469: break;
470: }
471: tao->niter++;
473: /* Calculate original function values. The misfit part was saved during the x subsolve (SubObjGradUpdate) */
474: switch (am->regswitch) {
475: case TAO_ADMM_REGULARIZER_USER:
476: if (is_reg_shell) {
477: PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
478: } else {
479: PetscCall((*am->ops->regobjgrad)(am->subsolverZ, am->subsolverX->solution, &reg_func, tempL, am->regobjgradP));
480: }
481: break;
482: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
483: PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
484: break;
485: }
486: PetscCall(VecCopy(am->y, am->yold));
487: PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
488: PetscCall(VecNorm(am->residual, NORM_2, &am->resnorm));
489: PetscCall(TaoLogConvergenceHistory(tao, am->last_misfit_val + reg_func, am->dualres, am->resnorm, tao->ksp_its));
491: PetscCall(TaoMonitor(tao, tao->niter, am->last_misfit_val + reg_func, am->dualres, am->resnorm, 1.0));
492: PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
493: }
494: /* Update vectors */
495: PetscCall(VecCopy(am->subsolverX->solution, tao->solution));
496: PetscCall(VecCopy(am->subsolverX->gradient, tao->gradient));
497: PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", NULL));
498: PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", NULL));
499: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
500: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
501: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
502: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
503: PetscFunctionReturn(PETSC_SUCCESS);
504: }
506: static PetscErrorCode TaoSetFromOptions_ADMM(Tao tao, PetscOptionItems *PetscOptionsObject)
507: {
508: TAO_ADMM *am = (TAO_ADMM *)tao->data;
510: PetscFunctionBegin;
511: PetscOptionsHeadBegin(PetscOptionsObject, "ADMM solver for problems of the form f(x) + g(z) subject to x - z = 0. Norms 1 and 2 are supported. Different subsolver routines can be selected. ");
512: PetscCall(PetscOptionsReal("-tao_admm_regularizer_coefficient", "regularizer constant", "", am->lambda, &am->lambda, NULL));
513: PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty", "Constant for Augmented Lagrangian term.", "", am->mu, &am->mu, NULL));
514: PetscCall(PetscOptionsReal("-tao_admm_relaxation_parameter", "x relaxation parameter for Z update.", "", am->gamma, &am->gamma, NULL));
515: PetscCall(PetscOptionsReal("-tao_admm_tolerance_update_factor", "ADMM dynamic tolerance update factor.", "", am->tol, &am->tol, NULL));
516: PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty_update_factor", "ADMM spectral penalty update curvature safeguard value.", "", am->orthval, &am->orthval, NULL));
517: PetscCall(PetscOptionsReal("-tao_admm_minimum_spectral_penalty", "Set ADMM minimum spectral penalty.", "", am->mumin, &am->mumin, NULL));
518: PetscCall(PetscOptionsEnum("-tao_admm_dual_update", "Lagrangian dual update policy", "TaoADMMUpdateType", TaoADMMUpdateTypes, (PetscEnum)am->update, (PetscEnum *)&am->update, NULL));
519: PetscCall(PetscOptionsEnum("-tao_admm_regularizer_type", "ADMM regularizer update rule", "TaoADMMRegularizerType", TaoADMMRegularizerTypes, (PetscEnum)am->regswitch, (PetscEnum *)&am->regswitch, NULL));
520: PetscOptionsHeadEnd();
521: PetscCall(TaoSetFromOptions(am->subsolverX));
522: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) PetscCall(TaoSetFromOptions(am->subsolverZ));
523: PetscFunctionReturn(PETSC_SUCCESS);
524: }
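/* Command-line sketch (illustrative executable name and values): since TaoCreate_ADMM() gives the
 * subsolvers the option prefixes "misfit_" and "reg_", a typical invocation might look like
 *
 *   ./app -tao_type admm -tao_admm_spectral_penalty 5.0 -tao_admm_regularizer_coefficient 1e-3 \
 *         -misfit_tao_type nls -reg_tao_type nls -tao_monitor
 */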
526: static PetscErrorCode TaoView_ADMM(Tao tao, PetscViewer viewer)
527: {
528: TAO_ADMM *am = (TAO_ADMM *)tao->data;
530: PetscFunctionBegin;
531: PetscCall(PetscViewerASCIIPushTab(viewer));
532: PetscCall(TaoView(am->subsolverX, viewer));
533: PetscCall(TaoView(am->subsolverZ, viewer));
534: PetscCall(PetscViewerASCIIPopTab(viewer));
535: PetscFunctionReturn(PETSC_SUCCESS);
536: }
538: static PetscErrorCode TaoSetUp_ADMM(Tao tao)
539: {
540: TAO_ADMM *am = (TAO_ADMM *)tao->data;
541: PetscInt n, N, M;
543: PetscFunctionBegin;
544: PetscCall(VecGetLocalSize(tao->solution, &n));
545: PetscCall(VecGetSize(tao->solution, &N));
546: /* If the Jacobian is given as NULL, it is taken to be the identity matrix with the size of the solution vector */
547: if (!am->JB) {
548: am->zJI = PETSC_TRUE;
549: PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JB));
550: PetscCall(MatShellSetOperation(am->JB, MATOP_MULT, (void (*)(void))JacobianIdentityB));
551: PetscCall(MatShellSetOperation(am->JB, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentityB));
552: am->JBpre = am->JB;
553: }
554: if (!am->JA) {
555: am->xJI = PETSC_TRUE;
556: PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JA));
557: PetscCall(MatShellSetOperation(am->JA, MATOP_MULT, (void (*)(void))JacobianIdentity));
558: PetscCall(MatShellSetOperation(am->JA, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentity));
559: am->JApre = am->JA;
560: }
561: PetscCall(MatCreateVecs(am->JA, NULL, &am->Ax));
562: if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient));
563: PetscCall(TaoSetSolution(am->subsolverX, tao->solution));
564: if (!am->z) {
565: PetscCall(VecDuplicate(tao->solution, &am->z));
566: PetscCall(VecSet(am->z, 0.0));
567: }
568: PetscCall(TaoSetSolution(am->subsolverZ, am->z));
569: if (!am->workLeft) PetscCall(VecDuplicate(tao->solution, &am->workLeft));
570: if (!am->Axold) PetscCall(VecDuplicate(am->Ax, &am->Axold));
571: if (!am->workJacobianRight) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight));
572: if (!am->workJacobianRight2) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight2));
573: if (!am->Bz) PetscCall(VecDuplicate(am->Ax, &am->Bz));
574: if (!am->Bzold) PetscCall(VecDuplicate(am->Ax, &am->Bzold));
575: if (!am->Bz0) PetscCall(VecDuplicate(am->Ax, &am->Bz0));
576: if (!am->y) {
577: PetscCall(VecDuplicate(am->Ax, &am->y));
578: PetscCall(VecSet(am->y, 0.0));
579: }
580: if (!am->yold) {
581: PetscCall(VecDuplicate(am->Ax, &am->yold));
582: PetscCall(VecSet(am->yold, 0.0));
583: }
584: if (!am->y0) {
585: PetscCall(VecDuplicate(am->Ax, &am->y0));
586: PetscCall(VecSet(am->y0, 0.0));
587: }
588: if (!am->yhat) {
589: PetscCall(VecDuplicate(am->Ax, &am->yhat));
590: PetscCall(VecSet(am->yhat, 0.0));
591: }
592: if (!am->yhatold) {
593: PetscCall(VecDuplicate(am->Ax, &am->yhatold));
594: PetscCall(VecSet(am->yhatold, 0.0));
595: }
596: if (!am->residual) {
597: PetscCall(VecDuplicate(am->Ax, &am->residual));
598: PetscCall(VecSet(am->residual, 0.0));
599: }
600: if (!am->constraint) {
601: am->constraint = NULL;
602: } else {
603: PetscCall(VecGetSize(am->constraint, &M));
604: PetscCheck(M == N, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Solution vector and constraint vector must be of same size!");
605: }
607: /* Save changed tao tolerance for adaptive tolerance */
608: if (tao->gatol_changed) am->gatol_admm = tao->gatol;
609: if (tao->catol_changed) am->catol_admm = tao->catol;
611: /* Update spectral and dual elements for the X subsolver */
612: PetscCall(TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao));
613: PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverX, am->JA, am->JApre, am->ops->misfitjac, am->misfitjacobianP));
614: PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverZ, am->JB, am->JBpre, am->ops->regjac, am->regjacobianP));
615: PetscFunctionReturn(PETSC_SUCCESS);
616: }
618: static PetscErrorCode TaoDestroy_ADMM(Tao tao)
619: {
620: TAO_ADMM *am = (TAO_ADMM *)tao->data;
622: PetscFunctionBegin;
623: PetscCall(VecDestroy(&am->z));
624: PetscCall(VecDestroy(&am->Ax));
625: PetscCall(VecDestroy(&am->Axold));
626: PetscCall(VecDestroy(&am->Bz));
627: PetscCall(VecDestroy(&am->Bzold));
628: PetscCall(VecDestroy(&am->Bz0));
629: PetscCall(VecDestroy(&am->residual));
630: PetscCall(VecDestroy(&am->y));
631: PetscCall(VecDestroy(&am->yold));
632: PetscCall(VecDestroy(&am->y0));
633: PetscCall(VecDestroy(&am->yhat));
634: PetscCall(VecDestroy(&am->yhatold));
635: PetscCall(VecDestroy(&am->workLeft));
636: PetscCall(VecDestroy(&am->workJacobianRight));
637: PetscCall(VecDestroy(&am->workJacobianRight2));
639: PetscCall(MatDestroy(&am->JA));
640: PetscCall(MatDestroy(&am->JB));
641: if (!am->xJI) PetscCall(MatDestroy(&am->JApre));
642: if (!am->zJI) PetscCall(MatDestroy(&am->JBpre));
643: if (am->Hx) {
644: PetscCall(MatDestroy(&am->Hx));
645: PetscCall(MatDestroy(&am->Hxpre));
646: }
647: if (am->Hz) {
648: PetscCall(MatDestroy(&am->Hz));
649: PetscCall(MatDestroy(&am->Hzpre));
650: }
651: PetscCall(MatDestroy(&am->ATA));
652: PetscCall(MatDestroy(&am->BTB));
653: PetscCall(TaoDestroy(&am->subsolverX));
654: PetscCall(TaoDestroy(&am->subsolverZ));
655: am->parent = NULL;
656: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
657: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
658: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
659: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
660: PetscCall(PetscFree(tao->data));
661: PetscFunctionReturn(PETSC_SUCCESS);
662: }
664: /*MC
666: TAOADMM - Alternating direction method of multipliers method for solving constrained problems
667: of the form min_{x,z} f(x) + g(z) s.t. Ax+Bz=c.
668: This algorithm employs two sub Tao solvers, whose types can be specified
669: by the user. The user needs to provide an ObjectiveAndGradient routine, and/or a HessianRoutine, for both subsolvers.
670: Each Hessian can be given a boolean flag determining whether it changes with respect to the input vector. This can be set via
671: TaoADMMSet{Misfit,Regularizer}HessianChangeStatus().
672: The second subsolver supports TAOSHELL; note that the L1 norm is used for the objective value in the TAOSHELL case.
673: The regularizer type can be selected; currently soft-thresholding is implemented. For the spectral penalty update,
674: basic and adaptive options are currently available.
675: The constraint is Ax+Bz=c, where A and B can be set with TaoADMMSet{Misfit,Regularizer}ConstraintJacobian()
676: and c can be set with TaoADMMSetConstraintVectorRHS().
677: The user can also provide a regularizer weight for the second subsolver.
679: References:
680: . * - Xu, Zheng and Figueiredo, Mario A. T. and Yuan, Xiaoming and Studer, Christoph and Goldstein, Tom
681: "Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation"
682: The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), July, 2017.
684: Options Database Keys:
685: + -tao_admm_regularizer_coefficient - regularizer constant (default 1.e-4)
686: . -tao_admm_spectral_penalty - Constant for Augmented Lagrangian term (default 1.)
687: . -tao_admm_relaxation_parameter - relaxation parameter for Z update (default 1.)
688: . -tao_admm_tolerance_update_factor - ADMM dynamic tolerance update factor (default 1.e-12)
689: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
690: . -tao_admm_minimum_spectral_penalty - Set ADMM minimum spectral penalty (default 0)
691: . -tao_admm_dual_update - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
692: - -tao_admm_regularizer_type - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")
694: Level: beginner
696: .seealso: `TaoADMMSetMisfitHessianChangeStatus()`, `TaoADMMSetRegHessianChangeStatus()`, `TaoADMMGetSpectralPenalty()`,
697: `TaoADMMGetMisfitSubsolver()`, `TaoADMMGetRegularizationSubsolver()`, `TaoADMMSetConstraintVectorRHS()`,
698: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetRegularizerCoefficient()`,
699: `TaoADMMSetRegularizerConstraintJacobian()`, `TaoADMMSetMisfitConstraintJacobian()`,
700: `TaoADMMSetMisfitObjectiveAndGradientRoutine()`, `TaoADMMSetMisfitHessianRoutine()`,
701: `TaoADMMSetRegularizerObjectiveAndGradientRoutine()`, `TaoADMMSetRegularizerHessianRoutine()`,
702: `TaoGetADMMParentTao()`, `TaoADMMGetDualVector()`, `TaoADMMSetRegularizerType()`,
703: `TaoADMMGetRegularizerType()`, `TaoADMMSetUpdateType()`, `TaoADMMGetUpdateType()`
704: M*/
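/* A minimal setup sketch (UserMisfitObjGrad, user_ctx, and x0 are hypothetical application-side
 * names; the remaining calls are the public API declared in this file or in petsctao.h):
 *
 *   Tao tao;
 *   PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
 *   PetscCall(TaoSetType(tao, TAOADMM));
 *   PetscCall(TaoSetSolution(tao, x0));
 *   PetscCall(TaoADMMSetMisfitObjectiveAndGradientRoutine(tao, UserMisfitObjGrad, user_ctx));
 *   PetscCall(TaoADMMSetRegularizerCoefficient(tao, 1.e-3));         (keeps the default soft-threshold regularizer)
 *   PetscCall(TaoADMMSetUpdateType(tao, TAO_ADMM_UPDATE_ADAPTIVE));  (or keep the basic update)
 *   PetscCall(TaoSetFromOptions(tao));
 *   PetscCall(TaoSolve(tao));
 *   PetscCall(TaoDestroy(&tao));
 */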
706: PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
707: {
708: TAO_ADMM *am;
710: PetscFunctionBegin;
711: PetscCall(PetscNew(&am));
713: tao->ops->destroy = TaoDestroy_ADMM;
714: tao->ops->setup = TaoSetUp_ADMM;
715: tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
716: tao->ops->view = TaoView_ADMM;
717: tao->ops->solve = TaoSolve_ADMM;
719: tao->data = (void *)am;
720: am->l1epsilon = 1e-6;
721: am->lambda = 1e-4;
722: am->mu = 1.;
723: am->muold = 0.;
724: am->mueps = PETSC_MACHINE_EPSILON;
725: am->mumin = 0.;
726: am->orthval = 0.2;
727: am->T = 2;
728: am->parent = tao;
729: am->update = TAO_ADMM_UPDATE_BASIC;
730: am->regswitch = TAO_ADMM_REGULARIZER_SOFT_THRESH;
731: am->tol = PETSC_SMALL;
732: am->const_norm = 0;
733: am->resnorm = 0;
734: am->dualres = 0;
735: am->ops->regobjgrad = NULL;
736: am->ops->reghess = NULL;
737: am->gamma = 1;
738: am->regobjgradP = NULL;
739: am->reghessP = NULL;
740: am->gatol_admm = 1e-8;
741: am->catol_admm = 0;
742: am->Hxchange = PETSC_TRUE;
743: am->Hzchange = PETSC_TRUE;
744: am->Hzbool = PETSC_TRUE;
745: am->Hxbool = PETSC_TRUE;
747: PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverX));
748: PetscCall(TaoSetOptionsPrefix(am->subsolverX, "misfit_"));
749: PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverX, (PetscObject)tao, 1));
750: PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverZ));
751: PetscCall(TaoSetOptionsPrefix(am->subsolverZ, "reg_"));
752: PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ, (PetscObject)tao, 1));
754: PetscCall(TaoSetType(am->subsolverX, TAONLS));
755: PetscCall(TaoSetType(am->subsolverZ, TAONLS));
756: PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
757: PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
758: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", TaoADMMSetRegularizerType_ADMM));
759: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", TaoADMMGetRegularizerType_ADMM));
760: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", TaoADMMSetUpdateType_ADMM));
761: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", TaoADMMGetUpdateType_ADMM));
762: PetscFunctionReturn(PETSC_SUCCESS);
763: }
765: /*@
766: TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines whether Hessian matrix of misfit subsolver changes with respect to input vector.
768: Collective
770: Input Parameters:
771: + tao - the Tao solver context.
772: - b - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise.
774: Level: advanced
776: .seealso: `TAOADMM`
777: @*/
778: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
779: {
780: TAO_ADMM *am = (TAO_ADMM *)tao->data;
782: PetscFunctionBegin;
783: am->Hxchange = b;
784: PetscFunctionReturn(PETSC_SUCCESS);
785: }
787: /*@
788: TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether Hessian matrix of regularization subsolver changes with respect to input vector.
790: Collective
792: Input Parameters:
793: + tao - the `Tao` solver context
794: - b - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise.
796: Level: advanced
798: .seealso: `TAOADMM`
799: @*/
800: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
801: {
802: TAO_ADMM *am = (TAO_ADMM *)tao->data;
804: PetscFunctionBegin;
805: am->Hzchange = b;
806: PetscFunctionReturn(PETSC_SUCCESS);
807: }
809: /*@
810: TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value
812: Collective
814: Input Parameters:
815: + tao - the `Tao` solver context
816: - mu - spectral penalty
818: Level: advanced
820: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TAOADMM`
821: @*/
822: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
823: {
824: TAO_ADMM *am = (TAO_ADMM *)tao->data;
826: PetscFunctionBegin;
827: am->mu = mu;
828: PetscFunctionReturn(PETSC_SUCCESS);
829: }
831: /*@
832: TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value
834: Collective
836: Input Parameter:
837: . tao - the `Tao` solver context
839: Output Parameter:
840: . mu - spectral penalty
842: Level: advanced
844: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetSpectralPenalty()`, `TAOADMM`
845: @*/
846: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
847: {
848: TAO_ADMM *am = (TAO_ADMM *)tao->data;
850: PetscFunctionBegin;
853: *mu = am->mu;
854: PetscFunctionReturn(PETSC_SUCCESS);
855: }
857: /*@
858: TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside `TAOADMM`
860: Collective
862: Input Parameter:
863: . tao - the `Tao` solver context
865: Output Parameter:
866: . misfit - the `Tao` subsolver context
868: Level: advanced
870: .seealso: `TAOADMM`, `Tao`
871: @*/
872: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
873: {
874: TAO_ADMM *am = (TAO_ADMM *)tao->data;
876: PetscFunctionBegin;
877: *misfit = am->subsolverX;
878: PetscFunctionReturn(PETSC_SUCCESS);
879: }
881: /*@
882: TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside `TAOADMM`
884: Collective
886: Input Parameter:
887: . tao - the `Tao` solver context
889: Output Parameter:
890: . reg - the `Tao` subsolver context
892: Level: advanced
894: .seealso: `TAOADMM`, `Tao`
895: @*/
896: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
897: {
898: TAO_ADMM *am = (TAO_ADMM *)tao->data;
900: PetscFunctionBegin;
901: *reg = am->subsolverZ;
902: PetscFunctionReturn(PETSC_SUCCESS);
903: }
905: /*@
906: TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for `TAOADMM`
908: Collective
910: Input Parameters:
911: + tao - the `Tao` solver context
912: - c - RHS vector
914: Level: advanced
916: .seealso: `TAOADMM`
917: @*/
918: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
919: {
920: TAO_ADMM *am = (TAO_ADMM *)tao->data;
922: PetscFunctionBegin;
923: am->constraint = c;
924: PetscFunctionReturn(PETSC_SUCCESS);
925: }
927: /*@
928: TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty
930: Collective
932: Input Parameters:
933: + tao - the `Tao` solver context
934: - mu - minimum spectral penalty value
936: Level: advanced
938: .seealso: `TaoADMMGetSpectralPenalty()`, `TAOADMM`
939: @*/
940: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
941: {
942: TAO_ADMM *am = (TAO_ADMM *)tao->data;
944: PetscFunctionBegin;
945: am->mumin = mu;
946: PetscFunctionReturn(PETSC_SUCCESS);
947: }
949: /*@
950: TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for the L1-norm regularization case
952: Collective
954: Input Parameters:
955: + tao - the `Tao` solver context
956: - lambda - L1-norm regularizer coefficient
958: Level: advanced
960: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
961: @*/
962: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
963: {
964: TAO_ADMM *am = (TAO_ADMM *)tao->data;
966: PetscFunctionBegin;
967: am->lambda = lambda;
968: PetscFunctionReturn(PETSC_SUCCESS);
969: }
971: /*@C
972: TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix A for the `TAOADMM` algorithm. Matrix A constrains the x variable.
974: Collective
976: Input Parameters:
977: + tao - the Tao solver context
978: . J - user-created misfit constraint Jacobian matrix
979: . Jpre - user-created misfit constraint Jacobian matrix for constructing the preconditioner, often this is `J`
980: . func - function pointer for the misfit constraint Jacobian update function
981: - ctx - user context for the misfit constraint Jacobian
983: Level: advanced
985: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
986: @*/
987: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
988: {
989: TAO_ADMM *am = (TAO_ADMM *)tao->data;
991: PetscFunctionBegin;
993: if (J) {
995: PetscCheckSameComm(tao, 1, J, 2);
996: }
997: if (Jpre) {
999: PetscCheckSameComm(tao, 1, Jpre, 3);
1000: }
1001: if (ctx) am->misfitjacobianP = ctx;
1002: if (func) am->ops->misfitjac = func;
1004: if (J) {
1005: PetscCall(PetscObjectReference((PetscObject)J));
1006: PetscCall(MatDestroy(&am->JA));
1007: am->JA = J;
1008: }
1009: if (Jpre) {
1010: PetscCall(PetscObjectReference((PetscObject)Jpre));
1011: PetscCall(MatDestroy(&am->JApre));
1012: am->JApre = Jpre;
1013: }
1014: PetscFunctionReturn(PETSC_SUCCESS);
1015: }
1017: /*@C
1018: TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for the `TAOADMM` algorithm. Matrix B constrains the z variable.
1020: Collective
1022: Input Parameters:
1023: + tao - the `Tao` solver context
1024: . J - user-created regularizer constraint Jacobian matrix
1025: . Jpre - user-created regularizer Jacobian constraint matrix for constructing the preconditioner, often this is `J`
1026: . func - function pointer for the regularizer constraint Jacobian update function
1027: - ctx - user context for the regularizer constraint Jacobian
1029: Level: advanced
1031: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetMisfitConstraintJacobian()`, `TAOADMM`
1032: @*/
1033: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1034: {
1035: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1037: PetscFunctionBegin;
1039: if (J) {
1041: PetscCheckSameComm(tao, 1, J, 2);
1042: }
1043: if (Jpre) {
1045: PetscCheckSameComm(tao, 1, Jpre, 3);
1046: }
1047: if (ctx) am->regjacobianP = ctx;
1048: if (func) am->ops->regjac = func;
1050: if (J) {
1051: PetscCall(PetscObjectReference((PetscObject)J));
1052: PetscCall(MatDestroy(&am->JB));
1053: am->JB = J;
1054: }
1055: if (Jpre) {
1056: PetscCall(PetscObjectReference((PetscObject)Jpre));
1057: PetscCall(MatDestroy(&am->JBpre));
1058: am->JBpre = Jpre;
1059: }
1060: PetscFunctionReturn(PETSC_SUCCESS);
1061: }
1063: /*@C
1064: TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function
1066: Collective
1068: Input Parameters:
1069: + tao - the `Tao` context
1070: . func - function pointer for the misfit value and gradient evaluation
1071: - ctx - user context for the misfit
1073: Level: advanced
1075: .seealso: `TAOADMM`
1076: @*/
1077: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1078: {
1079: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1081: PetscFunctionBegin;
1083: am->misfitobjgradP = ctx;
1084: am->ops->misfitobjgrad = func;
1085: PetscFunctionReturn(PETSC_SUCCESS);
1086: }
1088: /*@C
1089: TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1090: function into the algorithm, to be used for subsolverX.
1092: Collective
1094: Input Parameters:
1095: + tao - the `Tao` context
1096: . H - user-created matrix for the Hessian of the misfit term
1097: . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1098: . func - function pointer for the misfit Hessian evaluation
1099: - ctx - user context for the misfit Hessian
1101: Level: advanced
1103: .seealso: `TAOADMM`
1104: @*/
1105: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1106: {
1107: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1109: PetscFunctionBegin;
1111: if (H) {
1113: PetscCheckSameComm(tao, 1, H, 2);
1114: }
1115: if (Hpre) {
1117: PetscCheckSameComm(tao, 1, Hpre, 3);
1118: }
1119: if (ctx) am->misfithessP = ctx;
1120: if (func) am->ops->misfithess = func;
1121: if (H) {
1122: PetscCall(PetscObjectReference((PetscObject)H));
1123: PetscCall(MatDestroy(&am->Hx));
1124: am->Hx = H;
1125: }
1126: if (Hpre) {
1127: PetscCall(PetscObjectReference((PetscObject)Hpre));
1128: PetscCall(MatDestroy(&am->Hxpre));
1129: am->Hxpre = Hpre;
1130: }
1131: PetscFunctionReturn(PETSC_SUCCESS);
1132: }
1134: /*@C
1135: TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function
1137: Collective
1139: Input Parameters:
1140: + tao - the Tao context
1141: . func - function pointer for the regularizer value and gradient evaluation
1142: - ctx - user context for the regularizer
1144: Level: advanced
1146: .seealso: `TAOADMM`
1147: @*/
1148: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1149: {
1150: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1152: PetscFunctionBegin;
1154: am->regobjgradP = ctx;
1155: am->ops->regobjgrad = func;
1156: PetscFunctionReturn(PETSC_SUCCESS);
1157: }
1159: /*@C
1160: TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1161: function, to be used for subsolverZ.
1163: Collective
1165: Input Parameters:
1166: + tao - the `Tao` context
1167: . H - user-created matrix for the Hessian of the regularization term
1168: . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1169: . func - function pointer for the regularizer Hessian evaluation
1170: - ctx - user context for the regularizer Hessian
1172: Level: advanced
1174: .seealso: `TAOADMM`
1175: @*/
1176: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1177: {
1178: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1180: PetscFunctionBegin;
1182: if (H) {
1184: PetscCheckSameComm(tao, 1, H, 2);
1185: }
1186: if (Hpre) {
1188: PetscCheckSameComm(tao, 1, Hpre, 3);
1189: }
1190: if (ctx) am->reghessP = ctx;
1191: if (func) am->ops->reghess = func;
1192: if (H) {
1193: PetscCall(PetscObjectReference((PetscObject)H));
1194: PetscCall(MatDestroy(&am->Hz));
1195: am->Hz = H;
1196: }
1197: if (Hpre) {
1198: PetscCall(PetscObjectReference((PetscObject)Hpre));
1199: PetscCall(MatDestroy(&am->Hzpre));
1200: am->Hzpre = Hpre;
1201: }
1202: PetscFunctionReturn(PETSC_SUCCESS);
1203: }
1205: /*@
1206: TaoGetADMMParentTao - Gets pointer to parent `TAOADMM`, used by inner subsolver.
1208: Collective
1210: Input Parameter:
1211: . tao - the `Tao` context
1213: Output Parameter:
1214: . admm_tao - the parent `Tao` context
1216: Level: advanced
1218: .seealso: `TAOADMM`
1219: @*/
1220: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1221: {
1222: PetscFunctionBegin;
1224: PetscCall(PetscObjectQuery((PetscObject)tao, "TaoGetADMMParentTao_ADMM", (PetscObject *)admm_tao));
1225: PetscFunctionReturn(PETSC_SUCCESS);
1226: }
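/* Usage sketch (MyMisfitObjGrad and its body are hypothetical): from inside a callback attached
 * to one of the subsolvers, the parent ADMM context can be recovered to query shared state, e.g.
 *
 *   static PetscErrorCode MyMisfitObjGrad(Tao subtao, Vec x, PetscReal *f, Vec g, void *ctx)
 *   {
 *     Tao       admm;
 *     PetscReal mu;
 *
 *     PetscFunctionBegin;
 *     PetscCall(TaoGetADMMParentTao(subtao, &admm));
 *     PetscCall(TaoADMMGetSpectralPenalty(admm, &mu));
 *     (... evaluate *f and g, possibly using mu ...)
 *     PetscFunctionReturn(PETSC_SUCCESS);
 *   }
 */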
1228: /*@
1229: TaoADMMGetDualVector - Returns the dual vector associated with the current `TAOADMM` state
1231: Not Collective
1233: Input Parameter:
1234: . tao - the `Tao` context
1236: Output Parameter:
1237: . Y - the dual vector
1239: Level: intermediate
1241: .seealso: `TAOADMM`
1242: @*/
1243: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1244: {
1245: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1247: PetscFunctionBegin;
1249: *Y = am->y;
1250: PetscFunctionReturn(PETSC_SUCCESS);
1251: }
1253: /*@
1254: TaoADMMSetRegularizerType - Set regularizer type for `TAOADMM` routine
1256: Not Collective
1258: Input Parameters:
1259: + tao - the `Tao` context
1260: - type - regularizer type
1262: Options Database Key:
1263: . -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh> - select the regularizer
1265: Level: intermediate
1267: .seealso: `TaoADMMGetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1268: @*/
1269: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1270: {
1271: PetscFunctionBegin;
1274: PetscTryMethod(tao, "TaoADMMSetRegularizerType_C", (Tao, TaoADMMRegularizerType), (tao, type));
1275: PetscFunctionReturn(PETSC_SUCCESS);
1276: }
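/* Usage sketch (UserRegObjGrad and reg_ctx are hypothetical): to supply a custom regularizer
 * instead of the default soft-threshold one, an application would pair this call with the
 * regularizer objective/gradient routine, e.g.
 *
 *   PetscCall(TaoADMMSetRegularizerType(tao, TAO_ADMM_REGULARIZER_USER));
 *   PetscCall(TaoADMMSetRegularizerObjectiveAndGradientRoutine(tao, UserRegObjGrad, reg_ctx));
 */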
1278: /*@
1279: TaoADMMGetRegularizerType - Gets the type of regularizer routine for `TAOADMM`
1281: Not Collective
1283: Input Parameter:
1284: . tao - the `Tao` context
1286: Output Parameter:
1287: . type - the type of regularizer
1289: Level: intermediate
1291: .seealso: `TaoADMMSetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1292: @*/
1293: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1294: {
1295: PetscFunctionBegin;
1297: PetscUseMethod(tao, "TaoADMMGetRegularizerType_C", (Tao, TaoADMMRegularizerType *), (tao, type));
1298: PetscFunctionReturn(PETSC_SUCCESS);
1299: }
1301: /*@
1302: TaoADMMSetUpdateType - Set the spectral penalty update type for the `TAOADMM` routine
1304: Not Collective
1306: Input Parameters:
1307: + tao - the `Tao` context
1308: - type - spectral parameter update type
1310: Level: intermediate
1312: .seealso: `TaoADMMGetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1313: @*/
1314: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1315: {
1316: PetscFunctionBegin;
1319: PetscTryMethod(tao, "TaoADMMSetUpdateType_C", (Tao, TaoADMMUpdateType), (tao, type));
1320: PetscFunctionReturn(PETSC_SUCCESS);
1321: }
1323: /*@
1324: TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for `TAOADMM`
1326: Not Collective
1328: Input Parameter:
1329: . tao - the `Tao` context
1331: Output Parameter:
1332: . type - the type of spectral penalty update routine
1334: Level: intermediate
1336: .seealso: `TaoADMMSetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1337: @*/
1338: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1339: {
1340: PetscFunctionBegin;
1342: PetscUseMethod(tao, "TaoADMMGetUpdateType_C", (Tao, TaoADMMUpdateType *), (tao, type));
1343: PetscFunctionReturn(PETSC_SUCCESS);
1344: }