Actual source code: bnk.c
1: #include <petsctaolinesearch.h>
2: #include <../src/tao/bound/impls/bnk/bnk.h>
3: #include <petscksp.h>
5: static const char *BNK_INIT[64] = {"constant", "direction", "interpolation"};
6: static const char *BNK_UPDATE[64] = {"step", "reduction", "interpolation"};
7: static const char *BNK_AS[64] = {"none", "bertsekas"};
9: /*------------------------------------------------------------*/
11: /* Routine for initializing the KSP solver, the BFGS preconditioner, and the initial trust radius estimation */
13: PetscErrorCode TaoBNKInitialize(Tao tao, PetscInt initType, PetscBool *needH)
14: {
15: TAO_BNK *bnk = (TAO_BNK *)tao->data;
16: PC pc;
17: PetscReal f_min, ftrial, prered, actred, kappa, sigma, resnorm;
18: PetscReal tau, tau_1, tau_2, tau_max, tau_min, max_radius;
19: PetscBool is_bfgs, is_jacobi, is_symmetric, sym_set;
20: PetscInt n, N, nDiff;
21: PetscInt i_max = 5;
22: PetscInt j_max = 1;
23: PetscInt i, j;
24: PetscVoidFunction kspTR;
26: /* Project the current point onto the feasible set */
27: TaoComputeVariableBounds(tao);
28: TaoSetVariableBounds(bnk->bncg, tao->XL, tao->XU);
29: if (tao->bounded) TaoLineSearchSetVariableBounds(tao->linesearch, tao->XL, tao->XU);
31: /* Project the initial point onto the feasible region */
32: TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution);
34: /* Check convergence criteria */
35: TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient);
36: TaoBNKEstimateActiveSet(tao, bnk->as_type);
37: VecCopy(bnk->unprojected_gradient, tao->gradient);
38: VecISSet(tao->gradient, bnk->active_idx, 0.0);
39: TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);
41: /* Test the initial point for convergence */
42: VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);
43: VecNorm(bnk->W, NORM_2, &resnorm);
45: TaoLogConvergenceHistory(tao, bnk->f, resnorm, 0.0, tao->ksp_its);
46: TaoMonitor(tao, tao->niter, bnk->f, resnorm, 0.0, 1.0);
47: PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
48: if (tao->reason != TAO_CONTINUE_ITERATING) return 0;
50: /* Reset KSP stopping reason counters */
51: bnk->ksp_atol = 0;
52: bnk->ksp_rtol = 0;
53: bnk->ksp_dtol = 0;
54: bnk->ksp_ctol = 0;
55: bnk->ksp_negc = 0;
56: bnk->ksp_iter = 0;
57: bnk->ksp_othr = 0;
59: /* Reset accepted step type counters */
60: bnk->tot_cg_its = 0;
61: bnk->newt = 0;
62: bnk->bfgs = 0;
63: bnk->sgrad = 0;
64: bnk->grad = 0;
66: /* Initialize the Hessian perturbation */
67: bnk->pert = bnk->sval;
69: /* Reset initial steplength to zero (this helps BNCG reset its direction internally) */
70: VecSet(tao->stepdirection, 0.0);
72: /* Allocate the vectors needed for the BFGS approximation */
73: KSPGetPC(tao->ksp, &pc);
74: PetscObjectTypeCompare((PetscObject)pc, PCLMVM, &is_bfgs);
75: PetscObjectTypeCompare((PetscObject)pc, PCJACOBI, &is_jacobi);
76: if (is_bfgs) {
77: bnk->bfgs_pre = pc;
78: PCLMVMGetMatLMVM(bnk->bfgs_pre, &bnk->M);
79: VecGetLocalSize(tao->solution, &n);
80: VecGetSize(tao->solution, &N);
81: MatSetSizes(bnk->M, n, n, N, N);
82: MatLMVMAllocate(bnk->M, tao->solution, bnk->unprojected_gradient);
83: MatIsSymmetricKnown(bnk->M, &sym_set, &is_symmetric);
85: } else if (is_jacobi) PCJacobiSetUseAbs(pc, PETSC_TRUE);
87: /* Prepare the min/max vectors for safeguarding diagonal scales */
88: VecSet(bnk->Diag_min, bnk->dmin);
89: VecSet(bnk->Diag_max, bnk->dmax);
91: /* Initialize trust-region radius. The initialization is only performed
92: when we are using Nash, Steihaug-Toint or the Generalized Lanczos method. */
93: *needH = PETSC_TRUE;
94: PetscObjectQueryFunction((PetscObject)tao->ksp, "KSPCGSetRadius_C", &kspTR);
95: if (kspTR) {
96: switch (initType) {
97: case BNK_INIT_CONSTANT:
98: /* Use the initial radius specified */
99: tao->trust = tao->trust0;
100: break;
102: case BNK_INIT_INTERPOLATION:
103: /* Use interpolation based on the initial Hessian */
104: max_radius = 0.0;
105: tao->trust = tao->trust0;
106: for (j = 0; j < j_max; ++j) {
107: f_min = bnk->f;
108: sigma = 0.0;
110: if (*needH) {
111: /* Compute the Hessian at the new step, and extract the inactive subsystem */
112: (*bnk->computehessian)(tao);
113: TaoBNKEstimateActiveSet(tao, BNK_AS_NONE);
114: MatDestroy(&bnk->H_inactive);
115: if (bnk->active_idx) {
116: MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);
117: } else {
118: PetscObjectReference((PetscObject)tao->hessian);
119: bnk->H_inactive = tao->hessian;
120: }
121: *needH = PETSC_FALSE;
122: }
124: for (i = 0; i < i_max; ++i) {
125: /* Take a steepest descent step and snap it to bounds */
126: VecCopy(tao->solution, bnk->Xold);
127: VecAXPY(tao->solution, -tao->trust / bnk->gnorm, tao->gradient);
128: TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution);
129: /* Compute the step we actually accepted */
130: VecCopy(tao->solution, bnk->W);
131: VecAXPY(bnk->W, -1.0, bnk->Xold);
132: /* Compute the objective at the trial */
133: TaoComputeObjective(tao, tao->solution, &ftrial);
135: VecCopy(bnk->Xold, tao->solution);
136: if (PetscIsInfOrNanReal(ftrial)) {
137: tau = bnk->gamma1_i;
138: } else {
139: if (ftrial < f_min) {
140: f_min = ftrial;
141: sigma = -tao->trust / bnk->gnorm;
142: }
144: /* Compute the predicted and actual reduction */
145: if (bnk->active_idx) {
146: VecGetSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);
147: VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
148: } else {
149: bnk->X_inactive = bnk->W;
150: bnk->inactive_work = bnk->Xwork;
151: }
152: MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);
153: VecDot(bnk->X_inactive, bnk->inactive_work, &prered);
154: if (bnk->active_idx) {
155: VecRestoreSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);
156: VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
157: }
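/* Note: prered at this point holds d^T H d for the bound-projected gradient step d stored in W;
   the next line converts it (roughly) into the decrease predicted by the quadratic model for a
   gradient step of length tao->trust, and actred = f - ftrial below is the observed decrease.
   Their ratio kappa measures model agreement and drives the radius scaling that follows. */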
158: prered = tao->trust * (bnk->gnorm - 0.5 * tao->trust * prered / (bnk->gnorm * bnk->gnorm));
159: actred = bnk->f - ftrial;
160: if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
161: kappa = 1.0;
162: } else {
163: kappa = actred / prered;
164: }
166: tau_1 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust + (1.0 - bnk->theta_i) * prered - actred);
167: tau_2 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust - (1.0 + bnk->theta_i) * prered + actred);
168: tau_min = PetscMin(tau_1, tau_2);
169: tau_max = PetscMax(tau_1, tau_2);
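/* Note (hedged): tau_1 and tau_2 are interpolation-based candidate factors for rescaling the
   trial radius; the branches below clamp the chosen factor tau into [gamma1_i, gamma4_i]
   depending on how well the model agreed with the objective (kappa). */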
171: if (PetscAbsScalar(kappa - (PetscReal)1.0) <= bnk->mu1_i) {
172: /* Great agreement */
173: max_radius = PetscMax(max_radius, tao->trust);
175: if (tau_max < 1.0) {
176: tau = bnk->gamma3_i;
177: } else if (tau_max > bnk->gamma4_i) {
178: tau = bnk->gamma4_i;
179: } else {
180: tau = tau_max;
181: }
182: } else if (PetscAbsScalar(kappa - (PetscReal)1.0) <= bnk->mu2_i) {
183: /* Good agreement */
184: max_radius = PetscMax(max_radius, tao->trust);
186: if (tau_max < bnk->gamma2_i) {
187: tau = bnk->gamma2_i;
188: } else if (tau_max > bnk->gamma3_i) {
189: tau = bnk->gamma3_i;
190: } else {
191: tau = tau_max;
192: }
193: } else {
194: /* Not good agreement */
195: if (tau_min > 1.0) {
196: tau = bnk->gamma2_i;
197: } else if (tau_max < bnk->gamma1_i) {
198: tau = bnk->gamma1_i;
199: } else if ((tau_min < bnk->gamma1_i) && (tau_max >= 1.0)) {
200: tau = bnk->gamma1_i;
201: } else if ((tau_1 >= bnk->gamma1_i) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
202: tau = tau_1;
203: } else if ((tau_2 >= bnk->gamma1_i) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1_i) || (tau_1 >= 1.0))) {
204: tau = tau_2;
205: } else {
206: tau = tau_max;
207: }
208: }
209: }
210: tao->trust = tau * tao->trust;
211: }
213: if (f_min < bnk->f) {
214: /* We accidentally found a solution better than the initial, so accept it */
215: bnk->f = f_min;
216: VecCopy(tao->solution, bnk->Xold);
217: VecAXPY(tao->solution, sigma, tao->gradient);
218: TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution);
219: VecCopy(tao->solution, tao->stepdirection);
220: VecAXPY(tao->stepdirection, -1.0, bnk->Xold);
221: TaoComputeGradient(tao, tao->solution, bnk->unprojected_gradient);
222: TaoBNKEstimateActiveSet(tao, bnk->as_type);
223: VecCopy(bnk->unprojected_gradient, tao->gradient);
224: VecISSet(tao->gradient, bnk->active_idx, 0.0);
225: /* Compute gradient at the new iterate and flip switch to compute the Hessian later */
226: TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);
227: *needH = PETSC_TRUE;
228: /* Test the new step for convergence */
229: VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);
230: VecNorm(bnk->W, NORM_2, &resnorm);
232: TaoLogConvergenceHistory(tao, bnk->f, resnorm, 0.0, tao->ksp_its);
233: TaoMonitor(tao, tao->niter, bnk->f, resnorm, 0.0, 1.0);
234: PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
235: if (tao->reason != TAO_CONTINUE_ITERATING) return 0;
236: /* activate BNCG recycling early because we already have a step direction computed */
237: TaoSetRecycleHistory(bnk->bncg, PETSC_TRUE);
238: }
239: }
240: tao->trust = PetscMax(tao->trust, max_radius);
242: /* Ensure that the trust radius is within the limits */
243: tao->trust = PetscMax(tao->trust, bnk->min_radius);
244: tao->trust = PetscMin(tao->trust, bnk->max_radius);
245: break;
247: default:
248: /* Norm of the first direction will initialize radius */
249: tao->trust = 0.0;
250: break;
251: }
252: }
253: return 0;
254: }
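/* Usage note (a sketch, not part of the routine itself): the initialization strategy above is
   selected at runtime via the option registered in TaoSetFromOptions_BNK below, e.g.
       -tao_bnk_init_type constant | direction | interpolation
   "constant" starts from tao->trust0, "direction" (handled by the default case above) lets the
   norm of the first Newton direction set the radius, and "interpolation" runs the
   model-agreement loop implemented above. */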
256: /*------------------------------------------------------------*/
258: /* Routine for computing the exact Hessian and preparing the preconditioner at the new iterate */
260: PetscErrorCode TaoBNKComputeHessian(Tao tao)
261: {
262: TAO_BNK *bnk = (TAO_BNK *)tao->data;
264: /* Compute the Hessian */
265: TaoComputeHessian(tao, tao->solution, tao->hessian, tao->hessian_pre);
266: /* Add a correction to the BFGS preconditioner */
267: if (bnk->M) MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
268: /* Prepare the reduced sub-matrices for the inactive set */
269: MatDestroy(&bnk->Hpre_inactive);
270: MatDestroy(&bnk->H_inactive);
271: if (bnk->active_idx) {
272: MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);
273: if (tao->hessian == tao->hessian_pre) {
274: PetscObjectReference((PetscObject)bnk->H_inactive);
275: bnk->Hpre_inactive = bnk->H_inactive;
276: } else {
277: MatCreateSubMatrix(tao->hessian_pre, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->Hpre_inactive);
278: }
279: if (bnk->bfgs_pre) PCLMVMSetIS(bnk->bfgs_pre, bnk->inactive_idx);
280: } else {
281: PetscObjectReference((PetscObject)tao->hessian);
282: bnk->H_inactive = tao->hessian;
283: if (tao->hessian == tao->hessian_pre) {
284: PetscObjectReference((PetscObject)bnk->H_inactive);
285: bnk->Hpre_inactive = bnk->H_inactive;
286: } else {
287: PetscObjectReference((PetscObject)tao->hessian_pre);
288: bnk->Hpre_inactive = tao->hessian_pre;
289: }
290: if (bnk->bfgs_pre) PCLMVMClearIS(bnk->bfgs_pre);
291: }
292: return 0;
293: }
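/* Illustrative example (hedged): if the solution has three variables and the second one is
   actively bounded, inactive_idx selects entries {0, 2} and H_inactive becomes the 2x2
   submatrix H([0,2],[0,2]); when tao->hessian and tao->hessian_pre are the same object, the
   same submatrix is shared by reference instead of being extracted twice. */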
295: /*------------------------------------------------------------*/
297: /* Routine for estimating the active set */
299: PetscErrorCode TaoBNKEstimateActiveSet(Tao tao, PetscInt asType)
300: {
301: TAO_BNK *bnk = (TAO_BNK *)tao->data;
302: PetscBool hessComputed, diagExists, hadactive;
304: hadactive = bnk->active_idx ? PETSC_TRUE : PETSC_FALSE;
305: switch (asType) {
306: case BNK_AS_NONE:
307: ISDestroy(&bnk->inactive_idx);
308: VecWhichInactive(tao->XL, tao->solution, bnk->unprojected_gradient, tao->XU, PETSC_TRUE, &bnk->inactive_idx);
309: ISDestroy(&bnk->active_idx);
310: ISComplementVec(bnk->inactive_idx, tao->solution, &bnk->active_idx);
311: break;
313: case BNK_AS_BERTSEKAS:
314: /* Compute the trial step vector with which we will estimate the active set at the next iteration */
315: if (bnk->M) {
316: /* If the BFGS preconditioner matrix is available, we will construct a trial step with it */
317: MatSolve(bnk->M, bnk->unprojected_gradient, bnk->W);
318: } else {
319: hessComputed = diagExists = PETSC_FALSE;
320: if (tao->hessian) MatAssembled(tao->hessian, &hessComputed);
321: if (hessComputed) MatHasOperation(tao->hessian, MATOP_GET_DIAGONAL, &diagExists);
322: if (diagExists) {
323: /* The BFGS preconditioner is not available, so apply the inverse of the safeguarded absolute Hessian diagonal to the gradient instead */
324: MatGetDiagonal(tao->hessian, bnk->Xwork);
325: VecAbs(bnk->Xwork);
326: VecMedian(bnk->Diag_min, bnk->Xwork, bnk->Diag_max, bnk->Xwork);
327: VecReciprocal(bnk->Xwork);
328: VecPointwiseMult(bnk->W, bnk->Xwork, bnk->unprojected_gradient);
329: } else {
330: /* If the Hessian or its diagonal does not exist, we simply use the gradient step */
331: VecCopy(bnk->unprojected_gradient, bnk->W);
332: }
333: }
334: VecScale(bnk->W, -1.0);
335: TaoEstimateActiveBounds(tao->solution, tao->XL, tao->XU, bnk->unprojected_gradient, bnk->W, bnk->Xwork, bnk->as_step, &bnk->as_tol, &bnk->active_lower, &bnk->active_upper, &bnk->active_fixed, &bnk->active_idx, &bnk->inactive_idx);
336: break;
338: default:
339: break;
340: }
341: bnk->resetksp = (PetscBool)(bnk->active_idx || hadactive); /* inactive Hessian size may have changed, need to reset operators */
342: return 0;
343: }
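/* Summary (hedged): in the Bertsekas branch, W holds a (preconditioned) descent direction, and
   TaoEstimateActiveBounds marks as active those variables whose trial move along W (of length
   as_step, within tolerance as_tol) would land on or past a bound, splitting them into
   lower-active, upper-active, and fixed index sets; the remaining variables form inactive_idx. */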
345: /*------------------------------------------------------------*/
347: /* Routine for bounding the step direction */
349: PetscErrorCode TaoBNKBoundStep(Tao tao, PetscInt asType, Vec step)
350: {
351: TAO_BNK *bnk = (TAO_BNK *)tao->data;
353: switch (asType) {
354: case BNK_AS_NONE:
355: VecISSet(step, bnk->active_idx, 0.0);
356: break;
358: case BNK_AS_BERTSEKAS:
359: TaoBoundStep(tao->solution, tao->XL, tao->XU, bnk->active_lower, bnk->active_upper, bnk->active_fixed, 1.0, step);
360: break;
362: default:
363: break;
364: }
365: return 0;
366: }
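/* Example (hedged): for BNK_AS_NONE the step is simply zeroed on the active set, while for
   BNK_AS_BERTSEKAS TaoBoundStep adjusts the active components so that x + step lands on the
   corresponding bound, i.e. roughly (XL - x) for lower-active and (XU - x) for upper-active
   variables, matching the "adjusted zero step" described in TaoBNKComputeStep below. */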
368: /*------------------------------------------------------------*/
370: /* Routine for taking a finite number of BNCG iterations to
371: accelerate Newton convergence.
373: In practice, this approach simply trades off Hessian evaluations
374: for more gradient evaluations.
375: */
377: PetscErrorCode TaoBNKTakeCGSteps(Tao tao, PetscBool *terminate)
378: {
379: TAO_BNK *bnk = (TAO_BNK *)tao->data;
381: *terminate = PETSC_FALSE;
382: if (bnk->max_cg_its > 0) {
383: /* Copy the current function value (important vectors are already shared) */
384: bnk->bncg_ctx->f = bnk->f;
385: /* Take some small finite number of BNCG iterations */
386: TaoSolve(bnk->bncg);
387: /* Add the number of gradient and function evaluations to the total */
388: tao->nfuncs += bnk->bncg->nfuncs;
389: tao->nfuncgrads += bnk->bncg->nfuncgrads;
390: tao->ngrads += bnk->bncg->ngrads;
391: tao->nhess += bnk->bncg->nhess;
392: bnk->tot_cg_its += bnk->bncg->niter;
393: /* Extract the BNCG function value out and save it into BNK */
394: bnk->f = bnk->bncg_ctx->f;
395: if (bnk->bncg->reason == TAO_CONVERGED_GATOL || bnk->bncg->reason == TAO_CONVERGED_GRTOL || bnk->bncg->reason == TAO_CONVERGED_GTTOL || bnk->bncg->reason == TAO_CONVERGED_MINF) {
396: *terminate = PETSC_TRUE;
397: } else {
398: TaoBNKEstimateActiveSet(tao, bnk->as_type);
399: }
400: }
401: return 0;
402: }
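/* Usage note: the number of accelerating BNCG iterations is controlled by
       -tao_bnk_max_cg_its <n>
   (see TaoSetFromOptions_BNK below); with the default of 0 this routine is a no-op. */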
404: /*------------------------------------------------------------*/
406: /* Routine for computing the Newton step. */
408: PetscErrorCode TaoBNKComputeStep(Tao tao, PetscBool shift, KSPConvergedReason *ksp_reason, PetscInt *step_type)
409: {
410: TAO_BNK *bnk = (TAO_BNK *)tao->data;
411: PetscInt bfgsUpdates = 0;
412: PetscInt kspits;
413: PetscBool is_lmvm;
414: PetscVoidFunction kspTR;
416: /* If there are no inactive variables left, save some computation and return an adjusted zero step
417: that has (l-x) and (u-x) for lower and upper bounded variables. */
418: if (!bnk->inactive_idx) {
419: VecSet(tao->stepdirection, 0.0);
420: TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
421: return 0;
422: }
424: /* Shift the reduced Hessian matrix */
425: if (shift && bnk->pert > 0) {
426: PetscObjectTypeCompare((PetscObject)tao->hessian, MATLMVM, &is_lmvm);
427: if (is_lmvm) {
428: MatShift(tao->hessian, bnk->pert);
429: } else {
430: MatShift(bnk->H_inactive, bnk->pert);
431: if (bnk->H_inactive != bnk->Hpre_inactive) MatShift(bnk->Hpre_inactive, bnk->pert);
432: }
433: }
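/* Note (hedged): the shift above adds bnk->pert to the diagonal of the (reduced) Hessian, so the
   KSP below effectively solves (H_inactive + pert*I) d = g_inactive, a Levenberg-style
   regularization that helps keep the system positive definite when the exact Hessian is
   indefinite. */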
435: /* Solve the Newton system of equations */
436: tao->ksp_its = 0;
437: VecSet(tao->stepdirection, 0.0);
438: if (bnk->resetksp) {
439: KSPReset(tao->ksp);
440: KSPResetFromOptions(tao->ksp);
441: bnk->resetksp = PETSC_FALSE;
442: }
443: KSPSetOperators(tao->ksp, bnk->H_inactive, bnk->Hpre_inactive);
444: VecCopy(bnk->unprojected_gradient, bnk->Gwork);
445: if (bnk->active_idx) {
446: VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
447: VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
448: } else {
449: bnk->G_inactive = bnk->unprojected_gradient;
450: bnk->X_inactive = tao->stepdirection;
451: }
452: KSPCGSetRadius(tao->ksp, tao->trust);
453: KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);
454: KSPGetIterationNumber(tao->ksp, &kspits);
455: tao->ksp_its += kspits;
456: tao->ksp_tot_its += kspits;
457: PetscObjectQueryFunction((PetscObject)tao->ksp, "KSPCGGetNormD_C", &kspTR);
458: if (kspTR) {
459: KSPCGGetNormD(tao->ksp, &bnk->dnorm);
461: if (0.0 == tao->trust) {
462: /* Radius was uninitialized; use the norm of the direction */
463: if (bnk->dnorm > 0.0) {
464: tao->trust = bnk->dnorm;
466: /* Modify the radius if it is too large or small */
467: tao->trust = PetscMax(tao->trust, bnk->min_radius);
468: tao->trust = PetscMin(tao->trust, bnk->max_radius);
469: } else {
470: /* The direction was bad; set radius to default value and re-solve
471: the trust-region subproblem to get a direction */
472: tao->trust = tao->trust0;
474: /* Modify the radius if it is too large or small */
475: tao->trust = PetscMax(tao->trust, bnk->min_radius);
476: tao->trust = PetscMin(tao->trust, bnk->max_radius);
478: KSPCGSetRadius(tao->ksp, tao->trust);
479: KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);
480: KSPGetIterationNumber(tao->ksp, &kspits);
481: tao->ksp_its += kspits;
482: tao->ksp_tot_its += kspits;
483: KSPCGGetNormD(tao->ksp, &bnk->dnorm);
486: }
487: }
488: }
489: /* Restore the subvectors */
490: if (bnk->active_idx) {
491: VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
492: VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
493: }
494: /* Negate the KSP solution to obtain a descent direction and make sure the step is zero for actively bounded variables */
495: VecScale(tao->stepdirection, -1.0);
496: TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
498: /* Record convergence reasons */
499: KSPGetConvergedReason(tao->ksp, ksp_reason);
500: if (KSP_CONVERGED_ATOL == *ksp_reason) {
501: ++bnk->ksp_atol;
502: } else if (KSP_CONVERGED_RTOL == *ksp_reason) {
503: ++bnk->ksp_rtol;
504: } else if (KSP_CONVERGED_CG_CONSTRAINED == *ksp_reason) {
505: ++bnk->ksp_ctol;
506: } else if (KSP_CONVERGED_CG_NEG_CURVE == *ksp_reason) {
507: ++bnk->ksp_negc;
508: } else if (KSP_DIVERGED_DTOL == *ksp_reason) {
509: ++bnk->ksp_dtol;
510: } else if (KSP_DIVERGED_ITS == *ksp_reason) {
511: ++bnk->ksp_iter;
512: } else {
513: ++bnk->ksp_othr;
514: }
516: /* Make sure the BFGS preconditioner is healthy */
517: if (bnk->M) {
518: MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
519: if ((KSP_DIVERGED_INDEFINITE_PC == *ksp_reason) && (bfgsUpdates > 0)) {
520: /* Preconditioner is numerically indefinite; reset the approximation. */
521: MatLMVMReset(bnk->M, PETSC_FALSE);
522: MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
523: }
524: }
525: *step_type = BNK_NEWTON;
526: return 0;
527: }
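/* Usage note (hedged): KSPCGSetRadius/KSPCGGetNormD are only provided by the trust-region-capable
   CG solvers (e.g. KSPSTCG, KSPNASH, KSPGLTR), hence the PetscObjectQueryFunction guards above.
   The solver can be chosen at runtime with, e.g.,
       -tao_bnk_ksp_type gltr
   using the "tao_bnk_" KSP prefix set in TaoSetFromOptions_BNK. */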
529: /*------------------------------------------------------------*/
531: /* Routine for recomputing the predicted reduction for a given step vector */
533: PetscErrorCode TaoBNKRecomputePred(Tao tao, Vec S, PetscReal *prered)
534: {
535: TAO_BNK *bnk = (TAO_BNK *)tao->data;
537: /* Extract subvectors associated with the inactive set */
538: if (bnk->active_idx) {
539: VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
540: VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
541: VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
542: } else {
543: bnk->X_inactive = tao->stepdirection;
544: bnk->inactive_work = bnk->Xwork;
545: bnk->G_inactive = bnk->Gwork;
546: }
547: /* Recompute the predicted decrease based on the quadratic model */
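  /* Concretely, the three calls below compute
         prered = g_inactive^T d_inactive - 0.5 * d_inactive^T H_inactive d_inactive
     where d is tao->stepdirection and g is the gradient work vector Gwork, both restricted to
     the inactive index set. */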
548: MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);
549: VecAYPX(bnk->inactive_work, -0.5, bnk->G_inactive);
550: VecDot(bnk->inactive_work, bnk->X_inactive, prered);
551: /* Restore the sub vectors */
552: if (bnk->active_idx) {
553: VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
554: VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
555: VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
556: }
557: return 0;
558: }
560: /*------------------------------------------------------------*/
562: /* Routine for ensuring that the Newton step is a descent direction.
564: The step direction falls back onto BFGS, scaled gradient and gradient steps
565: in the event that the Newton step fails the test.
566: */
568: PetscErrorCode TaoBNKSafeguardStep(Tao tao, KSPConvergedReason ksp_reason, PetscInt *stepType)
569: {
570: TAO_BNK *bnk = (TAO_BNK *)tao->data;
571: PetscReal gdx, e_min;
572: PetscInt bfgsUpdates;
574: switch (*stepType) {
575: case BNK_NEWTON:
576: VecDot(tao->stepdirection, tao->gradient, &gdx);
577: if ((gdx >= 0.0) || PetscIsInfOrNanReal(gdx)) {
578: /* The Newton step is not a descent direction, or it produced Inf or NaN.
579: Update the perturbation for next time */
580: if (bnk->pert <= 0.0) {
581: PetscBool is_gltr;
583: /* Initialize the perturbation */
584: bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
585: PetscObjectTypeCompare((PetscObject)(tao->ksp), KSPGLTR, &is_gltr);
586: if (is_gltr) {
587: KSPGLTRGetMinEig(tao->ksp, &e_min);
588: bnk->pert = PetscMax(bnk->pert, -e_min);
589: }
590: } else {
591: /* Increase the perturbation */
592: bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
593: }
595: if (!bnk->M) {
596: /* We do not have the BFGS matrix available and updated,
597: so we must use the gradient direction in this case */
598: VecCopy(tao->gradient, tao->stepdirection);
599: *stepType = BNK_GRADIENT;
600: } else {
601: /* Attempt to use the BFGS direction */
602: MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);
604: /* Check for success (descent direction)
605: NOTE: Negative gdx here means not a descent direction because
606: the fall-back step is missing a negative sign. */
607: VecDot(tao->gradient, tao->stepdirection, &gdx);
608: if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
609: /* The BFGS direction is not a descent direction or it produced Inf or NaN.
610: We can assert bfgsUpdates > 1 in this case because
611: the first solve produces the scaled gradient direction,
612: which is guaranteed to be a descent direction */
614: /* Use steepest descent direction (scaled) */
615: MatLMVMReset(bnk->M, PETSC_FALSE);
616: MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
617: MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);
619: *stepType = BNK_SCALED_GRADIENT;
620: } else {
621: MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
622: if (1 == bfgsUpdates) {
623: /* The first BFGS direction is always the scaled gradient */
624: *stepType = BNK_SCALED_GRADIENT;
625: } else {
626: *stepType = BNK_BFGS;
627: }
628: }
629: }
630: /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
631: VecScale(tao->stepdirection, -1.0);
632: TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
633: } else {
634: /* The computed Newton step is a descent direction */
635: switch (ksp_reason) {
636: case KSP_DIVERGED_NANORINF:
637: case KSP_DIVERGED_BREAKDOWN:
638: case KSP_DIVERGED_INDEFINITE_MAT:
639: case KSP_DIVERGED_INDEFINITE_PC:
640: case KSP_CONVERGED_CG_NEG_CURVE:
641: /* Matrix or preconditioner is indefinite; increase perturbation */
642: if (bnk->pert <= 0.0) {
643: PetscBool is_gltr;
645: /* Initialize the perturbation */
646: bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
647: PetscObjectTypeCompare((PetscObject)(tao->ksp), KSPGLTR, &is_gltr);
648: if (is_gltr) {
649: KSPGLTRGetMinEig(tao->ksp, &e_min);
650: bnk->pert = PetscMax(bnk->pert, -e_min);
651: }
652: } else {
653: /* Increase the perturbation */
654: bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
655: }
656: break;
658: default:
659: /* Newton step computation is good; decrease perturbation */
660: bnk->pert = PetscMin(bnk->psfac * bnk->pert, bnk->pmsfac * bnk->gnorm);
661: if (bnk->pert < bnk->pmin) bnk->pert = 0.0;
662: break;
663: }
664: *stepType = BNK_NEWTON;
665: }
666: break;
668: case BNK_BFGS:
669: /* Check for success (descent direction) */
670: VecDot(tao->stepdirection, tao->gradient, &gdx);
671: if (gdx >= 0 || PetscIsInfOrNanReal(gdx)) {
672: /* The step is not a descent direction or the solve was not successful.
673: Use the (scaled) steepest descent direction */
674: MatLMVMReset(bnk->M, PETSC_FALSE);
675: MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
676: MatSolve(bnk->M, tao->gradient, tao->stepdirection);
677: VecScale(tao->stepdirection, -1.0);
678: TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
679: *stepType = BNK_SCALED_GRADIENT;
680: } else {
681: *stepType = BNK_BFGS;
682: }
683: break;
685: case BNK_SCALED_GRADIENT:
686: break;
688: default:
689: break;
690: }
692: return 0;
693: }
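/* Summary: the safeguard cascade is Newton -> BFGS -> scaled gradient -> gradient. Each time the
   Newton step fails the descent test, the diagonal perturbation grows as
       pert <- min(pmax, max(pgfac * pert, pmgfac * ||g||))
   (or is initialized from imin/imax/imfac), and it shrinks again once Newton steps succeed. */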
695: /*------------------------------------------------------------*/
697: /* Routine for performing a bound-projected More-Thuente line search.
699: Includes fallbacks to BFGS, scaled gradient, and unscaled gradient steps if the
700: Newton step does not produce a valid step length.
701: */
703: PetscErrorCode TaoBNKPerformLineSearch(Tao tao, PetscInt *stepType, PetscReal *steplen, TaoLineSearchConvergedReason *reason)
704: {
705: TAO_BNK *bnk = (TAO_BNK *)tao->data;
706: TaoLineSearchConvergedReason ls_reason;
707: PetscReal e_min, gdx;
708: PetscInt bfgsUpdates;
710: /* Perform the linesearch */
711: TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);
712: TaoAddLineSearchCounts(tao);
714: while (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER && *stepType != BNK_SCALED_GRADIENT && *stepType != BNK_GRADIENT) {
715: /* Linesearch failed, revert solution */
716: bnk->f = bnk->fold;
717: VecCopy(bnk->Xold, tao->solution);
718: VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient);
720: switch (*stepType) {
721: case BNK_NEWTON:
722: /* Failed to obtain acceptable iterate with Newton step
723: Update the perturbation for next time */
724: if (bnk->pert <= 0.0) {
725: PetscBool is_gltr;
727: /* Initialize the perturbation */
728: bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
729: PetscObjectTypeCompare((PetscObject)(tao->ksp), KSPGLTR, &is_gltr);
730: if (is_gltr) {
731: KSPGLTRGetMinEig(tao->ksp, &e_min);
732: bnk->pert = PetscMax(bnk->pert, -e_min);
733: }
734: } else {
735: /* Increase the perturbation */
736: bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
737: }
739: if (!bnk->M) {
740: /* We do not have the BFGS matrix available and being updated,
741: so we must use the gradient direction in this case */
742: VecCopy(bnk->unprojected_gradient, tao->stepdirection);
743: *stepType = BNK_GRADIENT;
744: } else {
745: /* Attempt to use the BFGS direction */
746: MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);
747: /* Check for success (descent direction)
748: NOTE: Negative gdx means not a descent direction because the step here is missing a negative sign. */
749: VecDot(tao->gradient, tao->stepdirection, &gdx);
750: if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
751: /* The BFGS direction is not a descent direction or it produced Inf or NaN.
752: We can assert bfgsUpdates > 1 in this case.
753: Use the (scaled) steepest descent direction */
754: MatLMVMReset(bnk->M, PETSC_FALSE);
755: MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
756: MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);
758: bfgsUpdates = 1;
759: *stepType = BNK_SCALED_GRADIENT;
760: } else {
761: MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
762: if (1 == bfgsUpdates) {
763: /* The first BFGS direction is always the scaled gradient */
764: *stepType = BNK_SCALED_GRADIENT;
765: } else {
766: *stepType = BNK_BFGS;
767: }
768: }
769: }
770: break;
772: case BNK_BFGS:
773: /* Can only enter here when the BFGS (LMVM) preconditioner is in use.
774: Failed to obtain an acceptable iterate with the BFGS step;
775: attempt to use the scaled gradient direction instead */
776: MatLMVMReset(bnk->M, PETSC_FALSE);
777: MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
778: MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);
780: bfgsUpdates = 1;
781: *stepType = BNK_SCALED_GRADIENT;
782: break;
783: }
784: /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
785: VecScale(tao->stepdirection, -1.0);
786: TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
788: /* Perform one last line search with the fall-back step */
789: TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);
790: TaoAddLineSearchCounts(tao);
791: }
792: *reason = ls_reason;
793: return 0;
794: }
796: /*------------------------------------------------------------*/
798: /* Routine for updating the trust radius.
800: The function supports three different update methods:
801: 1) Line-search step length based
802: 2) Predicted decrease on the CG quadratic model
803: 3) Interpolation
804: */
806: PetscErrorCode TaoBNKUpdateTrustRadius(Tao tao, PetscReal prered, PetscReal actred, PetscInt updateType, PetscInt stepType, PetscBool *accept)
807: {
808: TAO_BNK *bnk = (TAO_BNK *)tao->data;
810: PetscReal step, kappa;
811: PetscReal gdx, tau_1, tau_2, tau_min, tau_max;
813: /* Update trust region radius */
814: *accept = PETSC_FALSE;
815: switch (updateType) {
816: case BNK_UPDATE_STEP:
817: *accept = PETSC_TRUE; /* always accept here because line search succeeded */
818: if (stepType == BNK_NEWTON) {
819: TaoLineSearchGetStepLength(tao->linesearch, &step);
820: if (step < bnk->nu1) {
821: /* Very bad step taken; reduce radius */
822: tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
823: } else if (step < bnk->nu2) {
824: /* Reasonably bad step taken; reduce radius */
825: tao->trust = bnk->omega2 * PetscMin(bnk->dnorm, tao->trust);
826: } else if (step < bnk->nu3) {
827: /* Reasonable step was taken; leave radius alone */
828: if (bnk->omega3 < 1.0) {
829: tao->trust = bnk->omega3 * PetscMin(bnk->dnorm, tao->trust);
830: } else if (bnk->omega3 > 1.0) {
831: tao->trust = PetscMax(bnk->omega3 * bnk->dnorm, tao->trust);
832: }
833: } else if (step < bnk->nu4) {
834: /* Full step taken; increase the radius */
835: tao->trust = PetscMax(bnk->omega4 * bnk->dnorm, tao->trust);
836: } else {
837: /* More than full step taken; increase the radius */
838: tao->trust = PetscMax(bnk->omega5 * bnk->dnorm, tao->trust);
839: }
840: } else {
841: /* Newton step was not good; reduce the radius */
842: tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
843: }
844: break;
846: case BNK_UPDATE_REDUCTION:
847: if (stepType == BNK_NEWTON) {
848: if ((prered < 0.0) || PetscIsInfOrNanReal(prered)) {
849: /* The predicted reduction has the wrong sign. This cannot
850: happen in infinite precision arithmetic. Step should
851: be rejected! */
852: tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
853: } else {
854: if (PetscIsInfOrNanReal(actred)) {
855: tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
856: } else {
857: if ((PetscAbsScalar(actred) <= PetscMax(1.0, PetscAbsScalar(bnk->f)) * bnk->epsilon) && (PetscAbsScalar(prered) <= PetscMax(1.0, PetscAbsScalar(bnk->f)) * bnk->epsilon)) {
858: kappa = 1.0;
859: } else {
860: kappa = actred / prered;
861: }
862: /* Accept or reject the step and update radius */
863: if (kappa < bnk->eta1) {
864: /* Reject the step */
865: tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
866: } else {
867: /* Accept the step */
868: *accept = PETSC_TRUE;
869: /* Update the trust region radius only if the computed step is at the trust radius boundary */
870: if (bnk->dnorm == tao->trust) {
871: if (kappa < bnk->eta2) {
872: /* Marginal bad step */
873: tao->trust = bnk->alpha2 * tao->trust;
874: } else if (kappa < bnk->eta3) {
875: /* Reasonable step */
876: tao->trust = bnk->alpha3 * tao->trust;
877: } else if (kappa < bnk->eta4) {
878: /* Good step */
879: tao->trust = bnk->alpha4 * tao->trust;
880: } else {
881: /* Very good step */
882: tao->trust = bnk->alpha5 * tao->trust;
883: }
884: }
885: }
886: }
887: }
888: } else {
889: /* Newton step was not good; reduce the radius */
890: tao->trust = bnk->alpha1 * PetscMin(bnk->dnorm, tao->trust);
891: }
892: break;
894: default:
895: if (stepType == BNK_NEWTON) {
896: if (prered < 0.0) {
897: /* The predicted reduction has the wrong sign. This cannot
898: happen in infinite precision arithmetic. Step should
899: be rejected! */
900: tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
901: } else {
902: if (PetscIsInfOrNanReal(actred)) {
903: tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
904: } else {
905: if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
906: kappa = 1.0;
907: } else {
908: kappa = actred / prered;
909: }
911: VecDot(tao->gradient, tao->stepdirection, &gdx);
912: tau_1 = bnk->theta * gdx / (bnk->theta * gdx - (1.0 - bnk->theta) * prered + actred);
913: tau_2 = bnk->theta * gdx / (bnk->theta * gdx + (1.0 + bnk->theta) * prered - actred);
914: tau_min = PetscMin(tau_1, tau_2);
915: tau_max = PetscMax(tau_1, tau_2);
917: if (kappa >= 1.0 - bnk->mu1) {
918: /* Great agreement */
919: *accept = PETSC_TRUE;
920: if (tau_max < 1.0) {
921: tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
922: } else if (tau_max > bnk->gamma4) {
923: tao->trust = PetscMax(tao->trust, bnk->gamma4 * bnk->dnorm);
924: } else {
925: tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
926: }
927: } else if (kappa >= 1.0 - bnk->mu2) {
928: /* Good agreement */
929: *accept = PETSC_TRUE;
930: if (tau_max < bnk->gamma2) {
931: tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
932: } else if (tau_max > bnk->gamma3) {
933: tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
934: } else if (tau_max < 1.0) {
935: tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
936: } else {
937: tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
938: }
939: } else {
940: /* Not good agreement */
941: if (tau_min > 1.0) {
942: tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
943: } else if (tau_max < bnk->gamma1) {
944: tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
945: } else if ((tau_min < bnk->gamma1) && (tau_max >= 1.0)) {
946: tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
947: } else if ((tau_1 >= bnk->gamma1) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1) || (tau_2 >= 1.0))) {
948: tao->trust = tau_1 * PetscMin(tao->trust, bnk->dnorm);
949: } else if ((tau_2 >= bnk->gamma1) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1) || (tau_1 >= 1.0))) {
950: tao->trust = tau_2 * PetscMin(tao->trust, bnk->dnorm);
951: } else {
952: tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
953: }
954: }
955: }
956: }
957: } else {
958: /* Newton step was not good; reduce the radius */
959: tao->trust = bnk->gamma1 * PetscMin(bnk->dnorm, tao->trust);
960: }
961: break;
962: }
963: /* Make sure the radius does not violate min and max settings */
964: tao->trust = PetscMin(tao->trust, bnk->max_radius);
965: tao->trust = PetscMax(tao->trust, bnk->min_radius);
966: return 0;
967: }
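/* Illustrative example (with the defaults set in TaoCreate_BNK below): for the "reduction"
   update, kappa = actred/prered is compared against eta1..eta4 (1e-4, 0.25, 0.5, 0.9); e.g.
   kappa = 0.3 accepts the step and, if the step hit the trust-region boundary, leaves the
   radius unchanged (alpha3 = 1.0), whereas kappa = 0.95 would scale it by alpha5 = 4.0. */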
969: /* ---------------------------------------------------------- */
971: PetscErrorCode TaoBNKAddStepCounts(Tao tao, PetscInt stepType)
972: {
973: TAO_BNK *bnk = (TAO_BNK *)tao->data;
975: switch (stepType) {
976: case BNK_NEWTON:
977: ++bnk->newt;
978: break;
979: case BNK_BFGS:
980: ++bnk->bfgs;
981: break;
982: case BNK_SCALED_GRADIENT:
983: ++bnk->sgrad;
984: break;
985: case BNK_GRADIENT:
986: ++bnk->grad;
987: break;
988: default:
989: break;
990: }
991: return 0;
992: }
994: /* ---------------------------------------------------------- */
996: PetscErrorCode TaoSetUp_BNK(Tao tao)
997: {
998: TAO_BNK *bnk = (TAO_BNK *)tao->data;
999: PetscInt i;
1001: if (!tao->gradient) VecDuplicate(tao->solution, &tao->gradient);
1002: if (!tao->stepdirection) VecDuplicate(tao->solution, &tao->stepdirection);
1003: if (!bnk->W) VecDuplicate(tao->solution, &bnk->W);
1004: if (!bnk->Xold) VecDuplicate(tao->solution, &bnk->Xold);
1005: if (!bnk->Gold) VecDuplicate(tao->solution, &bnk->Gold);
1006: if (!bnk->Xwork) VecDuplicate(tao->solution, &bnk->Xwork);
1007: if (!bnk->Gwork) VecDuplicate(tao->solution, &bnk->Gwork);
1008: if (!bnk->unprojected_gradient) VecDuplicate(tao->solution, &bnk->unprojected_gradient);
1009: if (!bnk->unprojected_gradient_old) VecDuplicate(tao->solution, &bnk->unprojected_gradient_old);
1010: if (!bnk->Diag_min) VecDuplicate(tao->solution, &bnk->Diag_min);
1011: if (!bnk->Diag_max) VecDuplicate(tao->solution, &bnk->Diag_max);
1012: if (bnk->max_cg_its > 0) {
1013: /* Ensure that the important common vectors are shared between BNK and embedded BNCG */
1014: bnk->bncg_ctx = (TAO_BNCG *)bnk->bncg->data;
1015: PetscObjectReference((PetscObject)(bnk->unprojected_gradient_old));
1016: VecDestroy(&bnk->bncg_ctx->unprojected_gradient_old);
1017: bnk->bncg_ctx->unprojected_gradient_old = bnk->unprojected_gradient_old;
1018: PetscObjectReference((PetscObject)(bnk->unprojected_gradient));
1019: VecDestroy(&bnk->bncg_ctx->unprojected_gradient);
1020: bnk->bncg_ctx->unprojected_gradient = bnk->unprojected_gradient;
1021: PetscObjectReference((PetscObject)(bnk->Gold));
1022: VecDestroy(&bnk->bncg_ctx->G_old);
1023: bnk->bncg_ctx->G_old = bnk->Gold;
1024: PetscObjectReference((PetscObject)(tao->gradient));
1025: VecDestroy(&bnk->bncg->gradient);
1026: bnk->bncg->gradient = tao->gradient;
1027: PetscObjectReference((PetscObject)(tao->stepdirection));
1028: VecDestroy(&bnk->bncg->stepdirection);
1029: bnk->bncg->stepdirection = tao->stepdirection;
1030: TaoSetSolution(bnk->bncg, tao->solution);
1031: /* Copy over some settings from BNK into BNCG */
1032: TaoSetMaximumIterations(bnk->bncg, bnk->max_cg_its);
1033: TaoSetTolerances(bnk->bncg, tao->gatol, tao->grtol, tao->gttol);
1034: TaoSetFunctionLowerBound(bnk->bncg, tao->fmin);
1035: TaoSetConvergenceTest(bnk->bncg, tao->ops->convergencetest, tao->cnvP);
1036: TaoSetObjective(bnk->bncg, tao->ops->computeobjective, tao->user_objP);
1037: TaoSetGradient(bnk->bncg, NULL, tao->ops->computegradient, tao->user_gradP);
1038: TaoSetObjectiveAndGradient(bnk->bncg, NULL, tao->ops->computeobjectiveandgradient, tao->user_objgradP);
1039: PetscObjectCopyFortranFunctionPointers((PetscObject)tao, (PetscObject)(bnk->bncg));
1040: for (i = 0; i < tao->numbermonitors; ++i) {
1041: TaoSetMonitor(bnk->bncg, tao->monitor[i], tao->monitorcontext[i], tao->monitordestroy[i]);
1042: PetscObjectReference((PetscObject)(tao->monitorcontext[i]));
1043: }
1044: }
1045: bnk->X_inactive = NULL;
1046: bnk->G_inactive = NULL;
1047: bnk->inactive_work = NULL;
1048: bnk->active_work = NULL;
1049: bnk->inactive_idx = NULL;
1050: bnk->active_idx = NULL;
1051: bnk->active_lower = NULL;
1052: bnk->active_upper = NULL;
1053: bnk->active_fixed = NULL;
1054: bnk->M = NULL;
1055: bnk->H_inactive = NULL;
1056: bnk->Hpre_inactive = NULL;
1057: return 0;
1058: }
1060: /*------------------------------------------------------------*/
1062: PetscErrorCode TaoDestroy_BNK(Tao tao)
1063: {
1064: TAO_BNK *bnk = (TAO_BNK *)tao->data;
1066: VecDestroy(&bnk->W);
1067: VecDestroy(&bnk->Xold);
1068: VecDestroy(&bnk->Gold);
1069: VecDestroy(&bnk->Xwork);
1070: VecDestroy(&bnk->Gwork);
1071: VecDestroy(&bnk->unprojected_gradient);
1072: VecDestroy(&bnk->unprojected_gradient_old);
1073: VecDestroy(&bnk->Diag_min);
1074: VecDestroy(&bnk->Diag_max);
1075: ISDestroy(&bnk->active_lower);
1076: ISDestroy(&bnk->active_upper);
1077: ISDestroy(&bnk->active_fixed);
1078: ISDestroy(&bnk->active_idx);
1079: ISDestroy(&bnk->inactive_idx);
1080: MatDestroy(&bnk->Hpre_inactive);
1081: MatDestroy(&bnk->H_inactive);
1082: TaoDestroy(&bnk->bncg);
1083: KSPDestroy(&tao->ksp);
1084: PetscFree(tao->data);
1085: return 0;
1086: }
1088: /*------------------------------------------------------------*/
1090: PetscErrorCode TaoSetFromOptions_BNK(Tao tao, PetscOptionItems *PetscOptionsObject)
1091: {
1092: TAO_BNK *bnk = (TAO_BNK *)tao->data;
1094: PetscOptionsHeadBegin(PetscOptionsObject, "Newton-Krylov method for bound constrained optimization");
1095: PetscOptionsEList("-tao_bnk_init_type", "radius initialization type", "", BNK_INIT, BNK_INIT_TYPES, BNK_INIT[bnk->init_type], &bnk->init_type, NULL);
1096: PetscOptionsEList("-tao_bnk_update_type", "radius update type", "", BNK_UPDATE, BNK_UPDATE_TYPES, BNK_UPDATE[bnk->update_type], &bnk->update_type, NULL);
1097: PetscOptionsEList("-tao_bnk_as_type", "active set estimation method", "", BNK_AS, BNK_AS_TYPES, BNK_AS[bnk->as_type], &bnk->as_type, NULL);
1098: PetscOptionsReal("-tao_bnk_sval", "(developer) Hessian perturbation starting value", "", bnk->sval, &bnk->sval, NULL);
1099: PetscOptionsReal("-tao_bnk_imin", "(developer) minimum initial Hessian perturbation", "", bnk->imin, &bnk->imin, NULL);
1100: PetscOptionsReal("-tao_bnk_imax", "(developer) maximum initial Hessian perturbation", "", bnk->imax, &bnk->imax, NULL);
1101: PetscOptionsReal("-tao_bnk_imfac", "(developer) initial merit factor for Hessian perturbation", "", bnk->imfac, &bnk->imfac, NULL);
1102: PetscOptionsReal("-tao_bnk_pmin", "(developer) minimum Hessian perturbation", "", bnk->pmin, &bnk->pmin, NULL);
1103: PetscOptionsReal("-tao_bnk_pmax", "(developer) maximum Hessian perturbation", "", bnk->pmax, &bnk->pmax, NULL);
1104: PetscOptionsReal("-tao_bnk_pgfac", "(developer) Hessian perturbation growth factor", "", bnk->pgfac, &bnk->pgfac, NULL);
1105: PetscOptionsReal("-tao_bnk_psfac", "(developer) Hessian perturbation shrink factor", "", bnk->psfac, &bnk->psfac, NULL);
1106: PetscOptionsReal("-tao_bnk_pmgfac", "(developer) merit growth factor for Hessian perturbation", "", bnk->pmgfac, &bnk->pmgfac, NULL);
1107: PetscOptionsReal("-tao_bnk_pmsfac", "(developer) merit shrink factor for Hessian perturbation", "", bnk->pmsfac, &bnk->pmsfac, NULL);
1108: PetscOptionsReal("-tao_bnk_eta1", "(developer) threshold for rejecting step (-tao_bnk_update_type reduction)", "", bnk->eta1, &bnk->eta1, NULL);
1109: PetscOptionsReal("-tao_bnk_eta2", "(developer) threshold for accepting marginal step (-tao_bnk_update_type reduction)", "", bnk->eta2, &bnk->eta2, NULL);
1110: PetscOptionsReal("-tao_bnk_eta3", "(developer) threshold for accepting reasonable step (-tao_bnk_update_type reduction)", "", bnk->eta3, &bnk->eta3, NULL);
1111: PetscOptionsReal("-tao_bnk_eta4", "(developer) threshold for accepting good step (-tao_bnk_update_type reduction)", "", bnk->eta4, &bnk->eta4, NULL);
1112: PetscOptionsReal("-tao_bnk_alpha1", "(developer) radius reduction factor for rejected step (-tao_bnk_update_type reduction)", "", bnk->alpha1, &bnk->alpha1, NULL);
1113: PetscOptionsReal("-tao_bnk_alpha2", "(developer) radius reduction factor for marginally accepted bad step (-tao_bnk_update_type reduction)", "", bnk->alpha2, &bnk->alpha2, NULL);
1114: PetscOptionsReal("-tao_bnk_alpha3", "(developer) radius increase factor for reasonable accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha3, &bnk->alpha3, NULL);
1115: PetscOptionsReal("-tao_bnk_alpha4", "(developer) radius increase factor for good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha4, &bnk->alpha4, NULL);
1116: PetscOptionsReal("-tao_bnk_alpha5", "(developer) radius increase factor for very good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha5, &bnk->alpha5, NULL);
1117: PetscOptionsReal("-tao_bnk_nu1", "(developer) threshold for small line-search step length (-tao_bnk_update_type step)", "", bnk->nu1, &bnk->nu1, NULL);
1118: PetscOptionsReal("-tao_bnk_nu2", "(developer) threshold for reasonable line-search step length (-tao_bnk_update_type step)", "", bnk->nu2, &bnk->nu2, NULL);
1119: PetscOptionsReal("-tao_bnk_nu3", "(developer) threshold for large line-search step length (-tao_bnk_update_type step)", "", bnk->nu3, &bnk->nu3, NULL);
1120: PetscOptionsReal("-tao_bnk_nu4", "(developer) threshold for very large line-search step length (-tao_bnk_update_type step)", "", bnk->nu4, &bnk->nu4, NULL);
1121: PetscOptionsReal("-tao_bnk_omega1", "(developer) radius reduction factor for very small line-search step length (-tao_bnk_update_type step)", "", bnk->omega1, &bnk->omega1, NULL);
1122: PetscOptionsReal("-tao_bnk_omega2", "(developer) radius reduction factor for small line-search step length (-tao_bnk_update_type step)", "", bnk->omega2, &bnk->omega2, NULL);
1123: PetscOptionsReal("-tao_bnk_omega3", "(developer) radius factor for decent line-search step length (-tao_bnk_update_type step)", "", bnk->omega3, &bnk->omega3, NULL);
1124: PetscOptionsReal("-tao_bnk_omega4", "(developer) radius increase factor for large line-search step length (-tao_bnk_update_type step)", "", bnk->omega4, &bnk->omega4, NULL);
1125: PetscOptionsReal("-tao_bnk_omega5", "(developer) radius increase factor for very large line-search step length (-tao_bnk_update_type step)", "", bnk->omega5, &bnk->omega5, NULL);
1126: PetscOptionsReal("-tao_bnk_mu1_i", "(developer) threshold for accepting very good step (-tao_bnk_init_type interpolation)", "", bnk->mu1_i, &bnk->mu1_i, NULL);
1127: PetscOptionsReal("-tao_bnk_mu2_i", "(developer) threshold for accepting good step (-tao_bnk_init_type interpolation)", "", bnk->mu2_i, &bnk->mu2_i, NULL);
1128: PetscOptionsReal("-tao_bnk_gamma1_i", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma1_i, &bnk->gamma1_i, NULL);
1129: PetscOptionsReal("-tao_bnk_gamma2_i", "(developer) radius reduction factor for rejected bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma2_i, &bnk->gamma2_i, NULL);
1130: PetscOptionsReal("-tao_bnk_gamma3_i", "(developer) radius increase factor for accepted good step (-tao_bnk_init_type interpolation)", "", bnk->gamma3_i, &bnk->gamma3_i, NULL);
1131: PetscOptionsReal("-tao_bnk_gamma4_i", "(developer) radius increase factor for accepted very good step (-tao_bnk_init_type interpolation)", "", bnk->gamma4_i, &bnk->gamma4_i, NULL);
1132: PetscOptionsReal("-tao_bnk_theta_i", "(developer) trust region interpolation factor (-tao_bnk_init_type interpolation)", "", bnk->theta_i, &bnk->theta_i, NULL);
1133: PetscOptionsReal("-tao_bnk_mu1", "(developer) threshold for accepting very good step (-tao_bnk_update_type interpolation)", "", bnk->mu1, &bnk->mu1, NULL);
1134: PetscOptionsReal("-tao_bnk_mu2", "(developer) threshold for accepting good step (-tao_bnk_update_type interpolation)", "", bnk->mu2, &bnk->mu2, NULL);
1135: PetscOptionsReal("-tao_bnk_gamma1", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma1, &bnk->gamma1, NULL);
1136: PetscOptionsReal("-tao_bnk_gamma2", "(developer) radius reduction factor for rejected bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma2, &bnk->gamma2, NULL);
1137: PetscOptionsReal("-tao_bnk_gamma3", "(developer) radius increase factor for accepted good step (-tao_bnk_update_type interpolation)", "", bnk->gamma3, &bnk->gamma3, NULL);
1138: PetscOptionsReal("-tao_bnk_gamma4", "(developer) radius increase factor for accepted very good step (-tao_bnk_update_type interpolation)", "", bnk->gamma4, &bnk->gamma4, NULL);
1139: PetscOptionsReal("-tao_bnk_theta", "(developer) trust region interpolation factor (-tao_bnk_update_type interpolation)", "", bnk->theta, &bnk->theta, NULL);
1140: PetscOptionsReal("-tao_bnk_min_radius", "(developer) lower bound on initial radius", "", bnk->min_radius, &bnk->min_radius, NULL);
1141: PetscOptionsReal("-tao_bnk_max_radius", "(developer) upper bound on radius", "", bnk->max_radius, &bnk->max_radius, NULL);
1142: PetscOptionsReal("-tao_bnk_epsilon", "(developer) tolerance used when computing actual and predicted reduction", "", bnk->epsilon, &bnk->epsilon, NULL);
1143: PetscOptionsReal("-tao_bnk_as_tol", "(developer) initial tolerance used when estimating actively bounded variables", "", bnk->as_tol, &bnk->as_tol, NULL);
1144: PetscOptionsReal("-tao_bnk_as_step", "(developer) step length used when estimating actively bounded variables", "", bnk->as_step, &bnk->as_step, NULL);
1145: PetscOptionsInt("-tao_bnk_max_cg_its", "number of BNCG iterations to take for each Newton step", "", bnk->max_cg_its, &bnk->max_cg_its, NULL);
1146: PetscOptionsHeadEnd();
1148: TaoSetOptionsPrefix(bnk->bncg, ((PetscObject)(tao))->prefix);
1149: TaoAppendOptionsPrefix(bnk->bncg, "tao_bnk_cg_");
1150: TaoSetFromOptions(bnk->bncg);
1152: KSPSetOptionsPrefix(tao->ksp, ((PetscObject)(tao))->prefix);
1153: KSPAppendOptionsPrefix(tao->ksp, "tao_bnk_");
1154: KSPSetFromOptions(tao->ksp);
1155: return 0;
1156: }
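/* Usage note (hedged): because of the prefix calls above, the embedded solvers inherit the outer
   Tao's prefix plus a suffix; e.g. with no outer prefix, the inner KSP responds to options such
   as -tao_bnk_ksp_type and -tao_bnk_pc_type, while the embedded BNCG solver picks up options
   under the -tao_bnk_cg_ prefix. */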
1158: /*------------------------------------------------------------*/
1160: PetscErrorCode TaoView_BNK(Tao tao, PetscViewer viewer)
1161: {
1162: TAO_BNK *bnk = (TAO_BNK *)tao->data;
1163: PetscInt nrejects;
1164: PetscBool isascii;
1166: PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);
1167: if (isascii) {
1168: PetscViewerASCIIPushTab(viewer);
1169: if (bnk->M) {
1170: MatLMVMGetRejectCount(bnk->M, &nrejects);
1171: PetscViewerASCIIPrintf(viewer, "Rejected BFGS updates: %" PetscInt_FMT "\n", nrejects);
1172: }
1173: PetscViewerASCIIPrintf(viewer, "CG steps: %" PetscInt_FMT "\n", bnk->tot_cg_its);
1174: PetscViewerASCIIPrintf(viewer, "Newton steps: %" PetscInt_FMT "\n", bnk->newt);
1175: if (bnk->M) PetscViewerASCIIPrintf(viewer, "BFGS steps: %" PetscInt_FMT "\n", bnk->bfgs);
1176: PetscViewerASCIIPrintf(viewer, "Scaled gradient steps: %" PetscInt_FMT "\n", bnk->sgrad);
1177: PetscViewerASCIIPrintf(viewer, "Gradient steps: %" PetscInt_FMT "\n", bnk->grad);
1178: PetscViewerASCIIPrintf(viewer, "KSP termination reasons:\n");
1179: PetscViewerASCIIPrintf(viewer, " atol: %" PetscInt_FMT "\n", bnk->ksp_atol);
1180: PetscViewerASCIIPrintf(viewer, " rtol: %" PetscInt_FMT "\n", bnk->ksp_rtol);
1181: PetscViewerASCIIPrintf(viewer, " ctol: %" PetscInt_FMT "\n", bnk->ksp_ctol);
1182: PetscViewerASCIIPrintf(viewer, " negc: %" PetscInt_FMT "\n", bnk->ksp_negc);
1183: PetscViewerASCIIPrintf(viewer, " dtol: %" PetscInt_FMT "\n", bnk->ksp_dtol);
1184: PetscViewerASCIIPrintf(viewer, " iter: %" PetscInt_FMT "\n", bnk->ksp_iter);
1185: PetscViewerASCIIPrintf(viewer, " othr: %" PetscInt_FMT "\n", bnk->ksp_othr);
1186: PetscViewerASCIIPopTab(viewer);
1187: }
1188: return 0;
1189: }
1191: /* ---------------------------------------------------------- */
1193: /*MC
1194: TAOBNK - Shared base-type for Bounded Newton-Krylov type algorithms.
1195: At each iteration, the BNK methods solve the symmetric
1196: system of equations to obtain the step direction dk:
1197: Hk dk = -gk
1198: for free variables only. The step can be globalized either through
1199: trust-region methods, or a line search, or a heuristic mixture of both.
1201: Options Database Keys:
1202: + -tao_bnk_max_cg_its - maximum number of bounded conjugate-gradient iterations taken in each Newton loop
1203: . -tao_bnk_init_type - trust radius initialization method ("constant", "direction", "interpolation")
1204: . -tao_bnk_update_type - trust radius update method ("step", "reduction", "interpolation")
1205: . -tao_bnk_as_type - active-set estimation method ("none", "bertsekas")
1206: . -tao_bnk_as_tol - (developer) initial tolerance used in estimating bounded active variables (-as_type bertsekas)
1207: . -tao_bnk_as_step - (developer) trial step length used in estimating bounded active variables (-as_type bertsekas)
1208: . -tao_bnk_sval - (developer) Hessian perturbation starting value
1209: . -tao_bnk_imin - (developer) minimum initial Hessian perturbation
1210: . -tao_bnk_imax - (developer) maximum initial Hessian perturbation
1211: . -tao_bnk_pmin - (developer) minimum Hessian perturbation
1212: . -tao_bnk_pmax - (developer) maximum Hessian perturbation
1213: . -tao_bnk_pgfac - (developer) Hessian perturbation growth factor
1214: . -tao_bnk_psfac - (developer) Hessian perturbation shrink factor
1215: . -tao_bnk_imfac - (developer) initial merit factor for Hessian perturbation
1216: . -tao_bnk_pmgfac - (developer) merit growth factor for Hessian perturbation
1217: . -tao_bnk_pmsfac - (developer) merit shrink factor for Hessian perturbation
1218: . -tao_bnk_eta1 - (developer) threshold for rejecting step (-update_type reduction)
1219: . -tao_bnk_eta2 - (developer) threshold for accepting marginal step (-update_type reduction)
1220: . -tao_bnk_eta3 - (developer) threshold for accepting reasonable step (-update_type reduction)
1221: . -tao_bnk_eta4 - (developer) threshold for accepting good step (-update_type reduction)
1222: . -tao_bnk_alpha1 - (developer) radius reduction factor for rejected step (-update_type reduction)
1223: . -tao_bnk_alpha2 - (developer) radius reduction factor for marginally accepted bad step (-update_type reduction)
1224: . -tao_bnk_alpha3 - (developer) radius increase factor for reasonable accepted step (-update_type reduction)
1225: . -tao_bnk_alpha4 - (developer) radius increase factor for good accepted step (-update_type reduction)
1226: . -tao_bnk_alpha5 - (developer) radius increase factor for very good accepted step (-update_type reduction)
1227: . -tao_bnk_epsilon - (developer) tolerance for small pred/actual ratios that trigger automatic step acceptance (-update_type reduction)
1228: . -tao_bnk_mu1 - (developer) threshold for accepting very good step (-update_type interpolation)
1229: . -tao_bnk_mu2 - (developer) threshold for accepting good step (-update_type interpolation)
1230: . -tao_bnk_gamma1 - (developer) radius reduction factor for rejected very bad step (-update_type interpolation)
1231: . -tao_bnk_gamma2 - (developer) radius reduction factor for rejected bad step (-update_type interpolation)
1232: . -tao_bnk_gamma3 - (developer) radius increase factor for accepted good step (-update_type interpolation)
1233: . -tao_bnk_gamma4 - (developer) radius increase factor for accepted very good step (-update_type interpolation)
1234: . -tao_bnk_theta - (developer) trust region interpolation factor (-update_type interpolation)
1235: . -tao_bnk_nu1 - (developer) threshold for small line-search step length (-update_type step)
1236: . -tao_bnk_nu2 - (developer) threshold for reasonable line-search step length (-update_type step)
1237: . -tao_bnk_nu3 - (developer) threshold for large line-search step length (-update_type step)
1238: . -tao_bnk_nu4 - (developer) threshold for very large line-search step length (-update_type step)
1239: . -tao_bnk_omega1 - (developer) radius reduction factor for very small line-search step length (-update_type step)
1240: . -tao_bnk_omega2 - (developer) radius reduction factor for small line-search step length (-update_type step)
1241: . -tao_bnk_omega3 - (developer) radius factor for decent line-search step length (-update_type step)
1242: . -tao_bnk_omega4 - (developer) radius increase factor for large line-search step length (-update_type step)
1243: . -tao_bnk_omega5 - (developer) radius increase factor for very large line-search step length (-update_type step)
1244: . -tao_bnk_mu1_i - (developer) threshold for accepting very good step (-init_type interpolation)
1245: . -tao_bnk_mu2_i - (developer) threshold for accepting good step (-init_type interpolation)
1246: . -tao_bnk_gamma1_i - (developer) radius reduction factor for rejected very bad step (-init_type interpolation)
1247: . -tao_bnk_gamma2_i - (developer) radius reduction factor for rejected bad step (-init_type interpolation)
1248: . -tao_bnk_gamma3_i - (developer) radius increase factor for accepted good step (-init_type interpolation)
1249: . -tao_bnk_gamma4_i - (developer) radius increase factor for accepted very good step (-init_type interpolation)
1250: - -tao_bnk_theta_i - (developer) trust region interpolation factor (-init_type interpolation)
1252: Level: beginner
1253: M*/
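/* A minimal usage sketch (hypothetical driver, not part of this file): FormFunctionGradient and
   FormHessian are user-supplied placeholder callbacks, and TAOBNTR is one of the concrete BNK
   variants, since TAOBNK itself is a shared base type.

     Tao tao;
     TaoCreate(PETSC_COMM_WORLD, &tao);
     TaoSetType(tao, TAOBNTR);                                    // trust-region BNK variant
     TaoSetSolution(tao, x);                                      // initial guess Vec
     TaoSetVariableBounds(tao, xl, xu);                           // lower/upper bound Vecs
     TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, &user);
     TaoSetHessian(tao, H, H, FormHessian, &user);
     TaoSetFromOptions(tao);                                      // e.g. -tao_bnk_init_type interpolation
     TaoSolve(tao);
     TaoDestroy(&tao);
*/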
1255: PetscErrorCode TaoCreate_BNK(Tao tao)
1256: {
1257: TAO_BNK *bnk;
1258: PC pc;
1260: PetscNew(&bnk);
1262: tao->ops->setup = TaoSetUp_BNK;
1263: tao->ops->view = TaoView_BNK;
1264: tao->ops->setfromoptions = TaoSetFromOptions_BNK;
1265: tao->ops->destroy = TaoDestroy_BNK;
1267: /* Override default settings (unless already changed) */
1268: if (!tao->max_it_changed) tao->max_it = 50;
1269: if (!tao->trust0_changed) tao->trust0 = 100.0;
1271: tao->data = (void *)bnk;
1273: /* Hessian shifting parameters */
1274: bnk->computehessian = TaoBNKComputeHessian;
1275: bnk->computestep = TaoBNKComputeStep;
1277: bnk->sval = 0.0;
1278: bnk->imin = 1.0e-4;
1279: bnk->imax = 1.0e+2;
1280: bnk->imfac = 1.0e-1;
1282: bnk->pmin = 1.0e-12;
1283: bnk->pmax = 1.0e+2;
1284: bnk->pgfac = 1.0e+1;
1285: bnk->psfac = 4.0e-1;
1286: bnk->pmgfac = 1.0e-1;
1287: bnk->pmsfac = 1.0e-1;
1289: /* Default values for trust-region radius update based on steplength */
1290: bnk->nu1 = 0.25;
1291: bnk->nu2 = 0.50;
1292: bnk->nu3 = 1.00;
1293: bnk->nu4 = 1.25;
1295: bnk->omega1 = 0.25;
1296: bnk->omega2 = 0.50;
1297: bnk->omega3 = 1.00;
1298: bnk->omega4 = 2.00;
1299: bnk->omega5 = 4.00;
1301: /* Default values for trust-region radius update based on reduction */
1302: bnk->eta1 = 1.0e-4;
1303: bnk->eta2 = 0.25;
1304: bnk->eta3 = 0.50;
1305: bnk->eta4 = 0.90;
1307: bnk->alpha1 = 0.25;
1308: bnk->alpha2 = 0.50;
1309: bnk->alpha3 = 1.00;
1310: bnk->alpha4 = 2.00;
1311: bnk->alpha5 = 4.00;
1313: /* Default values for trust-region radius update based on interpolation */
1314: bnk->mu1 = 0.10;
1315: bnk->mu2 = 0.50;
1317: bnk->gamma1 = 0.25;
1318: bnk->gamma2 = 0.50;
1319: bnk->gamma3 = 2.00;
1320: bnk->gamma4 = 4.00;
1322: bnk->theta = 0.05;
1324: /* Default values for trust region initialization based on interpolation */
1325: bnk->mu1_i = 0.35;
1326: bnk->mu2_i = 0.50;
1328: bnk->gamma1_i = 0.0625;
1329: bnk->gamma2_i = 0.5;
1330: bnk->gamma3_i = 2.0;
1331: bnk->gamma4_i = 5.0;
1333: bnk->theta_i = 0.25;
1335: /* Remaining parameters */
1336: bnk->max_cg_its = 0;
1337: bnk->min_radius = 1.0e-10;
1338: bnk->max_radius = 1.0e10;
1339: bnk->epsilon = PetscPowReal(PETSC_MACHINE_EPSILON, 2.0 / 3.0);
1340: bnk->as_tol = 1.0e-3;
1341: bnk->as_step = 1.0e-3;
1342: bnk->dmin = 1.0e-6;
1343: bnk->dmax = 1.0e6;
1345: bnk->M = NULL;
1346: bnk->bfgs_pre = NULL;
1347: bnk->init_type = BNK_INIT_INTERPOLATION;
1348: bnk->update_type = BNK_UPDATE_REDUCTION;
1349: bnk->as_type = BNK_AS_BERTSEKAS;
1351: /* Create the embedded BNCG solver */
1352: TaoCreate(PetscObjectComm((PetscObject)tao), &bnk->bncg);
1353: PetscObjectIncrementTabLevel((PetscObject)bnk->bncg, (PetscObject)tao, 1);
1354: TaoSetType(bnk->bncg, TAOBNCG);
1356: /* Create the line search */
1357: TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch);
1358: PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);
1359: TaoLineSearchSetType(tao->linesearch, TAOLINESEARCHMT);
1360: TaoLineSearchUseTaoRoutines(tao->linesearch, tao);
1362: /* Set linear solver to default for symmetric matrices */
1363: KSPCreate(((PetscObject)tao)->comm, &tao->ksp);
1364: PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1);
1365: KSPSetType(tao->ksp, KSPSTCG);
1366: KSPGetPC(tao->ksp, &pc);
1367: PCSetType(pc, PCLMVM);
1368: return 0;
1369: }