std::ostream &outStream) {
  if (proj_ == nullPtr) {
    proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
  }
  // ...
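  // Configure the Krylov solver used for the PDAS subproblems. The elided branch
  // logic appears to select GMRES when the projection carries a linear constraint
  // and a Conjugate Residuals solver otherwise.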
  list.sublist("General").sublist("Krylov").set("Absolute Tolerance", atolKrylov_);
  list.sublist("General").sublist("Krylov").set("Relative Tolerance", rtolKrylov_);
  list.sublist("General").sublist("Krylov").set("Iteration Limit", maxitKrylov_);
  krylovName_ = "GMRES";
  krylov_ = makePtr<GMRES<Real>>(list);
  // ...
  krylov_ = makePtr<ConjugateResiduals<Real>>(atolKrylov_,rtolKrylov_,maxitKrylov_);
  // ...
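  // Initialize the algorithm state: project the initial guess onto the feasible
  // set, evaluate the objective and gradient there, and use the norm of the
  // projected gradient step as the initial criticality measure (gnorm); the step
  // norm starts at infinity.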
  Real ftol = std::sqrt(ROL_EPSILON<Real>());
  proj_->project(x,outStream);
  state_->iterateVec->set(x);
  state_->value = obj.value(x,ftol); state_->nfval++;
  obj.gradient(*state_->gradientVec,x,ftol); state_->ngrad++;
  state_->stepVec->set(x);
  state_->stepVec->axpy(-one,state_->gradientVec->dual());
  proj_->project(*state_->stepVec,outStream);
  state_->stepVec->axpy(-one,x);
  state_->gnorm = state_->stepVec->norm();
  state_->snorm = ROL_INF<Real>();
// ...
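// The lines below appear to belong to the solver's main run() routine: each outer
// iteration performs primal-dual active set sweeps that estimate the active set
// from x + scale_*lambda, solve a reduced Newton system on the inactive set with
// the Krylov solver, and update the iterate and multiplier estimates.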
std::ostream &outStream ) {
  const Real zero(0), one(1);
  initialize(x,g,obj,bnd,outStream);
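  // Working storage: lambda is the multiplier estimate for the bound constraint
  // (set below from the dual of the gradient), gtmp holds Hessian/residual
  // quantities, pwa/dwa are primal/dual scratch vectors, and mu, b, r are the
  // multiplier and residuals associated with the optional linear constraint.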
  Ptr<Vector<Real>> lambda = x.clone(), gtmp = g.clone();
  Ptr<Vector<Real>> pwa = x.clone(), dwa = g.clone();
  Ptr<Vector<Real>> mu, b, r;
  mu = proj_->getMultiplier()->clone(); mu->zero();
  b  = proj_->getResidual()->clone();
  r  = proj_->getResidual()->clone();
  lambda->set(state_->gradientVec->dual());
  Real xnorm(0), snorm(0), rnorm(0), tol(std::sqrt(ROL_EPSILON<Real>()));
  Ptr<LinearOperator<Real>> hessian, precond;
  // ...
  if (verbosity_ > 0) writeOutput(outStream,true);
  while (status_->check(*state_)) {
    state_->stepVec->zero();
    // ...
    proj_->getLinearConstraint()->value(*r,x,tol);
    for ( iter_ = 0; iter_ < maxit_; iter_++ ) {
      // ...
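      // Estimate the active sets from the shifted iterate xlam = x + scale_*lambda;
      // the elided lines presumably set xlam to the current iterate before the shift.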
      xlam->axpy(scale_,*lambda);
      // ...
      xtmp->axpy(-one,*state_->iterateVec);
      // ...
      xtmp->axpy(-one,*state_->iterateVec);
      // ...
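      // The elided blocks appear to build the active-set step As: components
      // estimated to be active are moved onto their upper or lower bounds,
      // measured relative to the current iterate.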
      if ( useSecantHessVec_ && secant_ != nullPtr ) {
        secant_->applyB(*gtmp,*As);
      }
      else {
        obj.hessVec(*gtmp,*As,*state_->iterateVec,itol_);
      }
      // ...
      proj_->getLinearConstraint()->applyJacobian(*b,*As,*state_->iterateVec,tol);
      // ...
      gtmp->plus(*state_->gradientVec);
      // ...
      rnorm = std::sqrt(gtmp->dot(*gtmp)+b->dot(*b));
      // ...
      rnorm = gtmp->norm();
      if ( rnorm > zero ) {
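        // Solve the Newton system restricted to the estimated inactive set with the
        // Krylov solver; the *_Poly operator variants also incorporate the linear
        // equality constraint through its Jacobian.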
        hessian = makePtr<HessianPDAS_Poly>(makePtrFromRef(obj),makePtrFromRef(bnd),
                    proj_->getLinearConstraint(),state_->iterateVec,xlam,neps_,secant_,
                    useSecantHessVec_,pwa,dwa);
        precond = makePtr<PrecondPDAS_Poly>(makePtrFromRef(obj),makePtrFromRef(bnd),
                    state_->iterateVec,xlam,neps_,secant_,useSecantPrecond_,dwa);
        // ...
        krylov_->run(sol,*hessian,rhs,*precond,iterKrylov_,flagKrylov_);
        // ...
        hessian = makePtr<HessianPDAS>(makePtrFromRef(obj),makePtrFromRef(bnd),
                    state_->iterateVec,xlam,neps_,secant_,useSecantHessVec_,pwa);
        precond = makePtr<PrecondPDAS>(makePtrFromRef(obj),makePtrFromRef(bnd),
                    state_->iterateVec,xlam,neps_,secant_,useSecantPrecond_,dwa);
        krylov_->run(*state_->stepVec,*hessian,*gtmp,*precond,iterKrylov_,flagKrylov_);
        totalKrylov_ += iterKrylov_;
      }
      // ...
      state_->stepVec->plus(*As);
      // ...
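      // Form the trial iterate x = x_k + s and update the multiplier estimate from
      // the dual of grad f(x_k) + B*s (plus, presumably, the adjoint-Jacobian term
      // for the linear constraint accumulated in the elided lines).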
      x.set(*state_->iterateVec);
      x.plus(*state_->stepVec);
      snorm = state_->stepVec->norm();
      if ( useSecantHessVec_ && secant_ != nullPtr ) {
        secant_->applyB(*gtmp,*state_->stepVec);
      }
      else {
        obj.hessVec(*gtmp,*state_->stepVec,*state_->iterateVec,itol_);
      }
      // ...
      proj_->getLinearConstraint()->applyAdjointJacobian(*dwa,*mu,*state_->iterateVec,tol);
      // ...
      gtmp->plus(*state_->gradientVec);
      lambda->set(gtmp->dual());
      // ...
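      // Inner-loop termination: the first test compares xtmp against gtol_ times
      // the current gradient norm, the second stops when the step is small relative
      // to the iterate norm, and the third flags exhaustion of the iteration limit.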
      if ( xtmp->norm() < gtol_*state_->gnorm ) { /* ... */ }
      if ( snorm < stol_*xnorm ) { /* ... */ }
      // ...
    if ( iter_ == maxit_ ) { /* ... */ }
    // ...
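    // Accept the new iterate and refresh the algorithm state: store the step norm,
    // re-evaluate the objective and gradient, recompute the projected-gradient
    // criticality measure, and update the secant approximation if one is in use.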
    state_->iterateVec->set(x);
    state_->snorm = snorm;
    state_->value = obj.value(x,tol); state_->nfval++;
    if ( secant_ != nullPtr ) {
      gtmp->set(*state_->gradientVec);
    }
    obj.gradient(*state_->gradientVec,x,tol); state_->ngrad++;
    xtmp->set(x); xtmp->axpy(-one,state_->gradientVec->dual());
    proj_->project(*xtmp,outStream);
    state_->gnorm = xtmp->norm();
    if ( secant_ != nullPtr ) {
      secant_->updateStorage(x,*state_->gradientVec,*gtmp,*state_->stepVec,state_->snorm,state_->iter+1);
    }
    // ...
    if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
  // ...
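// The lines below appear to print the algorithm name and the legend describing the
// status-output columns, followed by the column headers and the per-iteration
// output routine.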
  std::stringstream hist;
  if (verbosity_ > 1) {
    hist << std::string(114,'-') << std::endl;
    if (!useSecantHessVec_) {
      hist << "Primal Dual Active Set Newton's Method";
    }
    else {
      hist << "Primal Dual Active Set Quasi-Newton Method with " << secantName_
           << " Hessian approximation";
    }
    hist << " status output definitions" << std::endl << std::endl;
    hist << " iter - Number of iterates (steps taken)" << std::endl;
    hist << " value - Objective function value" << std::endl;
    hist << " gnorm - Norm of the gradient" << std::endl;
    hist << " snorm - Norm of the step (update to optimization vector)" << std::endl;
    hist << " #fval - Cumulative number of times the objective function was evaluated" << std::endl;
    hist << " #grad - Cumulative number of times the gradient was computed" << std::endl;
    hist << " iterPDAS - Number of Primal Dual Active Set iterations" << std::endl << std::endl;
    hist << " flagPDAS - Primal Dual Active Set flag" << std::endl;
    hist << " iterK - Number of Krylov iterations" << std::endl << std::endl;
    // ...
    hist << " iterK - Number of Krylov iterations" << std::endl << std::endl;
    hist << " flagK - Krylov flag" << std::endl;
    // ...
    hist << " feasible - Is iterate feasible?" << std::endl;
    hist << std::string(114,'-') << std::endl;
// ...
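// This block appears to print the column headers; the duplicated "iterK" entries
// come from alternative branches of the elided conditional (with and without the
// linear-constraint configuration).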
  hist << std::setw(6) << std::left << "iter";
  hist << std::setw(15) << std::left << "value";
  hist << std::setw(15) << std::left << "gnorm";
  hist << std::setw(15) << std::left << "snorm";
  hist << std::setw(10) << std::left << "#fval";
  hist << std::setw(10) << std::left << "#grad";
  // ...
  hist << std::setw(10) << std::left << "iterPDAS";
  hist << std::setw(10) << std::left << "flagPDAS";
  hist << std::setw(10) << std::left << "iterK";
  // ...
  hist << std::setw(10) << std::left << "iterK";
  hist << std::setw(10) << std::left << "flagK";
  // ...
  hist << std::setw(10) << std::left << "feasible";
// ...
// This final block appears to assemble the per-iteration status line: a reduced
// form at iteration zero, then the full set of counters and flags, with "YES"/"NO"
// reporting iterate feasibility.
  std::stringstream hist;
  hist << std::scientific << std::setprecision(6);
  if ( state_->iter == 0 ) writeName(os);
  if ( write_header ) writeHeader(os);
  if ( state_->iter == 0 ) {
    hist << std::setw(6) << std::left << state_->iter;
    hist << std::setw(15) << std::left << state_->value;
    hist << std::setw(15) << std::left << state_->gnorm;
    hist << std::setw(15) << std::left << "---";
    hist << std::setw(10) << std::left << state_->nfval;
    hist << std::setw(10) << std::left << state_->ngrad;
    hist << std::setw(10) << std::left << "---";
    hist << std::setw(10) << std::left << "---";
    hist << std::setw(10) << std::left << "---";
    // ...
    hist << std::setw(10) << std::left << "YES";
    // ...
    hist << std::setw(10) << std::left << "NO";
    // ...
  }
  else {
    hist << std::setw(6) << std::left << state_->iter;
    hist << std::setw(15) << std::left << state_->value;
    hist << std::setw(15) << std::left << state_->gnorm;
    hist << std::setw(15) << std::left << state_->snorm;
    hist << std::setw(10) << std::left << state_->nfval;
    hist << std::setw(10) << std::left << state_->ngrad;
    hist << std::setw(10) << std::left << iter_;
    hist << std::setw(10) << std::left << flag_;
    hist << std::setw(10) << std::left << totalKrylov_;
    // ...
    hist << std::setw(10) << std::left << iterKrylov_;
    hist << std::setw(10) << std::left << flagKrylov_;
    // ...
    hist << std::setw(10) << std::left << "YES";
    // ...
    hist << std::setw(10) << std::left << "NO";