ROL_PrimalDualActiveSetStep.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_PRIMALDUALACTIVESETSTEP_H
#define ROL_PRIMALDUALACTIVESETSTEP_H

#include "ROL_Step.hpp"
#include "ROL_Vector.hpp"
#include "ROL_Krylov.hpp"
#include "ROL_Objective.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_Types.hpp"
#include "ROL_Secant.hpp"
#include "Teuchos_ParameterList.hpp"

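/** \class ROL::PrimalDualActiveSetStep
    \brief Implements the computation of optimization steps with the
           Newton primal-dual active set method.

    A brief sketch of the iteration implemented below (see compute()): given
    the current iterate \f$x_k\f$ and multiplier estimate \f$\lambda_k\f$,
    the active sets are estimated from \f$x_k + c\lambda_k\f$ (the member
    scale_ plays the role of \f$c\f$), the step is fixed to the bounds on the
    estimated active set, and a Newton system reduced to the inactive set is
    solved with a Krylov method.  Secant approximations may optionally
    replace the Hessian and the preconditioner.
*/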
namespace ROL {

template <class Real>
class PrimalDualActiveSetStep : public Step<Real> {
private:

  Teuchos::RCP<PrimalDualHessian<Real> >        hessian_;
  Teuchos::RCP<PrimalDualPreconditioner<Real> > precond_;
  Teuchos::RCP<Krylov<Real> >                   krylov_;

  // Krylov Parameters
  int  iterCR_;  ///< Number of Krylov (CR) iterations
  int  flagCR_;  ///< Krylov (CR) termination flag
  Real itol_;    ///< Inexactness tolerance passed to Hessian-vector products

  // PDAS Parameters
  int  maxit_;    ///< Maximum number of PDAS iterations
  int  iter_;     ///< PDAS iteration counter
  int  flag_;     ///< PDAS termination flag
  Real stol_;     ///< PDAS minimum step size stopping tolerance
  Real gtol_;     ///< PDAS gradient stopping tolerance
  Real scale_;    ///< Scale for dual variables in the active set
  Real neps_;     ///< Tolerance used in active-set pruning
  bool feasible_; ///< Flag whether the current iterate is feasible or not

  // Dual Variable
  Teuchos::RCP<Vector<Real> > lambda_; ///< Container for dual variables
  Teuchos::RCP<Vector<Real> > xlam_;   ///< Container for primal plus dual variables
  Teuchos::RCP<Vector<Real> > x0_;     ///< Container for initial primal variables
  Teuchos::RCP<Vector<Real> > xbnd_;   ///< Container for primal variable bounds
  Teuchos::RCP<Vector<Real> > As_;     ///< Container for step projected onto active set
  Teuchos::RCP<Vector<Real> > xtmp_;   ///< Container for temporary primal storage
  Teuchos::RCP<Vector<Real> > res_;    ///< Container for optimality system residual for quadratic model
  Teuchos::RCP<Vector<Real> > Ag_;     ///< Container for gradient projected onto active set
  Teuchos::RCP<Vector<Real> > rtmp_;   ///< Container for temporary right hand side storage
  Teuchos::RCP<Vector<Real> > gtmp_;   ///< Container for temporary gradient storage

  // Secant Information
  ESecant esec_;                       ///< Enum for secant type
  Teuchos::RCP<Secant<Real> > secant_; ///< Secant object
  bool useSecantPrecond_;              ///< Whether to use a secant approximation as the preconditioner
  bool useSecantHessVec_;              ///< Whether to use a secant approximation as the Hessian

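  /** \brief Compute the gradient-based criticality measure.

      The measure evaluated below is the norm of the projected-gradient
      residual, \f$\|x_k - P_{[\ell,u]}(x_k - \nabla f(x_k))\|\f$, which is
      zero exactly at stationary points of the bound-constrained problem.
  */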
  Real computeCriticalityMeasure(Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con, Real tol) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
    obj.gradient(*(step_state->gradientVec),x,tol);      // g    = grad f(x)
    xtmp_->set(x);
    xtmp_->axpy(-1.0,(step_state->gradientVec)->dual()); // xtmp = x - g
    con.project(*xtmp_);                                 // xtmp = P(x - g)
    xtmp_->axpy(-1.0,x);                                 // xtmp = P(x - g) - x
    return xtmp_->norm();
  }

public:

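  /** \brief Constructor.

      @param[in] parlist is a parameter list containing algorithmic options
  */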
  PrimalDualActiveSetStep( Teuchos::ParameterList &parlist )
    : Step<Real>::Step(),
      hessian_(Teuchos::null), precond_(Teuchos::null), krylov_(Teuchos::null),
      iterCR_(0), flagCR_(0), itol_(0.),
      maxit_(0), iter_(0), flag_(0), stol_(0.), gtol_(0.), scale_(0.),
      neps_(-ROL_EPSILON), feasible_(false),
      lambda_(Teuchos::null), xlam_(Teuchos::null), x0_(Teuchos::null),
      xbnd_(Teuchos::null), As_(Teuchos::null), xtmp_(Teuchos::null),
      res_(Teuchos::null), Ag_(Teuchos::null), rtmp_(Teuchos::null),
      gtmp_(Teuchos::null),
      esec_(SECANT_LBFGS), secant_(Teuchos::null), useSecantPrecond_(false),
      useSecantHessVec_(false) {
    // Algorithmic parameters
    maxit_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Iteration Limit",10);
    stol_  = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Relative Step Tolerance",1.e-8);
    gtol_  = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Relative Gradient Tolerance",1.e-6);
    scale_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Dual Scaling", 1.0);
    // Build secant object
    esec_ = StringToESecant(parlist.sublist("General").sublist("Secant").get("Type","Limited-Memory BFGS"));
    useSecantHessVec_ = parlist.sublist("General").sublist("Secant").get("Use as Hessian", false);
    useSecantPrecond_ = parlist.sublist("General").sublist("Secant").get("Use as Preconditioner", false);
    if ( useSecantHessVec_ || useSecantPrecond_ ) {
      secant_ = SecantFactory<Real>(parlist);
    }
    // Build Krylov object
    krylov_ = KrylovFactory<Real>(parlist);
  }
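
  // A minimal usage sketch for this constructor (the values shown are only
  // the illustrative defaults; the sublist and key names are exactly those
  // read by the get() calls above):
  //
  //   Teuchos::ParameterList parlist;
  //   Teuchos::ParameterList &pdas =
  //     parlist.sublist("Step").sublist("Primal Dual Active Set");
  //   pdas.set("Iteration Limit",             10);
  //   pdas.set("Relative Step Tolerance",     1.e-8);
  //   pdas.set("Relative Gradient Tolerance", 1.e-6);
  //   pdas.set("Dual Scaling",                1.0);
  //   Teuchos::ParameterList &secant = parlist.sublist("General").sublist("Secant");
  //   secant.set("Type",                  "Limited-Memory BFGS");
  //   secant.set("Use as Hessian",        false);
  //   secant.set("Use as Preconditioner", false);
  //   PrimalDualActiveSetStep<double> step(parlist);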

  /** \brief Initialize step.

      This routine allocates storage, projects the initial iterate onto the
      feasible set, evaluates the objective and the criticality measure, and
      initializes the dual variable and the primal-dual Hessian and
      preconditioner operators.
  */
  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &con,
                   AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
    // Initialize state descent direction and gradient storage
    step_state->descentVec  = s.clone();
    step_state->gradientVec = g.clone();
    step_state->searchSize  = 0.0;
    // Initialize additional storage
    xlam_ = x.clone();
    x0_   = x.clone();
    xbnd_ = x.clone();
    As_   = s.clone();
    xtmp_ = x.clone();
    res_  = g.clone();
    Ag_   = g.clone();
    rtmp_ = g.clone();
    gtmp_ = g.clone();
    // Project x onto constraint set
    con.project(x);
    // Update objective function, get value, and get gradient
    Real tol = std::sqrt(ROL_EPSILON);
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;
    // Initialize dual variable
    lambda_ = s.clone();
    lambda_->set((step_state->gradientVec)->dual());
    lambda_->scale(-1.0);
    //con.setVectorToLowerBound(*lambda_);
    // Initialize Hessian and preconditioner
    Teuchos::RCP<Objective<Real> >       obj_ptr = Teuchos::rcp(&obj, false);
    Teuchos::RCP<BoundConstraint<Real> > con_ptr = Teuchos::rcp(&con, false);
    hessian_ = Teuchos::rcp(
      new PrimalDualHessian<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,xlam_,useSecantHessVec_) );
    precond_ = Teuchos::rcp(
      new PrimalDualPreconditioner<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,xlam_,
                                         useSecantPrecond_) );
  }
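
  /** \brief Compute step.

      A sketch of the loop below: with \f$c=\f$ scale_, form
      \f$x_k + c\lambda_k\f$ and estimate the active sets
      \f$\mathcal{A}_k^+ = \{i : (x_k + c\lambda_k)_i > u_i\}\f$ and
      \f$\mathcal{A}_k^- = \{i : (x_k + c\lambda_k)_i < \ell_i\}\f$ through
      the pruning calls; fix the step to \f$u-x\f$ and \f$\ell-x\f$ on the
      respective active sets; solve the Newton system restricted to the
      inactive set with the Krylov solver; and update the multiplier as the
      negative of the active components of
      \f$\nabla f(x_k) + \nabla^2 f(x_k)\,s_k\f$.
  */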
  void compute( Vector<Real> &s, const Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con,
                AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
    s.zero();
    x0_->set(x);
    res_->set(*(step_state->gradientVec));
    for ( iter_ = 0; iter_ < maxit_; iter_++ ) {
      /********************************************************************/
      // MODIFY ITERATE VECTOR TO CHECK ACTIVE SET
      /********************************************************************/
      xlam_->set(*x0_);                          // xlam = x0
      xlam_->axpy(scale_,*(lambda_));            // xlam = x0 + c*lambda
      /********************************************************************/
      // PROJECT x ONTO PRIMAL DUAL FEASIBLE SET
      /********************************************************************/
      As_->zero();                               // As = 0

      con.setVectorToUpperBound(*xbnd_);         // xbnd = u
      xbnd_->axpy(-1.0,x);                       // xbnd = u - x
      xtmp_->set(*xbnd_);                        // tmp  = u - x
      con.pruneUpperActive(*xtmp_,*xlam_,neps_); // tmp  = I(u - x)
      xbnd_->axpy(-1.0,*xtmp_);                  // xbnd = A(u - x)
      As_->plus(*xbnd_);                         // As  += A(u - x)

      con.setVectorToLowerBound(*xbnd_);         // xbnd = l
      xbnd_->axpy(-1.0,x);                       // xbnd = l - x
      xtmp_->set(*xbnd_);                        // tmp  = l - x
      con.pruneLowerActive(*xtmp_,*xlam_,neps_); // tmp  = I(l - x)
      xbnd_->axpy(-1.0,*xtmp_);                  // xbnd = A(l - x)
      As_->plus(*xbnd_);                         // As  += A(l - x)
      /********************************************************************/
      // APPLY HESSIAN TO ACTIVE COMPONENTS OF s AND REMOVE INACTIVE
      /********************************************************************/
      itol_ = std::sqrt(ROL_EPSILON);
      if ( useSecantHessVec_ && secant_ != Teuchos::null ) { // IHAs = H*As
        secant_->applyB(*gtmp_,*As_,x);
      }
      else {
        obj.hessVec(*gtmp_,*As_,x,itol_);
      }
      con.pruneActive(*gtmp_,*xlam_,neps_);      // IHAs = I(H*As)
      /********************************************************************/
      // SEPARATE ACTIVE AND INACTIVE COMPONENTS OF THE GRADIENT
      /********************************************************************/
      rtmp_->set(*(step_state->gradientVec));    // Inactive components
      con.pruneActive(*rtmp_,*xlam_,neps_);

      Ag_->set(*(step_state->gradientVec));      // Active components
      Ag_->axpy(-1.0,*rtmp_);
      /********************************************************************/
      // SOLVE REDUCED NEWTON SYSTEM
      /********************************************************************/
      rtmp_->plus(*gtmp_);
      rtmp_->scale(-1.0);                        // rhs = -Ig - I(H*As)
      s.zero();
      if ( rtmp_->norm() > 0.0 ) {
        //solve(s,*rtmp_,*xlam_,x,obj,con);      // Call conjugate residuals
        krylov_->run(s,*hessian_,*rtmp_,*precond_,iterCR_,flagCR_);
        con.pruneActive(s,*xlam_,neps_);         // s <- Is
      }
      s.plus(*As_);                              // s = Is + As
      /********************************************************************/
      // UPDATE MULTIPLIER
      /********************************************************************/
      if ( useSecantHessVec_ && secant_ != Teuchos::null ) {
        secant_->applyB(*rtmp_,s,x);
      }
      else {
        obj.hessVec(*rtmp_,s,x,itol_);
      }
      gtmp_->set(*rtmp_);                        // gtmp   = H*s
      con.pruneActive(*gtmp_,*xlam_,neps_);      // gtmp   = I(H*s)
      lambda_->set(*rtmp_);                      // lambda = H*s
      lambda_->axpy(-1.0,*gtmp_);                // lambda = A(H*s)
      lambda_->plus(*Ag_);                       // lambda = A(g + H*s)
      lambda_->scale(-1.0);                      // lambda = -A(g + H*s)
      /********************************************************************/
      // UPDATE STEP
      /********************************************************************/
      x0_->set(x);
      x0_->plus(s);                              // x0  = x + s
      res_->set(*(step_state->gradientVec));
      res_->plus(*rtmp_);                        // res = g + H*s
      // Compute criticality measure of the quadratic model at x0
      xtmp_->set(*x0_);
      xtmp_->axpy(-1.0,res_->dual());
      con.project(*xtmp_);
      xtmp_->axpy(-1.0,*x0_);
//    std::cout << s.norm()        << "  "
//              << tmp->norm()     << "  "
//              << res_->norm()    << "  "
//              << lambda_->norm() << "  "
//              << flagCR_         << "  "
//              << iterCR_         << "\n";
      if ( xtmp_->norm() < gtol_*algo_state.gnorm ) {
        flag_ = 0;
        break;
      }
      if ( s.norm() < stol_*x.norm() ) {
        flag_ = 2;
        break;
      }
    }
    if ( iter_ == maxit_ ) {
      flag_ = 1;
    }
    else {
      iter_++;
    }
  }
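  // Termination flags recorded by the loop above (reported as flagPDAS):
  //   0 - projected-gradient criticality tolerance met,
  //   1 - PDAS iteration limit reached,
  //   2 - relative step tolerance met.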

  /** \brief Update step, if successful.

      This routine updates the iterate, checks feasibility with respect to
      the bound constraints, recomputes the objective value and criticality
      measure, and, if a secant approximation is in use, updates it.
  */
  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj, BoundConstraint<Real> &con,
               AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    x.plus(s);
    feasible_ = con.isFeasible(x);
    algo_state.snorm = s.norm();
    algo_state.iter++;
    Real tol = std::sqrt(ROL_EPSILON);
    obj.update(x,true,algo_state.iter);
    algo_state.value = obj.value(x,tol);
    algo_state.nfval++;

    if ( secant_ != Teuchos::null ) {
      gtmp_->set(*(step_state->gradientVec)); // Store previous gradient for the secant update
    }
    algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
    algo_state.ngrad++;

    if ( secant_ != Teuchos::null ) {
      secant_->update(*(step_state->gradientVec),*gtmp_,s,algo_state.snorm,algo_state.iter+1);
    }
    (algo_state.iterateVec)->set(x);
  }

  /** \brief Print iterate header.
  */
  std::string printHeader( void ) const {
    std::stringstream hist;
    hist << " ";
    hist << std::setw(6)  << std::left << "iter";
    hist << std::setw(15) << std::left << "value";
    hist << std::setw(15) << std::left << "gnorm";
    hist << std::setw(15) << std::left << "snorm";
    hist << std::setw(10) << std::left << "#fval";
    hist << std::setw(10) << std::left << "#grad";
    if ( maxit_ > 1 ) {
      hist << std::setw(10) << std::left << "iterPDAS";
      hist << std::setw(10) << std::left << "flagPDAS";
    }
    else {
      hist << std::setw(10) << std::left << "iterCR";
      hist << std::setw(10) << std::left << "flagCR";
    }
    hist << std::setw(10) << std::left << "feasible";
    hist << "\n";
    return hist.str();
  }

  /** \brief Print step name.
  */
  std::string printName( void ) const {
    std::stringstream hist;
    hist << "\nPrimal Dual Active Set Newton's Method\n";
    return hist.str();
  }

  /** \brief Print iterate status.
  */
  virtual std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    std::stringstream hist;
    hist << std::scientific << std::setprecision(6);
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    if ( algo_state.iter == 0 ) {
      hist << " ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << "\n";
    }
    else {
      hist << " ";
      hist << std::setw(6)  << std::left << algo_state.iter;
      hist << std::setw(15) << std::left << algo_state.value;
      hist << std::setw(15) << std::left << algo_state.gnorm;
      hist << std::setw(15) << std::left << algo_state.snorm;
      hist << std::setw(10) << std::left << algo_state.nfval;
      hist << std::setw(10) << std::left << algo_state.ngrad;
      if ( maxit_ > 1 ) {
        hist << std::setw(10) << std::left << iter_;
        hist << std::setw(10) << std::left << flag_;
      }
      else {
        hist << std::setw(10) << std::left << iterCR_;
        hist << std::setw(10) << std::left << flagCR_;
      }
      if ( feasible_ ) {
        hist << std::setw(10) << std::left << "YES";
      }
      else {
        hist << std::setw(10) << std::left << "NO";
      }
      hist << "\n";
    }
    return hist.str();
  }

}; // class PrimalDualActiveSetStep

} // namespace ROL

#endif

// void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
//            Objective<Real> &obj, BoundConstraint<Real> &con) {
//   Real rnorm = rhs.norm();
//   Real rtol  = std::min(tol1_,tol2_*rnorm);
//   itol_ = std::sqrt(ROL_EPSILON);
//   sol.zero();
//
//   Teuchos::RCP<Vector<Real> > res = rhs.clone();
//   res->set(rhs);
//
//   Teuchos::RCP<Vector<Real> > v = x.clone();
//   con.pruneActive(*res,xlam,neps_);
//   obj.precond(*v,*res,x,itol_);
//   con.pruneActive(*v,xlam,neps_);
//
//   Teuchos::RCP<Vector<Real> > p = x.clone();
//   p->set(*v);
//
//   Teuchos::RCP<Vector<Real> > Hp = x.clone();
//
//   iterCR_ = 0;
//   flagCR_ = 0;
//
//   Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rv = v->dot(*res);
//
//   for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
//     if ( false ) {
//       itol_ = rtol/(maxitCR_*rnorm);
//     }
//     con.pruneActive(*p,xlam,neps_);
//     if ( secant_ == Teuchos::null ) {
//       obj.hessVec(*Hp, *p, x, itol_);
//     }
//     else {
//       secant_->applyB( *Hp, *p, x );
//     }
//     con.pruneActive(*Hp,xlam,neps_);
//
//     kappa = p->dot(*Hp);
//     if ( kappa <= 0.0 ) { flagCR_ = 2; break; }
//     alpha = rv/kappa;
//     sol.axpy(alpha,*p);
//
//     res->axpy(-alpha,*Hp);
//     rnorm = res->norm();
//     if ( rnorm < rtol ) { break; }
//
//     con.pruneActive(*res,xlam,neps_);
//     obj.precond(*v,*res,x,itol_);
//     con.pruneActive(*v,xlam,neps_);
//     tmp  = rv;
//     rv   = v->dot(*res);
//     beta = rv/tmp;
//
//     p->scale(beta);
//     p->axpy(1.0,*v);
//   }
//   if ( iterCR_ == maxitCR_ ) {
//     flagCR_ = 1;
//   }
//   else {
//     iterCR_++;
//   }
// }


// /** \brief Apply the inactive components of the Hessian operator.
//
//     I.e., the components corresponding to \f$\mathcal{I}_k\f$.
//
//     @param[out] hv   is the result of applying the Hessian at @b x to @b v
//     @param[in]  v    is the direction in which we apply the Hessian
//     @param[in]  x    is the current iteration vector \f$x_k\f$
//     @param[in]  xlam is the vector \f$x_k + c\lambda_k\f$
//     @param[in]  obj  is the objective function
//     @param[in]  con  are the bound constraints
// */
// void applyInactiveHessian(Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x,
//                           const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
//   Teuchos::RCP<Vector<Real> > tmp = v.clone();
//   tmp->set(v);
//   con.pruneActive(*tmp,xlam,neps_);
//   if ( secant_ == Teuchos::null ) {
//     obj.hessVec(hv,*tmp,x,itol_);
//   }
//   else {
//     secant_->applyB(hv,*tmp,x);
//   }
//   con.pruneActive(hv,xlam,neps_);
// }
//
// /** \brief Apply the inactive components of the preconditioner operator.
//
//     I.e., the components corresponding to \f$\mathcal{I}_k\f$.
//
//     @param[out] hv   is the result of applying the preconditioner at @b x to @b v
//     @param[in]  v    is the direction in which we apply the preconditioner
//     @param[in]  x    is the current iteration vector \f$x_k\f$
//     @param[in]  xlam is the vector \f$x_k + c\lambda_k\f$
//     @param[in]  obj  is the objective function
//     @param[in]  con  are the bound constraints
// */
// void applyInactivePrecond(Vector<Real> &pv, const Vector<Real> &v, const Vector<Real> &x,
//                           const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
//   Teuchos::RCP<Vector<Real> > tmp = v.clone();
//   tmp->set(v);
//   con.pruneActive(*tmp,xlam,neps_);
//   obj.precond(pv,*tmp,x,itol_);
//   con.pruneActive(pv,xlam,neps_);
// }
//
// /** \brief Solve the inactive part of the PDAS optimality system.
//
//     The inactive PDAS optimality system is
//     \f[
//         \nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{I}_k}s =
//            -\nabla f(x_k)_{\mathcal{I}_k}
//            -\nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{A}_k} (s_k)_{\mathcal{A}_k}.
//     \f]
//     Since the inactive part of the Hessian may not be positive definite, we solve
//     using CR.
//
//     @param[out] sol  is the vector containing the solution
//     @param[in]  rhs  is the right-hand side vector
//     @param[in]  xlam is the vector \f$x_k + c\lambda_k\f$
//     @param[in]  x    is the current iteration vector \f$x_k\f$
//     @param[in]  obj  is the objective function
//     @param[in]  con  are the bound constraints
// */
// // Solve the inactive part of the optimality system using conjugate residuals
// void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
//            Objective<Real> &obj, BoundConstraint<Real> &con) {
//   // Initialize Residual
//   Teuchos::RCP<Vector<Real> > res = rhs.clone();
//   res->set(rhs);
//   Real rnorm = res->norm();
//   Real rtol  = std::min(tol1_,tol2_*rnorm);
//   if ( false ) { itol_ = rtol/(maxitCR_*rnorm); }
//   sol.zero();
//
//   // Apply preconditioner to residual r = Mres
//   Teuchos::RCP<Vector<Real> > r = x.clone();
//   applyInactivePrecond(*r,*res,x,xlam,obj,con);
//
//   // Initialize direction p = v
//   Teuchos::RCP<Vector<Real> > p = x.clone();
//   p->set(*r);
//
//   // Apply Hessian to v
//   Teuchos::RCP<Vector<Real> > Hr = x.clone();
//   applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
//
//   // Apply Hessian to p
//   Teuchos::RCP<Vector<Real> > Hp  = x.clone();
//   Teuchos::RCP<Vector<Real> > MHp = x.clone();
//   Hp->set(*Hr);
//
//   iterCR_ = 0;
//   flagCR_ = 0;
//
//   Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rHr = Hr->dot(*r);
//
//   for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
//     // Precondition Hp
//     applyInactivePrecond(*MHp,*Hp,x,xlam,obj,con);
//
//     kappa = Hp->dot(*MHp);  // p' H M H p
//     alpha = rHr/kappa;      // r' M H M r
//     sol.axpy(alpha,*p);     // update step
//     res->axpy(-alpha,*Hp);  // residual
//     r->axpy(-alpha,*MHp);   // preconditioned residual
//
//     // recompute rnorm and decide whether or not to exit
//     rnorm = res->norm();
//     if ( rnorm < rtol ) { break; }
//
//     // Apply Hessian to v
//     itol_ = rtol/(maxitCR_*rnorm);
//     applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
//
//     tmp  = rHr;
//     rHr  = Hr->dot(*r);
//     beta = rHr/tmp;
//     p->scale(beta);
//     p->axpy(1.0,*r);
//     Hp->scale(beta);
//     Hp->axpy(1.0,*Hr);
//   }
//   if ( iterCR_ == maxitCR_ ) {
//     flagCR_ = 1;
//   }
//   else {
//     iterCR_++;
//   }
// }