ROL_MoreauYosidaPenalty.hpp
// @HEADER
// ************************************************************************
//
//               Rapid Optimization Library (ROL) Package
//                 Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_MOREAUYOSIDAPENALTY_H
#define ROL_MOREAUYOSIDAPENALTY_H

#include "ROL_Objective.hpp"
#include "ROL_BoundConstraint.hpp"
#include "ROL_Vector.hpp"
#include "ROL_Types.hpp"
#include "Teuchos_RCP.hpp"
#include <iostream>

namespace ROL {

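/** \class ROL::MoreauYosidaPenalty
    \brief Provides the interface to evaluate the Moreau-Yosida penalty function.

    Given an objective \f$f\f$, bounds \f$\ell \le x \le u\f$, a multiplier
    estimate \f$\lambda\f$ and a penalty parameter \f$\mu > 0\f$, this class
    evaluates (see value() and computePenalty() below)
    \f[
      \varphi_\mu(x) = f(x)
        + \frac{\mu}{2}\left( \|(\ell - x_\lambda)_+\|^2
                            + \|(x_\lambda - u)_+\|^2 \right),
      \qquad x_\lambda = x + \frac{1}{\mu}\lambda,
    \f]
    where each \f$(\cdot)_+\f$ term is restricted to the corresponding active
    set of \f$x_\lambda\f$.

    A minimal usage sketch; myObj, myBnd, and x are assumed to be user-supplied
    ROL::Objective, ROL::BoundConstraint, and ROL::Vector objects with
    Real = double:
    \code
    double mu  = 10.0, tol = 1.e-8;
    ROL::MoreauYosidaPenalty<double> myp(myObj, myBnd, x, mu);
    Teuchos::RCP<ROL::Vector<double> > g = x.dual().clone();
    double val = myp.value(x, tol);      // f(x) plus the penalty term
    myp.gradient(*g, x, tol);            // gradient of the penalized objective
    myp.updateMultipliers(10.0*mu, x);   // lam <- mu*(u1 - l1), then mu <- 10*mu
    \endcode
*/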
template <class Real>
class MoreauYosidaPenalty : public Objective<Real> {
private:
  Teuchos::RCP<Objective<Real> >       obj_;
  Teuchos::RCP<BoundConstraint<Real> > con_;

  Teuchos::RCP<Vector<Real> > g_;
  Teuchos::RCP<Vector<Real> > l_;
  Teuchos::RCP<Vector<Real> > u_;
  Teuchos::RCP<Vector<Real> > l1_;
  Teuchos::RCP<Vector<Real> > u1_;
  Teuchos::RCP<Vector<Real> > dl1_;
  Teuchos::RCP<Vector<Real> > du1_;
  Teuchos::RCP<Vector<Real> > xlam_;
  Teuchos::RCP<Vector<Real> > v_;
  Teuchos::RCP<Vector<Real> > dv_;
  Teuchos::RCP<Vector<Real> > dv2_;
  Teuchos::RCP<Vector<Real> > lam_;
  Teuchos::RCP<Vector<Real> > tmp_;

  Real mu_;
  Real fval_;
  bool isConEvaluated_;
  int  nfval_;
  int  ngval_;

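  // Evaluate the penalty terms at xlam = x + (1/mu)*lam:  l1 and u1 store the
  // lower and upper bound violations of xlam on their respective active sets,
  // and dl1, du1 store the corresponding dual-space derivative terms.  The
  // result is cached until the iterate changes (isConEvaluated_).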
  void computePenalty(const Vector<Real> &x) {
    if ( con_->isActivated() ) {
      if ( !isConEvaluated_ ) {
        xlam_->set(x);
        xlam_->axpy(1./mu_,*lam_);

        if ( con_->isFeasible(*xlam_) ) {
          l1_->zero(); dl1_->zero();
          u1_->zero(); du1_->zero();
        }
        else {
          // Compute lower penalty component
          l1_->set(*l_);
          con_->pruneLowerInactive(*l1_,*xlam_);
          tmp_->set(*xlam_);
          con_->pruneLowerInactive(*tmp_,*xlam_);
          l1_->axpy(-1.,*tmp_);

          // Compute upper penalty component
          u1_->set(*xlam_);
          con_->pruneUpperInactive(*u1_,*xlam_);
          tmp_->set(*u_);
          con_->pruneUpperInactive(*tmp_,*xlam_);
          u1_->axpy(-1.,*tmp_);

          // Compute derivative of lower penalty component
          dl1_->set(l1_->dual());
          con_->pruneLowerInactive(*dl1_,*xlam_);

          // Compute derivative of upper penalty component
          du1_->set(u1_->dual());
          con_->pruneUpperInactive(*du1_,*xlam_);
        }

        isConEvaluated_ = true;
      }
    }
  }

public:
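  /** \brief Constructor.

      Stores non-owning pointers to the objective and the bound constraint,
      clones the work vectors from \f$x\f$, extracts the lower and upper bound
      vectors, and initializes the multiplier estimate to zero.
  */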
  MoreauYosidaPenalty(Objective<Real> &obj, BoundConstraint<Real> &con,
                      const ROL::Vector<Real> &x, const Real mu = 1.0)
    : mu_(mu), fval_(0.0), isConEvaluated_(false), nfval_(0), ngval_(0) {
    obj_ = Teuchos::rcp(&obj, false);
    con_ = Teuchos::rcp(&con, false);

    g_    = x.dual().clone();
    l_    = x.clone();
    l1_   = x.clone();
    dl1_  = x.dual().clone();
    u_    = x.clone();
    u1_   = x.clone();
    du1_  = x.dual().clone();
    xlam_ = x.clone();
    v_    = x.clone();
    dv_   = x.dual().clone();
    dv2_  = x.dual().clone();
    lam_  = x.clone();
    tmp_  = x.clone();

    con_->setVectorToLowerBound(*l_);
    con_->setVectorToUpperBound(*u_);

    lam_->zero();
    //lam_->set(*u_);
    //lam_->plus(*l_);
    //lam_->scale(0.5);
  }

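  /** \brief Update the multiplier estimate and the penalty parameter.

      Sets the multiplier to \f$\mu\,(u_1 - \ell_1)\f$, where \f$\ell_1\f$ and
      \f$u_1\f$ are the lower and upper penalty components at the current
      iterate, then replaces the penalty parameter \f$\mu\f$ with the input
      value and resets the evaluation counters.
  */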
  void updateMultipliers(Real mu, const ROL::Vector<Real> &x) {
    if ( con_->isActivated() ) {
      computePenalty(x);

      lam_->set(*u1_);
      lam_->axpy(-1.,*l1_);
      lam_->scale(mu_);

      mu_ = mu;
    }

    nfval_ = 0;
    ngval_ = 0;

    isConEvaluated_ = false;
  }

  Real getObjectiveValue(void) const {
    return fval_;
  }

  Teuchos::RCP<Vector<Real> > getGradient(void) const {
    return g_;
  }

  int getNumberFunctionEvaluations(void) const {
    return nfval_;
  }

  int getNumberGradientEvaluations(void) const {
    return ngval_;
  }

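  /** \brief Update the Moreau-Yosida penalty function.

      Propagates the update to the underlying objective and bound constraint
      and invalidates the cached penalty terms.
  */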
  void update( const Vector<Real> &x, bool flag = true, int iter = -1 ) {
    obj_->update(x,flag,iter);
    con_->update(x,flag,iter);
    isConEvaluated_ = false;
  }

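  /** \brief Compute the value of the Moreau-Yosida penalty function.

      Returns \f$f(x) + \frac{\mu}{2}\left(\|\ell_1\|^2 + \|u_1\|^2\right)\f$,
      where \f$\ell_1\f$ and \f$u_1\f$ are the lower and upper bound violations
      of \f$x + \lambda/\mu\f$.
  */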
  Real value( const Vector<Real> &x, Real &tol ) {
    // Compute objective function value
    fval_ = obj_->value(x,tol);
    nfval_++;
    // Add value of the Moreau-Yosida penalty
    Real fval = fval_;
    if ( con_->isActivated() ) {
      computePenalty(x);
      fval += 0.5*mu_*(l1_->dot(*l1_) + u1_->dot(*u1_));
    }
    return fval;
  }

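  /** \brief Compute the gradient of the Moreau-Yosida penalty function.

      Adds \f$\mu\,(du_1 - d\ell_1)\f$, the derivative of the penalty term,
      to the gradient of the underlying objective.
  */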
  void gradient( Vector<Real> &g, const Vector<Real> &x, Real &tol ) {
    // Compute gradient of objective function
    obj_->gradient(*g_,x,tol);
    ngval_++;
    g.set(*g_);
    // Add gradient of the Moreau-Yosida penalty
    if ( con_->isActivated() ) {
      computePenalty(x);
      g.axpy(-mu_,*dl1_);
      g.axpy(mu_,*du1_);
    }
  }

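  /** \brief Apply the Hessian of the Moreau-Yosida penalty function to a vector.

      Applies the objective Hessian to \f$v\f$ and adds \f$\mu\f$ times the
      components of \f$v\f$ (mapped to the dual space) that lie in the lower
      and upper active sets of \f$x + \lambda/\mu\f$.
  */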
  void hessVec( Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x, Real &tol ) {
    // Apply objective Hessian to a vector
    obj_->hessVec(hv,v,x,tol);
    // Add Hessian of the Moreau-Yosida penalty
    if ( con_->isActivated() ) {
      computePenalty(x);

      v_->set(v);
      con_->pruneLowerActive(*v_,*xlam_);
      v_->scale(-1.0);
      v_->plus(v);
      dv_->set(v_->dual());
      dv2_->set(*dv_);
      con_->pruneLowerActive(*dv_,*xlam_);
      dv_->scale(-1.0);
      dv_->plus(*dv2_);
      hv.axpy(mu_,*dv_);

      v_->set(v);
      con_->pruneUpperActive(*v_,*xlam_);
      v_->scale(-1.0);
      v_->plus(v);
      dv_->set(v_->dual());
      dv2_->set(*dv_);
      con_->pruneUpperActive(*dv_,*xlam_);
      dv_->scale(-1.0);
      dv_->plus(*dv2_);
      hv.axpy(mu_,*dv_);
    }
  }

}; // class MoreauYosidaPenalty

} // namespace ROL

#endif