ROL_LineSearchStep.hpp
// @HEADER
// ************************************************************************
//
// Rapid Optimization Library (ROL) Package
// Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
//              Drew Kouri   (dpkouri@sandia.gov) and
//              Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_LINESEARCHSTEP_H
#define ROL_LINESEARCHSTEP_H

#include "ROL_Types.hpp"
#include "ROL_Step.hpp"
#include "ROL_LineSearch.hpp"

// Unconstrained Methods
#include "ROL_GradientStep.hpp"
#include "ROL_NonlinearCGStep.hpp"
#include "ROL_SecantStep.hpp"
#include "ROL_NewtonStep.hpp"
#include "ROL_NewtonKrylovStep.hpp"

// Bound Constrained Methods
#include "ROL_ProjectedSecantStep.hpp"
#include "ROL_ProjectedNewtonStep.hpp"
#include "ROL_ProjectedNewtonKrylovStep.hpp"

#include <sstream>
#include <iomanip>
#include <algorithm> // std::remove, used in printHeader() and print()

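/** \class ROL::LineSearchStep
    \brief Provides the interface to compute optimization steps with line search.

    LineSearchStep globalizes an unglobalized descent step (gradient,
    nonlinear CG, (projected) secant, (projected) Newton, or (projected)
    Newton-Krylov), selected through the "Descent Method" sublist of the
    input parameter list.  At each iteration the computed direction is
    tested for descent and replaced by the negative gradient if necessary,
    a line search satisfying the requested curvature condition determines
    the step length, and, if bound constraints are activated, the trial
    update is projected back onto the feasible set.
*/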
namespace ROL {

template <class Real>
class LineSearchStep : public Step<Real> {
private:

  ROL::Ptr<Step<Real> >        desc_;       ///< Unglobalized step object
  ROL::Ptr<Secant<Real> >      secant_;     ///< Secant object (used for quasi-Newton)
  ROL::Ptr<Krylov<Real> >      krylov_;     ///< Krylov solver object (used for inexact Newton)
  ROL::Ptr<NonlinearCG<Real> > nlcg_;       ///< Nonlinear CG object (used for nonlinear CG)
  ROL::Ptr<LineSearch<Real> >  lineSearch_; ///< Line-search object

  ROL::Ptr<Vector<Real> > d_;

  ELineSearch         els_;   ///< enum determines type of line search
  ECurvatureCondition econd_; ///< enum determines type of curvature condition

  bool acceptLastAlpha_;  ///< For backwards compatibility. When max function evaluations are reached take last step.

  bool usePreviousAlpha_; ///< If true, use the previously accepted step length (if any) as the new initial step length.

  int  verbosity_;
  bool computeObj_;
  Real fval_;

  ROL::ParameterList parlist_;

  std::string lineSearchName_;

  // Compute the directional derivative <g,s>; if bound constraints are
  // activated, use the projected (eps-active set) variant.
  Real GradDotStep(const Vector<Real> &g, const Vector<Real> &s,
                   const Vector<Real> &x,
                   BoundConstraint<Real> &bnd, Real eps = 0) {
    Real gs(0), one(1);
    if (!bnd.isActivated()) {
      gs = s.dot(g.dual());
    }
    else {
      // Inner product of the gradient with the step, restricted to the
      // eps-inactive components of the step ...
      d_->set(s);
      bnd.pruneActive(*d_,g,x,eps);
      gs = d_->dot(g.dual());
      // ... minus the inner product of the gradient with the projected
      // gradient displacement x - P(x - g), restricted to the eps-active set.
      d_->set(x);
      d_->axpy(-one,g.dual());
      bnd.project(*d_);
      d_->scale(-one);
      d_->plus(x);
      bnd.pruneInactive(*d_,g,x,eps);
      gs -= d_->dot(g.dual());
    }
    return gs;
  }

public:

  using Step<Real>::initialize;
  using Step<Real>::compute;
  using Step<Real>::update;

  /** \brief Constructor.

      Standard constructor to build a LineSearchStep object.  Algorithmic
      specifications are passed in through a ROL::ParameterList.  The
      line-search, secant, Krylov, and nonlinear CG objects are optional;
      if they are not supplied, they are built from the parameter list.

      @param[in]     parlist    is a parameter list containing algorithmic specifications
      @param[in]     lineSearch is a user-defined line-search object
      @param[in]     secant     is a user-defined secant object
      @param[in]     krylov     is a user-defined Krylov solver object
      @param[in]     nlcg       is a user-defined nonlinear CG object
  */
  LineSearchStep( ROL::ParameterList &parlist,
                  const ROL::Ptr<LineSearch<Real> > &lineSearch = ROL::nullPtr,
                  const ROL::Ptr<Secant<Real> > &secant = ROL::nullPtr,
                  const ROL::Ptr<Krylov<Real> > &krylov = ROL::nullPtr,
                  const ROL::Ptr<NonlinearCG<Real> > &nlcg = ROL::nullPtr )
    : Step<Real>(), desc_(ROL::nullPtr), secant_(secant),
      krylov_(krylov), nlcg_(nlcg), lineSearch_(lineSearch),
      els_(LINESEARCH_USERDEFINED), econd_(CURVATURECONDITION_WOLFE),
      acceptLastAlpha_(false), usePreviousAlpha_(false),
      verbosity_(0), computeObj_(true), fval_(0), parlist_(parlist) {
    // Parse parameter list
    ROL::ParameterList& Llist = parlist.sublist("Step").sublist("Line Search");
    ROL::ParameterList& Glist = parlist.sublist("General");
    econd_ = StringToECurvatureCondition(Llist.sublist("Curvature Condition").get("Type","Strong Wolfe Conditions"));
    acceptLastAlpha_ = Llist.get("Accept Last Alpha", false);
    verbosity_ = Glist.get("Print Verbosity",0);
    computeObj_ = Glist.get("Recompute Objective Function",false);
    // Initialize Line Search
    if (lineSearch_ == ROL::nullPtr) {
      lineSearchName_ = Llist.sublist("Line-Search Method").get("Type","Cubic Interpolation");
      els_ = StringToELineSearch(lineSearchName_);
      lineSearch_ = LineSearchFactory<Real>(parlist);
    }
    else { // User-defined line search provided
      lineSearchName_ = Llist.sublist("Line-Search Method").get("User Defined Line-Search Name",
                                                                "Unspecified User Defined Line-Search");
    }
  }

  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
                   Objective<Real> &obj, BoundConstraint<Real> &bnd,
                   AlgorithmState<Real> &algo_state ) {
    d_ = x.clone();

    // Initialize unglobalized step
    ROL::ParameterList& list
      = parlist_.sublist("Step").sublist("Line Search").sublist("Descent Method");
    EDescent edesc = StringToEDescent(list.get("Type","Quasi-Newton Method"));
    if (bnd.isActivated()) {
      switch(edesc) {
        case DESCENT_STEEPEST: {
          desc_ = ROL::makePtr<GradientStep<Real>>(parlist_,computeObj_);
          break;
        }
        case DESCENT_NONLINEARCG: {
          desc_ = ROL::makePtr<NonlinearCGStep<Real>>(parlist_,nlcg_,computeObj_);
          break;
        }
        case DESCENT_SECANT: {
          desc_ = ROL::makePtr<ProjectedSecantStep<Real>>(parlist_,secant_,computeObj_);
          break;
        }
        case DESCENT_NEWTON: {
          desc_ = ROL::makePtr<ProjectedNewtonStep<Real>>(parlist_,computeObj_);
          break;
        }
        case DESCENT_NEWTONKRYLOV: {
          desc_ = ROL::makePtr<ProjectedNewtonKrylovStep<Real>>(parlist_,krylov_,secant_,computeObj_);
          break;
        }
        default:
          ROL_TEST_FOR_EXCEPTION(true,std::invalid_argument,
            ">>> (LineSearchStep::Initialize): Undefined descent type!");
      }
    }
    else {
      switch(edesc) {
        case DESCENT_STEEPEST: {
          desc_ = ROL::makePtr<GradientStep<Real>>(parlist_,computeObj_);
          break;
        }
        case DESCENT_NONLINEARCG: {
          desc_ = ROL::makePtr<NonlinearCGStep<Real>>(parlist_,nlcg_,computeObj_);
          break;
        }
        case DESCENT_SECANT: {
          desc_ = ROL::makePtr<SecantStep<Real>>(parlist_,secant_,computeObj_);
          break;
        }
        case DESCENT_NEWTON: {
          desc_ = ROL::makePtr<NewtonStep<Real>>(parlist_,computeObj_);
          break;
        }
        case DESCENT_NEWTONKRYLOV: {
          desc_ = ROL::makePtr<NewtonKrylovStep<Real>>(parlist_,krylov_,secant_,computeObj_);
          break;
        }
        default:
          ROL_TEST_FOR_EXCEPTION(true,std::invalid_argument,
            ">>> (LineSearchStep::Initialize): Undefined descent type!");
      }
    }
    desc_->initialize(x,s,g,obj,bnd,algo_state);

    // Initialize line search
    lineSearch_->initialize(x,s,g,obj,bnd);
    //const ROL::Ptr<const StepState<Real> > desc_state = desc_->getStepState();
    //lineSearch_->initialize(x,s,*(desc_state->gradientVec),obj,bnd);
  }

  /** \brief Compute step.

      Computes a trial step using the unglobalized descent step, checks that
      it is a descent direction (falling back to the negative gradient if it
      is not), and then performs a line search to determine the step length.

      @param[out]      s          is the computed trial step
      @param[in]       x          is the current iterate
      @param[in]       obj        is the objective function
      @param[in]       bnd        are the bound constraints
      @param[in]       algo_state contains the current state of the algorithm
  */
  void compute( Vector<Real> &s, const Vector<Real> &x,
                Objective<Real> &obj, BoundConstraint<Real> &bnd,
                AlgorithmState<Real> &algo_state ) {
    Real zero(0), one(1);
    // Compute unglobalized step
    desc_->compute(s,x,obj,bnd,algo_state);

    // Ensure that s is a descent direction
    // ---> If not, then default to steepest descent
    const ROL::Ptr<const StepState<Real> > desc_state = desc_->getStepState();
    Real gs = GradDotStep(*(desc_state->gradientVec),s,x,bnd,algo_state.gnorm);
    if (gs >= zero) {
      s.set((desc_state->gradientVec)->dual());
      s.scale(-one);
      gs = GradDotStep(*(desc_state->gradientVec),s,x,bnd,algo_state.gnorm);
    }

    // Perform line search
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    fval_ = algo_state.value;
    step_state->nfval = 0; step_state->ngrad = 0;
    lineSearch_->setData(algo_state.gnorm,*(desc_state->gradientVec));
    lineSearch_->run(step_state->searchSize,fval_,step_state->nfval,step_state->ngrad,gs,s,x,obj,bnd);

    // Make correction if maximum function evaluations reached
    if(!acceptLastAlpha_) {
      lineSearch_->setMaxitUpdate(step_state->searchSize,fval_,algo_state.value);
    }

    // Compute scaled descent direction
    s.scale(step_state->searchSize);
    if ( bnd.isActivated() ) {
      s.plus(x);
      bnd.project(s);
      s.axpy(static_cast<Real>(-1),x);
    }
  }

  /** \brief Update step, if successful.

      Given a trial step, this function updates the iterate and the algorithm
      state by delegating to the unglobalized step, and accumulates the line
      search's function and gradient evaluation counts.

      @param[in,out]   x          is the updated iterate
      @param[in]       s          is the computed trial step
      @param[in]       obj        is the objective function
      @param[in]       bnd        are the bound constraints
      @param[in]       algo_state contains the current state of the algorithm
  */
  void update( Vector<Real> &x, const Vector<Real> &s,
               Objective<Real> &obj, BoundConstraint<Real> &bnd,
               AlgorithmState<Real> &algo_state ) {
    ROL::Ptr<StepState<Real> > step_state = Step<Real>::getState();
    algo_state.nfval += step_state->nfval;
    algo_state.ngrad += step_state->ngrad;
    desc_->update(x,s,obj,bnd,algo_state);
    step_state->flag = desc_->getStepState()->flag;
    step_state->SPiter = desc_->getStepState()->SPiter;
    step_state->SPflag = desc_->getStepState()->SPflag;
    if ( !computeObj_ ) {
      algo_state.value = fval_;
    }
  }

  /** \brief Print iterate header.

      This function produces a string containing header information: the
      header of the unglobalized step plus the line-search function and
      gradient evaluation counts.
  */
  std::string printHeader( void ) const {
    std::string head = desc_->printHeader();
    head.erase(std::remove(head.end()-3,head.end(),'\n'), head.end());
    std::stringstream hist;
    hist.write(head.c_str(),head.length());
    hist << std::setw(10) << std::left << "ls_#fval";
    hist << std::setw(10) << std::left << "ls_#grad";
    hist << "\n";
    return hist.str();
  }

  /** \brief Print step name.
  */
  std::string printName( void ) const {
    std::string name = desc_->printName();
    std::stringstream hist;
    hist << name;
    hist << "Line Search: " << lineSearchName_;
    hist << " satisfying " << ECurvatureConditionToString(econd_) << "\n";
    return hist.str();
  }

  /** \brief Print iterate status.

      @param[in]     algo_state    is the current state of the algorithm
      @param[in]     print_header  if set to true, print the header at this iteration
  */
  std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
    const ROL::Ptr<const StepState<Real> > step_state = Step<Real>::getStepState();
    std::string desc = desc_->print(algo_state,false);
    desc.erase(std::remove(desc.end()-3,desc.end(),'\n'), desc.end());
    std::string name = desc_->printName();
    size_t pos = desc.find(name);
    if ( pos != std::string::npos ) {
      desc.erase(pos, name.length());
    }

    std::stringstream hist;
    if ( algo_state.iter == 0 ) {
      hist << printName();
    }
    if ( print_header ) {
      hist << printHeader();
    }
    hist << desc;
    if ( algo_state.iter == 0 ) {
      hist << "\n";
    }
    else {
      hist << std::setw(10) << std::left << step_state->nfval;
      hist << std::setw(10) << std::left << step_state->ngrad;
      hist << "\n";
    }
    return hist.str();
  }
}; // class LineSearchStep

} // namespace ROL

#endif
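
Below is a minimal usage sketch (not part of ROL_LineSearchStep.hpp) showing how a LineSearchStep is typically driven by the classic step/status-test ROL::Algorithm interface. The objective class HalfNormSquared, the chosen parameter-list values, and the Algorithm/StatusTest/StdVector construction are illustrative assumptions; exact constructor and run signatures, header names, and parameter-list keys may differ across ROL/Trilinos versions.

// example_linesearch.cpp -- illustrative only; minimizes f(x) = 0.5*<x,x>.
#include "ROL_LineSearchStep.hpp"
#include "ROL_StatusTest.hpp"
#include "ROL_Algorithm.hpp"
#include "ROL_StdVector.hpp"
#include <iostream>
#include <vector>

// Simple objective: f(x) = 0.5*<x,x>, grad f(x) = x.
template<class Real>
class HalfNormSquared : public ROL::Objective<Real> {
public:
  Real value( const ROL::Vector<Real> &x, Real &tol ) {
    return static_cast<Real>(0.5)*x.dot(x);
  }
  void gradient( ROL::Vector<Real> &g, const ROL::Vector<Real> &x, Real &tol ) {
    g.set(x);
  }
};

int main() {
  using RealT = double;
  // Keys below mirror those parsed in LineSearchStep's constructor and initialize().
  ROL::ParameterList parlist;
  ROL::ParameterList &lslist = parlist.sublist("Step").sublist("Line Search");
  lslist.sublist("Descent Method").set("Type","Quasi-Newton Method");
  lslist.sublist("Line-Search Method").set("Type","Cubic Interpolation");
  lslist.sublist("Curvature Condition").set("Type","Strong Wolfe Conditions");
  parlist.sublist("Status Test").set("Gradient Tolerance",1.e-10);
  parlist.sublist("Status Test").set("Iteration Limit",100);

  // Globalized step, status test, and driver (classic step/status interface).
  ROL::Ptr<ROL::Step<RealT>>       step   = ROL::makePtr<ROL::LineSearchStep<RealT>>(parlist);
  ROL::Ptr<ROL::StatusTest<RealT>> status = ROL::makePtr<ROL::StatusTest<RealT>>(parlist);
  ROL::Algorithm<RealT> algo(step,status,false);

  // Optimization variable and objective.
  ROL::Ptr<std::vector<RealT>> x_ptr = ROL::makePtr<std::vector<RealT>>(5, 1.0);
  ROL::StdVector<RealT> x(x_ptr);
  HalfNormSquared<RealT> obj;

  algo.run(x,obj,true,std::cout);  // prints the iteration history
  return 0;
}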