Commit 6f0f9227 authored by David Bommes

added automatic identification and exploitation

of special properties like "constant hessian", "constant gradient", ...

git-svn-id: http://www.openflipper.org/svnrepo/CoMISo/trunk@190 1355f012-dd97-4b2f-ae87-10fa9f823a57
parent ffdc668e
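For context: the detection rule added by this commit (NProblemIPOPT::analyze_special_properties below) marks the Hessian as constant only if the objective and every constraint report a constant Hessian, and marks the equality / inequality constraint Jacobians as constant only if every constraint of the respective type reports a constant gradient. A minimal stand-alone sketch of that rule, using placeholder structs instead of the CoMISo interfaces:

#include <cstdio>
#include <vector>

// Placeholder stand-ins, not the CoMISo API (NProblemInterface / NConstraintInterface).
struct ProblemInfo       { bool constant_hessian; };
struct ConstraintInfo    { bool constant_gradient, constant_hessian, is_equality; };
struct SpecialProperties { bool hessian_constant, jac_c_constant, jac_d_constant; };

// Mirrors the logic of NProblemIPOPT::analyze_special_properties in this commit.
SpecialProperties analyze(const ProblemInfo& p, const std::vector<ConstraintInfo>& cs)
{
  SpecialProperties sp = { p.constant_hessian, true, true };
  for (unsigned int i = 0; i < cs.size(); ++i)
  {
    if (!cs[i].constant_hessian) sp.hessian_constant = false;  // any non-quadratic term spoils it
    if (!cs[i].constant_gradient)
    {
      if (cs[i].is_equality) sp.jac_c_constant = false;        // nonlinear equality constraint
      else                   sp.jac_d_constant = false;        // nonlinear inequality constraint
    }
  }
  return sp;
}

int main()
{
  ProblemInfo quadratic = { true };
  ConstraintInfo lin_eq  = { true,  true,  true  };            // linear equality constraint
  ConstraintInfo nl_ineq = { false, false, false };            // nonlinear inequality constraint
  std::vector<ConstraintInfo> cs;
  cs.push_back(lin_eq);
  cs.push_back(nl_ineq);
  SpecialProperties sp = analyze(quadratic, cs);
  std::printf("hessian_constant=%d jac_c_constant=%d jac_d_constant=%d\n",
              sp.hessian_constant, sp.jac_c_constant, sp.jac_d_constant);  // prints 0 1 0
}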
@@ -85,7 +85,29 @@ eval_hessian ( const double* _x, SMatrixNC& _h )
bool
BoundConstraint::
is_linear()
is_linear() const
{
return true;
}
//-----------------------------------------------------------------------------
bool
BoundConstraint::
constant_gradient() const
{
return true;
}
//-----------------------------------------------------------------------------
bool
BoundConstraint::
constant_hessian() const
{
return true;
}
......
@@ -56,7 +56,9 @@ public:
virtual void eval_gradient ( const double* _x, SVectorNC& _g );
virtual void eval_hessian ( const double* _x, SMatrixNC& _h );
virtual bool is_linear();
virtual bool is_linear() const;
virtual bool constant_gradient() const;
virtual bool constant_hessian () const;
// set/get values
unsigned int& idx();
......
@@ -60,9 +60,34 @@ solve(NProblemInterface* _problem, const std::vector<NConstraintInterface*>& _co
// 1. Create an instance of IPOPT NLP
//----------------------------------------------------------------------------
Ipopt::SmartPtr<Ipopt::TNLP> np = new NProblemIPOPT(_problem, _constraints);
NProblemIPOPT* np2 = dynamic_cast<NProblemIPOPT*> (Ipopt::GetRawPtr(np));
//----------------------------------------------------------------------------
// 2. solve problem
// 2. exploit special characteristics of problem
//----------------------------------------------------------------------------
std::cerr << "exploit detected special properties: ";
if(np2->hessian_constant())
{
std::cerr << "*constant hessian* ";
app().Options()->SetStringValue("hessian_constant", "yes");
}
if(np2->jac_c_constant())
{
std::cerr << "*constant jacobian of equality constraints* ";
app().Options()->SetStringValue("jac_c_constant", "yes");
}
if(np2->jac_d_constant())
{
std::cerr << "*constant jacobian of in-equality constraints*";
app().Options()->SetStringValue("jac_d_constant", "yes");
}
std::cerr << std::endl;
//----------------------------------------------------------------------------
// 3. solve problem
//----------------------------------------------------------------------------
// Initialize the IpoptApplication and process the options
@@ -73,10 +98,212 @@ solve(NProblemInterface* _problem, const std::vector<NConstraintInterface*>& _co
printf("\n\n*** Error IPOPT during initialization!\n");
}
status = app_->OptimizeTNLP( np);
//----------------------------------------------------------------------------
// 3. solve problem
// 4. output statistics
//----------------------------------------------------------------------------
status = app_->OptimizeTNLP(np);
if (status == Ipopt::Solve_Succeeded || status == Ipopt::Solved_To_Acceptable_Level)
{
// Retrieve some statistics about the solve
Ipopt::Index iter_count = app_->Statistics()->IterationCount();
printf("\n\n*** IPOPT: The problem solved in %d iterations!\n", iter_count);
Ipopt::Number final_obj = app_->Statistics()->FinalObjective();
printf("\n\n*** IPOPT: The final value of the objective function is %e.\n", final_obj);
}
return status;
}
//-----------------------------------------------------------------------------
int
IPOPTSolver::
solve(NProblemInterface* _problem,
const std::vector<NConstraintInterface*>& _constraints,
const std::vector<NConstraintInterface*>& _lazy_constraints,
const double _almost_infeasible,
const int _max_passes )
{
//----------------------------------------------------------------------------
// 0. Initialize IPOPT Application
//----------------------------------------------------------------------------
StopWatch sw; sw.start();
// Initialize the IpoptApplication and process the options
Ipopt::ApplicationReturnStatus status;
status = app_->Initialize();
if (status != Ipopt::Solve_Succeeded)
{
printf("\n\n*** Error IPOPT during initialization!\n");
}
bool feasible_point_found = false;
int cur_pass = 0;
double acceptable_tolerance = 0.01; // hack: read out from ipopt!!!
// copy default constraints
std::vector<NConstraintInterface*> constraints = _constraints;
std::vector<bool> lazy_added(_lazy_constraints.size(),false);
// cache statistics of all iterations
std::vector<int> n_inf;
std::vector<int> n_almost_inf;
while(!feasible_point_found && cur_pass <(_max_passes-1))
{
++cur_pass;
//----------------------------------------------------------------------------
// 1. Create an instance of current IPOPT NLP
//----------------------------------------------------------------------------
Ipopt::SmartPtr<Ipopt::TNLP> np = new NProblemIPOPT(_problem, constraints);
NProblemIPOPT* np2 = dynamic_cast<NProblemIPOPT*> (Ipopt::GetRawPtr(np));
// enable caching of solution
np2->store_solution() = true;
//----------------------------------------------------------------------------
// 2. exploit special characteristics of problem
//----------------------------------------------------------------------------
std::cerr << "exploit detected special properties: ";
if(np2->hessian_constant())
{
std::cerr << "*constant hessian* ";
app().Options()->SetStringValue("hessian_constant", "yes");
}
if(np2->jac_c_constant())
{
std::cerr << "*constant jacobian of equality constraints* ";
app().Options()->SetStringValue("jac_c_constant", "yes");
}
if(np2->jac_d_constant())
{
std::cerr << "*constant jacobian of in-equality constraints*";
app().Options()->SetStringValue("jac_d_constant", "yes");
}
std::cerr << std::endl;
//----------------------------------------------------------------------------
// 3. solve problem
//----------------------------------------------------------------------------
status = app_->OptimizeTNLP( np);
// check lazy constraints
n_inf.push_back(0);
n_almost_inf.push_back(0);
feasible_point_found = true;
for(unsigned int i=0; i<_lazy_constraints.size(); ++i)
if(!lazy_added[i])
{
NConstraintInterface* lc = _lazy_constraints[i];
double v = lc->eval_constraint(&(np2->solution()[0]));
bool inf = false;
bool almost_inf = false;
if(lc->constraint_type() == NConstraintInterface::NC_EQUAL)
{
v = std::abs(v);
if(v>acceptable_tolerance)
inf = true;
else
if(v>_almost_infeasible)
almost_inf = true;
}
else
if(lc->constraint_type() == NConstraintInterface::NC_GREATER_EQUAL)
{
if(v<-acceptable_tolerance)
inf = true;
else
if(v<_almost_infeasible)
almost_inf = true;
}
else
if(lc->constraint_type() == NConstraintInterface::NC_LESS_EQUAL)
{
if(v>acceptable_tolerance)
inf = true;
else
if(v>-_almost_infeasible)
almost_inf = true;
}
// infeasible?
if(inf)
{
constraints.push_back(lc);
lazy_added[i] = true;
feasible_point_found = false;
++n_inf.back();
}
// almost violated or violated? -> add to constraints
if(almost_inf)
{
constraints.push_back(lc);
lazy_added[i] = true;
++n_almost_inf.back();
}
}
}
// no termination after max number of passes?
if(!feasible_point_found)
{
++cur_pass;
std::cerr << "*************** could not find feasible point after " << _max_passes-1 << " -> solving with all lazy constraints..." << std::endl;
for(unsigned int i=0; i<_lazy_constraints.size(); ++i)
if(!lazy_added[i])
constraints.push_back(_lazy_constraints[i]);
//----------------------------------------------------------------------------
// 1. Create an instance of current IPOPT NLP
//----------------------------------------------------------------------------
Ipopt::SmartPtr<Ipopt::TNLP> np = new NProblemIPOPT(_problem, constraints);
NProblemIPOPT* np2 = dynamic_cast<NProblemIPOPT*> (Ipopt::GetRawPtr(np));
// enable caching of solution
np2->store_solution() = true;
//----------------------------------------------------------------------------
// 2. exploit special characteristics of problem
//----------------------------------------------------------------------------
std::cerr << "exploit detected special properties: ";
if(np2->hessian_constant())
{
std::cerr << "*constant hessian* ";
app().Options()->SetStringValue("hessian_constant", "yes");
}
if(np2->jac_c_constant())
{
std::cerr << "*constant jacobian of equality constraints* ";
app().Options()->SetStringValue("jac_c_constant", "yes");
}
if(np2->jac_d_constant())
{
std::cerr << "*constant jacobian of in-equality constraints*";
app().Options()->SetStringValue("jac_d_constant", "yes");
}
std::cerr << std::endl;
//----------------------------------------------------------------------------
// 3. solve problem
//----------------------------------------------------------------------------
status = app_->OptimizeTNLP( np);
}
const double overall_time = sw.stop()/1000.0;
//----------------------------------------------------------------------------
// 4. output statistics
@@ -91,6 +318,12 @@ solve(NProblemInterface* _problem, const std::vector<NConstraintInterface*>& _co
printf("\n\n*** IPOPT: The final value of the objective function is %e.\n", final_obj);
}
std::cerr <<"############# IPOPT with lazy constraints statistics ###############" << std::endl;
std::cerr << "overall time: " << overall_time << "s" << std::endl;
std::cerr << "#passes : " << cur_pass << "( of " << _max_passes << ")" << std::endl;
for(unsigned int i=0; i<n_inf.size(); ++i)
std::cerr << "pass " << i << " induced " << n_inf[i] << " infeasible and " << n_almost_inf[i] << " almost infeasible" << std::endl;
return status;
}
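The lazy-constraint overload above follows a cutting-plane-style pattern: solve with the current constraint set, evaluate the not-yet-added lazy constraints at the returned point, add the violated ones (and, as a safeguard, those within the _almost_infeasible margin), and re-solve; after _max_passes-1 unsuccessful passes, all remaining lazy constraints are added for one final solve. A stripped-down, stand-alone sketch of that outer loop with placeholder types, covering only the g(x) >= 0 case (the commit also handles equality and <= constraints):

#include <cstddef>
#include <functional>
#include <vector>

// Stand-in types, not the CoMISo classes: a lazy constraint of the form g(x) >= 0 and a
// callback that runs one IPOPT solve on the currently active constraint set.
typedef std::vector<double>                 Point;
typedef std::function<double(const Point&)> LazyConstraint;
typedef std::function<Point()>              SolveCurrent;

// Mirrors the outer loop of IPOPTSolver::solve(..., _lazy_constraints, ...) above.
Point solve_with_lazy(SolveCurrent solve_current,
                      std::vector<LazyConstraint>& active,
                      const std::vector<LazyConstraint>& lazy,
                      double almost_infeasible, int max_passes)
{
  const double tol = 0.01;                       // feasibility tolerance ("acceptable_tolerance")
  std::vector<bool> added(lazy.size(), false);
  Point x;

  for (int pass = 1; pass < max_passes; ++pass)
  {
    x = solve_current();                         // one IPOPT pass on the active set
    bool feasible = true;
    for (std::size_t i = 0; i < lazy.size(); ++i)
    {
      if (added[i]) continue;
      const double v = lazy[i](x);
      if (v < -tol)                              // violated -> must be enforced in the next pass
      { active.push_back(lazy[i]); added[i] = true; feasible = false; }
      else if (v < almost_infeasible)            // almost violated -> enforce it as a precaution
      { active.push_back(lazy[i]); added[i] = true; }
    }
    if (feasible) return x;                      // all lazy constraints already satisfied
  }

  for (std::size_t i = 0; i < lazy.size(); ++i)  // fallback: enforce every remaining lazy constraint
    if (!added[i]) active.push_back(lazy[i]);
  return solve_current();
}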
@@ -181,6 +414,40 @@ split_constraints(const std::vector<NConstraintInterface*>& _constraints)
//-----------------------------------------------------------------------------
void
NProblemIPOPT::
analyze_special_properties(const NProblemInterface* _problem, const std::vector<NConstraintInterface*>& _constraints)
{
hessian_constant_ = true;
jac_c_constant_ = true;
jac_d_constant_ = true;
if(!_problem->constant_hessian())
hessian_constant_ = false;
for(unsigned int i=0; i<_constraints.size(); ++i)
{
if(!_constraints[i]->constant_hessian())
hessian_constant_ = false;
if(!_constraints[i]->constant_gradient())
{
if(_constraints[i]->constraint_type() == NConstraintInterface::NC_EQUAL)
jac_c_constant_ = false;
else
jac_d_constant_ = false;
}
// nothing else to check?
if(!hessian_constant_ && !jac_c_constant_ && !jac_d_constant_)
break;
}
}
//-----------------------------------------------------------------------------
bool NProblemIPOPT::get_nlp_info(Index& n, Index& m, Index& nnz_jac_g,
Index& nnz_h_lag, IndexStyleEnum& index_style)
{
@@ -530,9 +797,42 @@ void NProblemIPOPT::finalize_solution(SolverReturn status,
{
// problem knows what to do
problem_->store_result(x);
if(store_solution_)
{
x_.resize(n);
for( Index i=0; i<n; ++i)
x_[i] = x[i];
}
}
//-----------------------------------------------------------------------------
bool NProblemIPOPT::hessian_constant() const
{
return hessian_constant_;
}
//-----------------------------------------------------------------------------
bool NProblemIPOPT::jac_c_constant() const
{
return jac_c_constant_;
}
//-----------------------------------------------------------------------------
bool NProblemIPOPT::jac_d_constant() const
{
return jac_d_constant_;
}
//== IMPLEMENTATION PROBLEM INSTANCE==========================================================
......
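For reference, the three options set in the solve() bodies above are standard IPOPT options: "hessian_constant" declares the Hessian of the Lagrangian constant (a quadratic problem), while "jac_c_constant" and "jac_d_constant" declare the Jacobians of the equality and inequality constraints constant (linear constraints), so IPOPT evaluates the corresponding callbacks only once. Assuming the stock IPOPT C++ interface, the same effect can be obtained manually when the structure is known up front:

#include <IpIpoptApplication.hpp>

// Manual counterpart of what IPOPTSolver now detects and sets automatically.
void enable_constant_structure(Ipopt::SmartPtr<Ipopt::IpoptApplication> app)
{
  app->Options()->SetStringValue("hessian_constant", "yes");  // quadratic objective/constraints
  app->Options()->SetStringValue("jac_c_constant",   "yes");  // linear equality constraints
  app->Options()->SetStringValue("jac_d_constant",   "yes");  // linear inequality constraints
}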
@@ -16,6 +16,7 @@
//== INCLUDES =================================================================
#include <CoMISo/Config/CoMISoDefines.hh>
#include <CoMISo/Utils/StopWatch.hh>
#include <vector>
#include <cstddef>
#include <gmm/gmm.h>
@@ -84,6 +85,14 @@ public:
int solve(NProblemInterface* _problem, const std::vector<NConstraintInterface*>& _constraints);
// same as above with additional lazy constraints that are only added iteratively to the problem if not satisfied
int solve(NProblemInterface* _problem,
const std::vector<NConstraintInterface*>& _constraints,
const std::vector<NConstraintInterface*>& _lazy_constraints,
const double _almost_infeasible = 0.5,
const int _max_passes = 5 );
// for convenience, if no constraints are given
int solve(NProblemInterface* _problem);
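A hypothetical call site for the new lazy-constraint overload is sketched below; the include paths and the COMISO namespace are assumptions based on the rest of CoMISo, and the problem and constraint objects are supplied by the caller. Lazy constraints stay inactive until an intermediate solution violates them, or comes within the _almost_infeasible margin of doing so.

#include <vector>
#include <CoMISo/NSolver/IPOPTSolver.hh>          // assumed include paths
#include <CoMISo/NSolver/NProblemInterface.hh>
#include <CoMISo/NSolver/NConstraintInterface.hh>

int solve_with_lazy_constraints(COMISO::NProblemInterface&                        _problem,
                                const std::vector<COMISO::NConstraintInterface*>& _constraints,
                                const std::vector<COMISO::NConstraintInterface*>& _lazy)
{
  COMISO::IPOPTSolver solver;
  // lazy constraints are activated only when (nearly) violated; at most 5 passes are made
  return solver.solve(&_problem, _constraints, _lazy, 0.5, 5);
}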
@@ -133,7 +142,7 @@ public:
/** default constructor */
NProblemIPOPT(NProblemInterface* _problem, const std::vector<NConstraintInterface*>& _constraints)
: problem_(_problem) { split_constraints(_constraints);}
: problem_(_problem), store_solution_(false) { split_constraints(_constraints); analyze_special_properties(_problem, _constraints);}
/** default destructor */
virtual ~NProblemIPOPT() {};
@@ -193,6 +202,14 @@ public:
IpoptCalculatedQuantities* ip_cq);
//@}
// special properties of problem
bool hessian_constant() const;
bool jac_c_constant() const;
bool jac_d_constant() const;
bool& store_solution() {return store_solution_; }
std::vector<double>& solution() {return x_;}
private:
/**@name Methods to block default compiler methods.
* The compiler automatically generates the following three methods.
@@ -213,6 +230,10 @@ private:
// split user-provided constraints into general-constraints and bound-constraints
void split_constraints(const std::vector<NConstraintInterface*>& _constraints);
// determine if hessian_constant, jac_c_constant or jac_d_constant
void analyze_special_properties(const NProblemInterface* _problem, const std::vector<NConstraintInterface*>& _constraints);
protected:
double* P(std::vector<double>& _v)
{
@@ -229,6 +250,13 @@ private:
// reference to constraints vector
std::vector<NConstraintInterface*> constraints_;
std::vector<BoundConstraint*> bound_constraints_;
bool hessian_constant_;
bool jac_c_constant_;
bool jac_d_constant_;
bool store_solution_;
std::vector<double> x_;
};
......
@@ -71,7 +71,9 @@ public:
virtual void eval_hessian ( const double* _x, SMatrixNC& _h );
virtual bool is_linear() { return true;}
virtual bool is_linear() const { return true;}
virtual bool constant_gradient() const { return true;}
virtual bool constant_hessian () const { return true;}
// inherited from base
// virtual ConstraintType constraint_type ( ) { return type_; }
......
@@ -73,7 +73,10 @@ public:
return false;
}
virtual bool is_linear() { return false;}
// provide special properties
virtual bool is_linear() const { return false;}
virtual bool constant_gradient() const { return false;}
virtual bool constant_hessian () const { return false;}
virtual double gradient_update_factor( const double* _x, double _eps = 1e-6 )
{
......
@@ -66,7 +66,8 @@ public:
virtual void store_result ( const double* _x ) = 0;
// advanced properties
virtual bool constant_hessian() { return false; }
virtual bool constant_gradient() const { return false; }
virtual bool constant_hessian() const { return false; }
};
......
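The interface hunks above follow a conservative default-and-override pattern: the base classes NConstraintInterface and NProblemInterface answer false (IPOPT then simply re-evaluates derivatives every iteration, which is always correct, only slower), while provably linear types such as LinearConstraint and BoundConstraint override the answers to true. The same pattern in stand-alone form, with placeholder classes rather than the CoMISo headers:

// Stand-alone illustration of the default-false / override-true pattern used above.
struct Constraint
{
  virtual ~Constraint() {}
  // Conservative defaults: claiming "not constant" is always safe, merely slower.
  virtual bool is_linear()         const { return false; }
  virtual bool constant_gradient() const { return false; }
  virtual bool constant_hessian()  const { return false; }
};

// A linear constraint a^T x + b (=, <=, >=) 0 can promise constant derivatives:
// its gradient is the fixed vector a and its Hessian is identically zero.
struct LinearConstraintSketch : Constraint
{
  virtual bool is_linear()         const { return true; }
  virtual bool constant_gradient() const { return true; }
  virtual bool constant_hessian()  const { return true; }
};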