NLPInterfacePack_ExampleNLPObjGrad.cpp

// @HEADER
// ***********************************************************************
//
// Moocho: Multi-functional Object-Oriented arCHitecture for Optimization
//                  Copyright (2003) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Roscoe A. Bartlett (rabartl@sandia.gov)
//
// ***********************************************************************
// @HEADER

#include <assert.h>

#include <stdexcept>
#include <limits>

#include "NLPInterfacePack_ExampleNLPObjGrad.hpp"
#include "ExampleNLPDirectRTOps.h"
#include "AbstractLinAlgPack_BasisSystemComposite.hpp"
#include "AbstractLinAlgPack_VectorMutable.hpp"
#include "AbstractLinAlgPack_VectorStdOps.hpp"
#include "AbstractLinAlgPack_VectorAuxiliaryOps.hpp"
#include "AbstractLinAlgPack_LinAlgOpPack.hpp"
#include "RTOpPack_RTOpC.hpp"
#include "Teuchos_dyn_cast.hpp"
#include "Teuchos_Assert.hpp"
#include "Teuchos_AbstractFactoryStd.hpp"

namespace {

static RTOpPack::RTOpC explnlp2_c_eval_op;

class init_rtop_server_t {
public:
  init_rtop_server_t() {
    TEUCHOS_TEST_FOR_EXCEPT(0!=RTOp_TOp_explnlp2_c_eval_construct(&explnlp2_c_eval_op.op()));
  }
};
init_rtop_server_t  init_rtop_server;

} // end namespace
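
// Note: the file-scope init_rtop_server object above runs its constructor at
// static-initialization time, so the explnlp2_c_eval_op reduction/transformation
// operator is fully constructed before imp_calc_c() below can ever use it.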
00072 
00073 namespace NLPInterfacePack {
00074 
00075 ExampleNLPObjGrad::ExampleNLPObjGrad(
00076   const VectorSpace::space_ptr_t&  vec_space
00077   ,value_type                      xo
00078   ,bool                            has_bounds
00079   ,bool                            dep_bounded
00080   )
00081   :vec_space_(vec_space), vec_space_comp_(Teuchos::null)
00082   ,initialized_(false), obj_scale_(1.0)
00083   ,has_bounds_(has_bounds), force_xinit_in_bounds_(true), n_(2*vec_space->dim())
00084 {
00085   namespace rcp = MemMngPack;
00086 
00087   // Assert the size of the NLP
00088   TEUCHOS_TEST_FOR_EXCEPTION(
00089     vec_space->dim() <= 0, std::logic_error
00090     ,"ExampleNLPObjGrad::ExampleNLPObjGrad(...) Error!" );
00091 
00092   // Setup the aggregate vector space object
00093   BasisSystemComposite::initialize_space_x(
00094     vec_space, vec_space, &var_dep_, &var_indep_, &vec_space_comp_ );
00095 
00096   // Set the initial starting point.
00097   xinit_ = vec_space_comp_->create_member();
00098   *xinit_ = xo;
00099 
00100   /*
00101     Setup the sparse bounds
00102     
00103     xl(i) = 0.01  \ 
00104                     }  for i <: bounded_rng
00105     xu(i) = 20    /
00106   */
00107 
00108   xl_ = vec_space_comp_->create_member();
00109   xu_ = vec_space_comp_->create_member();
00110 
00111   if(has_bounds) {
00112     const Range1D
00113       bounded_rng   = ( dep_bounded ? var_dep_   : var_indep_ ),
00114       unbounded_rng = ( dep_bounded ? var_indep_ : var_dep_   );
00115     *xl_->sub_view(bounded_rng)   = 0.01;
00116     *xl_->sub_view(unbounded_rng) = -NLP::infinite_bound();
00117     *xu_->sub_view(bounded_rng)   = 20.0;
00118     *xu_->sub_view(unbounded_rng) = +NLP::infinite_bound();
00119   }
00120   else {
00121     *xl_ = -NLP::infinite_bound();
00122     *xu_ = +NLP::infinite_bound();
00123   }
00124 }
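
// For reference, the pieces set up in the constructor above, together with the
// evaluation routines below (see imp_calc_f(), imp_calc_c() and imp_calc_Gf()),
// define the example NLP
//
//   min   f(x) = (obj_scale/2) * sum( x(i)^2, for i = 1..n )
//   s.t.  c(j) = x(j)*(x(m+j) - 1) - 10*x(m+j) = 0, for j = 1..m
//         0.01 <= x(i) <= 20 for i in the bounded partition (only if has_bounds)
//
// where m = n/2 and the dependent/independent partition of x is the one created
// by BasisSystemComposite::initialize_space_x() above.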

// Overridden public members from NLP

void ExampleNLPObjGrad::initialize(bool test_setup)
{
  if( initialized_ ) {
    NLPObjGrad::initialize(test_setup);
    return;
  }

  AbstractLinAlgPack::force_in_bounds( *xl_, *xu_, xinit_.get() );

  NLPObjGrad::initialize(test_setup);

  initialized_ = true;
}

bool ExampleNLPObjGrad::is_initialized() const
{
  return initialized_;
}

size_type ExampleNLPObjGrad::n() const
{
  assert_is_initialized();
  return n_;
}

size_type ExampleNLPObjGrad::m() const
{
  assert_is_initialized();
  return n_ / 2;
}

NLP::vec_space_ptr_t ExampleNLPObjGrad::space_x() const
{
  return vec_space_comp_;
}

NLP::vec_space_ptr_t ExampleNLPObjGrad::space_c() const
{
  return vec_space_;
}

size_type ExampleNLPObjGrad::num_bounded_x() const
{
  return has_bounds_ ? n_/2 : 0;
}

void ExampleNLPObjGrad::force_xinit_in_bounds(bool force_xinit_in_bounds)
{
  force_xinit_in_bounds_ = force_xinit_in_bounds;
}

bool ExampleNLPObjGrad::force_xinit_in_bounds() const
{
  return force_xinit_in_bounds_;
}

const Vector& ExampleNLPObjGrad::xinit() const
{
  assert_is_initialized();
  return *xinit_;
}

const Vector& ExampleNLPObjGrad::xl() const
{
  assert_is_initialized();
  return *xl_;
}

const Vector& ExampleNLPObjGrad::xu() const
{
  assert_is_initialized();
  return *xu_;
}

value_type ExampleNLPObjGrad::max_var_bounds_viol() const
{
  return std::numeric_limits<value_type>::max(); // No limit on how much the bounds can be violated
}

void ExampleNLPObjGrad::scale_f( value_type scale_f )
{
  assert_is_initialized();
  obj_scale_ = scale_f;
}

value_type ExampleNLPObjGrad::scale_f() const
{
  assert_is_initialized();
  return obj_scale_;
}

void ExampleNLPObjGrad::report_final_solution(
  const Vector&    x
  ,const Vector*   lambda
  ,const Vector*   nu
  ,bool            optimal
  )
{
  assert_is_initialized();
  // Do what you want with the solution (or final values) here.
  // For this example we will just ignore it.
}

Range1D ExampleNLPObjGrad::var_dep() const
{
  return var_dep_;
}

Range1D ExampleNLPObjGrad::var_indep() const
{
  return var_indep_;
}

// Overridden protected members from NLP

void ExampleNLPObjGrad::imp_calc_f(const Vector& x, bool newx
  , const ZeroOrderInfo& zero_order_info) const
{
  using AbstractLinAlgPack::dot;
  assert_is_initialized();
  f(); // assert f is set
  TEUCHOS_TEST_FOR_EXCEPTION( n() != x.dim(), std::length_error, "ExampleNLPObjGrad::imp_calc_f(...)" );
  // f(x) = (obj_scale/2) * sum( x(i)^2, for i = 1..n )
  *zero_order_info.f = obj_scale_ / 2.0 * dot(x,x);
}
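
// As a quick numerical check of the formula above: with obj_scale_ = 1 and
// x = (1, 2), f(x) = (1/2)*(1^2 + 2^2) = 2.5.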

void ExampleNLPObjGrad::imp_calc_c(const Vector& x, bool newx
  , const ZeroOrderInfo& zero_order_info) const
{
  assert_is_initialized();
  const size_type n = this->n();
  TEUCHOS_TEST_FOR_EXCEPTION( n != x.dim(), std::length_error, "ExampleNLPObjGrad::imp_calc_c(...)" );

  // c(x)(j) = x(j) * (x(m+j) - 1) - 10 * x(m+j) = 0, for j = 1..m

  Vector::vec_ptr_t
    xD = x.sub_view(var_dep()),
    xI = x.sub_view(var_indep());

  const Vector*  vecs[]      = { xD.get(), xI.get() };
  VectorMutable* targ_vecs[] = { zero_order_info.c };
  AbstractLinAlgPack::apply_op(explnlp2_c_eval_op,2,vecs,1,targ_vecs,NULL);
}
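
// The apply_op(...) call above evaluates the constraints elementwise through the
// explnlp2_c_eval_op operator: with xD = x(var_dep) and xI = x(var_indep) it
// writes c(j) = xD(j)*(xI(j) - 1) - 10*xI(j) into zero_order_info.c, so the same
// code works regardless of how the underlying vectors are actually stored.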

void ExampleNLPObjGrad::imp_calc_h(
  const Vector& x, bool newx, const ZeroOrderInfo& zero_order_info) const
{
  TEUCHOS_TEST_FOR_EXCEPT(true); // Should never be called!
}

// Overridden protected members from NLPObjGrad

void ExampleNLPObjGrad::imp_calc_Gf(const Vector& x, bool newx
  , const ObjGradInfo& obj_grad_info) const
{
  assert_is_initialized();
  TEUCHOS_TEST_FOR_EXCEPTION( n() != x.dim(), std::length_error, "ExampleNLPObjGrad::imp_calc_Gf(...)" );
  // Gf = obj_scale * x
  LinAlgOpPack::V_StV(obj_grad_info.Gf,obj_scale_,x);
}
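
// Since f(x) = (obj_scale/2) * sum( x(i)^2 ), the gradient is simply
// d(f)/d(x(i)) = obj_scale * x(i), and the LinAlgOpPack::V_StV() call above
// performs exactly that scaled assignment, Gf = obj_scale_ * x.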

} // end namespace NLPInterfacePack
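
// A minimal usage sketch (illustrative only; the concrete VectorSpace
// implementation and the parameter values are assumptions, not something this
// file provides):
//
//   VectorSpace::space_ptr_t vec_space = ...;   // space for the n/2 dependent variables
//   ExampleNLPObjGrad nlp(vec_space, 0.1 /*xo*/, true /*has_bounds*/, true /*dep_bounded*/);
//   nlp.initialize(true /*test_setup*/);
//   // nlp.xinit(), nlp.xl() and nlp.xu() can now be handed to an NLP solver.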