1 #include "ROL_AugmentedLagrangian.hpp"     2 #include "optimization/full_space_step.hpp"     3 #include "optimization/kkt_operator.hpp"     4 #include "optimization/kkt_birosghattas_preconditioners.hpp"     6 #include "global_counter.hpp"    13     ROL::ParameterList &parlist,
    14     const ROL::Ptr<LineSearch<Real> > &lineSearch,
    15     const ROL::Ptr<Secant<Real> > &secant)
    18     , lineSearch_(lineSearch)
    19     , els_(LINESEARCH_USERDEFINED)
    20     , econd_(CURVATURECONDITION_WOLFE)
    23     , pcout(
std::cout, dealii::Utilities::MPI::this_mpi_process(MPI_COMM_WORLD)==0)
    26     ROL::ParameterList& Llist = parlist.sublist(
"Step").sublist(
"Line Search");
    27     ROL::ParameterList& Glist = parlist.sublist(
"General");
    28     econd_ = StringToECurvatureCondition(Llist.sublist(
"Curvature Condition").get(
"Type",
"Strong Wolfe Conditions") );
    37         lineSearchName_ = Llist.sublist(
"Line-Search Method").get(
"Type",
"Backtracking");
    42         lineSearchName_ = Llist.sublist(
"Line-Search Method").get(
"User Defined Line-Search Name",
    43                                                                   "Unspecified User Defined Line-Search");
    46     secantName_ = Glist.sublist(
"Secant").get(
"Type",
"Limited-Memory BFGS");
    48     secant_ = SecantFactory<Real>(parlist);
    54     Vector<Real> &lagrangian_gradient,
    55     const Vector<Real> &design_variables,
    56     const Vector<Real> &lagrange_mult,
    57     const Vector<Real> &objective_gradient,
    58     Constraint<Real> &equal_constraints)
 const    61     Real tol = std::sqrt(ROL_EPSILON<Real>());
    64     const double old_CFL = flow_constraint.flow_CFL_;
    68     flow_constraint.flow_CFL_ = 0.0;
    69     equal_constraints.applyAdjointJacobian(lagrangian_gradient, lagrange_mult, design_variables, tol);
    70     flow_constraint.flow_CFL_ = old_CFL;
    71     lagrangian_gradient.plus(objective_gradient);
    76     Vector<Real> &lagrange_mult,
    77     const Vector<Real> &design_variables,
    78     const Vector<Real> &objective_gradient,
    79     Constraint<Real> &equal_constraints)
 const    99     Real tol = std::sqrt(ROL_EPSILON<Real>());
   102     const std::vector<Real> augiters = equal_constraints.solveAugmentedSystem(*lhs1, *lhs2, *rhs1, *rhs2, design_variables, tol);
   106     lagrange_mult.plus(*lhs2);
   117 template <
class Real>
   119     Vector<Real> &design_variables,
   120     const Vector<Real> &gradient,
   121     Vector<Real> &lagrange_mult,
   122     const Vector<Real> &equal_constraints_values,
   123     Objective<Real> &objective,
   124     Constraint<Real> &equal_constraints,
   125     AlgorithmState<Real> &algo_state )
   127     BoundConstraint<Real> bound_constraints;
   128     bound_constraints.deactivate();
   133         equal_constraints_values,
   140 template <
class Real>
   142     Vector<Real> &design_variables,
   143     const Vector<Real> &gradient,
   144     Vector<Real> &lagrange_mult,
   145     const Vector<Real> &equal_constraints_values,
   146     Objective<Real> &objective,
   147     Constraint<Real> &equal_constraints,
   148     BoundConstraint<Real> &bound_constraints,
   149     AlgorithmState<Real> &algo_state )
   151     pcout << __PRETTY_FUNCTION__ << std::endl;
   152     Real tol = ROL_EPSILON<Real>();
   157     algo_state.nfval = 0;
   158     algo_state.ncval = 0;
   159     algo_state.ngrad = 0;
   170     step_state->descentVec  = design_variables.clone();
   171     step_state->gradientVec = gradient.clone();
   172     step_state->constraintVec = equal_constraints_values.clone();
   173     step_state->searchSize  = zero;
   176     if ( bound_constraints.isActivated() ) {
   177       bound_constraints.project(design_variables);
   180     const bool changed_design_variables = 
true;
   181     objective.update(design_variables, changed_design_variables, algo_state.iter);
   182     algo_state.value = objective.value(design_variables, tol);
   184     objective.gradient(*(step_state->gradientVec), design_variables, tol);
   188     equal_constraints.update(design_variables,
true,algo_state.iter);
   189     equal_constraints.value(*(step_state->constraintVec), design_variables, zero);
   194     auto &equal_constraints_sim_opt = 
dynamic_cast<ROL::Constraint_SimOpt<Real>&
>(equal_constraints);
   195     const auto &objective_ctl_gradient = *(
dynamic_cast<const Vector_SimOpt<Real>&
>(*(step_state->gradientVec)).get_1());
   196     const auto &design_variables_sim_opt = 
dynamic_cast<ROL::Vector_SimOpt<Real>&
>(design_variables);
   197     const auto &simulation_variables = *(design_variables_sim_opt.get_1());
   198     const auto &control_variables    = *(design_variables_sim_opt.get_2());
   199     equal_constraints_sim_opt.applyInverseAdjointJacobian_1(lagrange_mult, objective_ctl_gradient, simulation_variables, control_variables, tol);
   200     lagrange_mult.scale(-1.0);
   203     ROL::Ptr<Vector<Real> > lagrangian_gradient = step_state->gradientVec->clone();
   204     computeLagrangianGradient(*lagrangian_gradient, design_variables, lagrange_mult, *(step_state->gradientVec), equal_constraints);
   205     const auto &lagrangian_gradient_simopt = 
dynamic_cast<const Vector_SimOpt<Real>&
>(*lagrangian_gradient);
   214     flow_constraint.flow_CFL_ = -100;
   233             makePtrFromRef<Objective<Real>>(objective),
   234             makePtrFromRef<Constraint<Real>>(equal_constraints),
   238             equal_constraints_values,
   242     ROL::Ptr<Vector<Real> > search_direction_dummy = design_variables.clone();
   246 template <
class Real>
   248     const Vector<Real> &search_direction,
   249     const Vector<Real> &lagrange_mult_search_direction,
   250     const Vector<Real> &design_variables,
   251     const Vector<Real> &objective_gradient,
   252     const Vector<Real> &equal_constraints_values,
   253     const Vector<Real> &adjoint_jacobian_lagrange,
   254     Constraint<Real> &equal_constraints,
   257     pcout << __PRETTY_FUNCTION__ << std::endl;
   260     Real penalty = objective_gradient.dot(search_direction);
   261     penalty += adjoint_jacobian_lagrange.dot(search_direction);
   262     penalty += equal_constraints_values.dot(lagrange_mult_search_direction);
   264     const ROL::Ptr<Vector<Real>> jacobian_search_direction = equal_constraints_values.clone();
   265     Real tol = std::sqrt(ROL_EPSILON<Real>());
   266     equal_constraints.applyJacobian(*jacobian_search_direction, search_direction, design_variables, tol);
   268     Real denom = jacobian_search_direction->dot(equal_constraints_values);
   288 template <
class Real>
   289 template<
typename MatrixType, 
typename VectorType, 
typename PreconditionerType>
   291     MatrixType &matrix_A,
   292     VectorType &right_hand_side,
   293     VectorType &solution,
   294     PreconditionerType &preconditioner)
   297     const bool print_kkt_operator = 
false;
   298     const bool print_precond_kkt_operator = 
false;
   300     if (print_kkt_operator) {
   301         matrix_A.print(right_hand_side);
   304     if (print_precond_kkt_operator) {
   305        const int do_full_matrix = (1 == dealii::Utilities::MPI::n_mpi_processes(MPI_COMM_WORLD));
   306        pcout << 
"do_full_matrix: " << do_full_matrix << std::endl;
   307        if (do_full_matrix) {
   309            column_of_kkt_operator.
reinit(right_hand_side);
   310            column_of_precond_kkt_operator.
reinit(right_hand_side);
   311            dealii::FullMatrix<double> fullA(right_hand_side.size());
   312            for (
int i = 0; i < right_hand_side.size(); ++i) {
   313                pcout << 
"COLUMN NUMBER: " << i+1 << 
" OUT OF " << right_hand_side.size() << std::endl;
   314                auto basis = right_hand_side.basis(i);
   315                MPI_Barrier(MPI_COMM_WORLD);
   317                    matrix_A.vmult(column_of_kkt_operator,*basis);
   318                    preconditioner.vmult(column_of_precond_kkt_operator,column_of_kkt_operator);
   321                if (do_full_matrix) {
   322                    for (
int j = 0; j < right_hand_side.size(); ++j) {
   323                        fullA[j][i] = column_of_precond_kkt_operator[j];
   328            pcout<<
"Dense matrix:"<<std::endl;
   329            fullA.print_formatted(std::cout, 14, 
true, 10, 
"0", 1., 0.);
   334     enum Solver_types { gmres, fgmres };
   339     const double rhs_norm = right_hand_side.l2_norm();
   348     const double tolerance = std::min(1e-4, std::max(1e-6 * rhs_norm, 1e-11));
   350     dealii::SolverControl solver_control(2000, tolerance, 
true, 
true);
   351     solver_control.enable_history_data();
   353     (void) preconditioner;
   354     const unsigned int     max_n_tmp_vectors = 1000;
   357     Solver_types solver_type = fgmres;
   358     switch(solver_type) {
   361             const bool     right_preconditioning = 
true; 
   362             const bool     use_default_residual = 
true;
   363             const bool     force_re_orthogonalization = 
false; 
   364             typedef typename dealii::SolverGMRES<VectorType>::AdditionalData AddiData_GMRES;
   365             AddiData_GMRES add_data_gmres( max_n_tmp_vectors, right_preconditioning, use_default_residual, force_re_orthogonalization);
   366             dealii::SolverGMRES<VectorType> solver_gmres(solver_control, add_data_gmres);
   367             solution = right_hand_side;
   369                 solver_gmres.solve(matrix_A, solution, right_hand_side
   377             typedef typename dealii::SolverFGMRES<VectorType>::AdditionalData AddiData_FGMRES;
   378             AddiData_FGMRES add_data_fgmres( max_n_tmp_vectors );
   379             dealii::SolverFGMRES<VectorType> solver_fgmres(solver_control, add_data_fgmres);
   381                 solver_fgmres.solve(matrix_A, solution, right_hand_side
   385                 solution = right_hand_side;
   391     return solver_control.get_history_data();
   395 template <
class Real>
   397     Vector<Real> &search_direction,
   398     Vector<Real> &lag_search_direction,
   399     const Vector<Real> &design_variables,
   400     const Vector<Real> &lagrange_mult,
   401     Objective<Real> &objective,
   402     Constraint<Real> &equal_constraints)
   404     Real tol = std::sqrt(ROL_EPSILON<Real>());
   405     const Real one = 1.0;
   409     objective.gradient(*objective_gradient, design_variables, tol);
   412     equal_constraints.applyAdjointJacobian(*adjoint_jacobian_lagrange, lagrange_mult, design_variables, tol);
   421     equal_constraints.value(*rhs2, design_variables, tol);
   428     ROL::Ptr<Vector<Real> > lhs1 = rhs1->clone();
   429     ROL::Ptr<Vector<Real> > lhs2 = rhs2->clone();
   431     ROL::Vector_SimOpt lhs_rol(lhs1, lhs2);
   432     ROL::Vector_SimOpt rhs_rol(rhs1, rhs2);
   435         makePtrFromRef<Objective<Real>>(objective),
   436         makePtrFromRef<Constraint<Real>>(equal_constraints),
   437         makePtrFromRef<
const Vector<Real>>(design_variables),
   438         makePtrFromRef<
const Vector<Real>>(lagrange_mult));
   441     std::shared_ptr<BirosGhattasPreconditioner<Real>> kkt_precond =
   452     std::vector<double> linear_residuals = 
solve_linear (kkt_operator, rhs, lhs, *kkt_precond);
   453     pcout << 
"Solving the KKT system took "   454         << linear_residuals.size() << 
" iterations "   455         << 
" to achieve a residual of " << linear_residuals.back() << std::endl;
   457     search_direction.set(*(lhs_rol.get_1()));
   458     lag_search_direction.set(*(lhs_rol.get_2()));
   479     return linear_residuals;
   483 template <
class Real>
   485     Vector<Real> &search_direction,
   486     const Vector<Real> &design_variables,
   487     const Vector<Real> &lagrange_mult,
   488     Objective<Real> &objective,
   489     Constraint<Real> &equal_constraints,
   490     AlgorithmState<Real> &algo_state )
   492     BoundConstraint<Real> bound_constraints;
   493     bound_constraints.deactivate();
   494     compute( search_direction, design_variables, lagrange_mult, objective, equal_constraints, bound_constraints, algo_state );
   497 template <
class Real>
   499     Vector<Real> &search_direction,
   500     const Vector<Real> &design_variables,
   501     const Vector<Real> &lagrange_mult,
   502     Objective<Real> &objective,
   503     Constraint<Real> &equal_constraints,
   504     BoundConstraint<Real> &bound_constraints,
   505     AlgorithmState<Real> &algo_state )
   507     pcout << __PRETTY_FUNCTION__ << std::endl;
   510     Real tol = std::sqrt(ROL_EPSILON<Real>());
   511     const Real one = 1.0;
   515     objective.gradient(*objective_gradient, design_variables, tol);
   518     equal_constraints.applyAdjointJacobian(*adjoint_jacobian_lagrange, lagrange_mult, design_variables, tol);
   524     ROL::Ptr<Vector<Real> > lagrangian_gradient = step_state->gradientVec->clone();
   525     computeLagrangianGradient(*lagrangian_gradient, design_variables, lagrange_mult, *objective_gradient, equal_constraints);
   526     rhs1->set(*lagrangian_gradient);
   529     equal_constraints.value(*rhs2, design_variables, tol);
   543         << 
"Startingto solve augmented system..."   571     const std::vector<Real> kkt_iters = 
solve_KKT_system(*lhs1, *lhs2, design_variables, lagrange_mult, objective, equal_constraints);
   573     step_state->SPiter = kkt_iters.size();
   574     pcout << 
"Finished solving augmented system..." << std::endl;
   577         search_direction.set(*lhs1);
   611     const Real penalty_offset = 10;
   617         *(step_state->constraintVec),
   618         *adjoint_jacobian_lagrange,
   621     const auto reduced_gradient = (
dynamic_cast<Vector_SimOpt<Real>&
>(*lagrangian_gradient)).get_2();
   625         << 
"Finished computeAugmentedLagrangianPenalty..."   627     AugmentedLagrangian<Real> &augLag = 
dynamic_cast<AugmentedLagrangian<Real>&
>(*merit_function_);
   629     step_state->nfval = 0;
   630     step_state->ngrad = 0;
   631     Real merit_function_value = 0.0;
   633     bool linesearch_success = 
false;
   636     while (!linesearch_success) {
   640         const bool changed_design_variables = 
true;
   641         merit_function_->update(design_variables, changed_design_variables, algo_state.iter);
   642         fold = merit_function_value;
   644         merit_function_->gradient( *merit_function_gradient, design_variables, tol );
   645         Real directional_derivative_step = merit_function_gradient->dot(search_direction);
   649             << 
"Directional_derivative_step (Should be negative for descent direction)"   650             << directional_derivative_step
   659         merit_function_value = merit_function_->value(design_variables, tol );
   661             << 
"Performing line search..."   662             << 
" Initial merit function value = " << merit_function_value
   664         lineSearch_->setData(algo_state.gnorm,*merit_function_gradient);
   668                          merit_function_value,
   671                          directional_derivative_step,
   677         const int max_line_searches = 
parlist_.sublist(
"Step").sublist(
"Line Search").get(
"Function Evaluation Limit",20);
   679             linesearch_success = 
true;
   681                 << 
"End of line search... searchSize is..."   682                 << step_state->searchSize
   683                 << 
" and number of function evaluations: "   685                 << 
" and n_linesearches: "   687                 << 
" Max linesearches : " << max_line_searches
   688                 << 
" Final merit function value = " << merit_function_value
   692             Real penalty_reduction = 0.1;
   694                 << 
" Max linesearches achieved: " << max_line_searches
   695                 << 
" Current merit_function_value value = " << merit_function_value
   701             if (n_searches > 1) {
   702                 pcout << 
" Linesearch failed, searching other direction " << std::endl;
   703                 search_direction.scale(-1.0);
   706             if (n_searches > 2) {
   707                 pcout << 
" Linesearch failed in other direction... ending " << std::endl;
   711         lineSearch_->setMaxitUpdate(step_state->searchSize, merit_function_value, fold);
   715         << 
"End of line search... searchSize is..."   716         << step_state->searchSize
   717         << 
" and number of function evaluations: "   719         << 
" Final merit function value = " << merit_function_value
   728     search_direction.scale(step_state->searchSize);
   729     if ( bound_constraints.isActivated() ) {
   730         search_direction.plus(design_variables);
   731         bound_constraints.project(search_direction);
   732         search_direction.axpy(static_cast<Real>(-1),design_variables);
   735         << 
"End of compute..."   740 template <
class Real>
   742     Vector<Real> &design_variables,
   743     Vector<Real> &lagrange_mult,
   744     const Vector<Real> &search_direction,
   745     Objective<Real> &objective,
   746     Constraint<Real> &equal_constraints,
   747     AlgorithmState<Real> &algo_state )
   749     pcout << __PRETTY_FUNCTION__ << std::endl;
   750     BoundConstraint<Real> bound_constraints;
   751     bound_constraints.deactivate();
   752     update( design_variables, lagrange_mult, search_direction, objective, equal_constraints, bound_constraints, algo_state );
   755 template <
class Real>
   757     Vector<Real> &design_variables,
   758     Vector<Real> &lagrange_mult,
   759     const Vector<Real> &search_direction,
   760     Objective<Real> &objective,
   761     Constraint<Real> &equal_constraints,
   762     BoundConstraint< Real > &bound_constraints,
   763     AlgorithmState<Real> &algo_state )
   765     pcout << __PRETTY_FUNCTION__ << std::endl;
   766     Real tol = std::sqrt(ROL_EPSILON<Real>());
   767     (void) bound_constraints;
   768     design_variables.plus(search_direction);
   773     step_state->descentVec  = design_variables.clone();
   774     objective.gradient(*(step_state->gradientVec), design_variables, tol);
   775     equal_constraints.value(*(step_state->constraintVec), design_variables, tol);
   778     ROL::Ptr<Vector<Real> > lagrangian_gradient = step_state->gradientVec->clone();
   779     computeLagrangianGradient(*lagrangian_gradient, design_variables, lagrange_mult, *(step_state->gradientVec), equal_constraints);
   781     algo_state.nfval += step_state->nfval;
   782     algo_state.ngrad += step_state->ngrad;
   785     algo_state.value = objective.value(design_variables, tol);
   786     algo_state.gnorm = lagrangian_gradient->norm();
   787     algo_state.cnorm = step_state->constraintVec->norm();
   788     search_sim_norm = ((
dynamic_cast<const Vector_SimOpt<Real>&
>(search_direction)).get_1())->norm();
   789     search_ctl_norm = ((
dynamic_cast<const Vector_SimOpt<Real>&
>(search_direction)).get_2())->norm();
   795     algo_state.snorm = std::sqrt(algo_state.snorm);
   806     flow_constraint.flow_CFL_ = -10000*std::max(1.0, 1.0/std::pow(algo_state.cnorm, 2.00));
   810     algo_state.iterateVec->set(design_variables);
   811     algo_state.lagmultVec->set(lagrange_mult);
   814     const auto current_reduced_gradient = (
dynamic_cast<Vector_SimOpt<Real>&
>(*lagrangian_gradient)).get_2();
   815     const auto control_search_direction = (
dynamic_cast<const Vector_SimOpt<Real>&
>(search_direction)).get_2();
   816     const double search_norm = control_search_direction->norm();
   818     const double modified_search_norm = std::sqrt(std::pow(search_norm,2) * 1e-8 / ROL_EPSILON<Real>());
   820         secant_->updateStorage(design_variables, *current_reduced_gradient, *
previous_reduced_gradient_, *control_search_direction, modified_search_norm, algo_state.iter+1);
   825     MPI_Barrier(MPI_COMM_WORLD);
   826     if (dealii::Utilities::MPI::this_mpi_process(MPI_COMM_WORLD) == 0) {
   828         << 
" algo_state.iter: "  <<   algo_state.iter << std::endl
   830         << 
" step_state->searchSize: " << step_state->searchSize << std::endl
   831         << 
" algo_state.value: "  <<   algo_state.value << std::endl
   832         << 
" algo_state.gnorm: "  <<   algo_state.gnorm << std::endl
   833         << 
" algo_state.cnorm: "  <<   algo_state.cnorm << std::endl
   834         << 
" algo_state.snorm: "  <<   algo_state.snorm << std::endl
   835         << 
" n_vmult_total: "  <<   n_vmult << std::endl
   836         << 
"  dRdW_form " << dRdW_form << std::endl
   837         << 
"  dRdW_mult " << dRdW_mult << std::endl
   838         << 
"  dRdX_mult " << dRdX_mult << std::endl
   839         << 
"  d2R_mult  " << d2R_mult  << std::endl
   842     MPI_Barrier(MPI_COMM_WORLD);
   845 template <
class Real>
   849   std::stringstream hist;
   853   hist << std::setw(18) << std::left << 
"Iteration";
   854   hist << std::setw(18) << std::left << 
"Func. val.";
   855   hist << std::setw(18) << std::left << 
"||Lagr. grad.||";
   856   hist << std::setw(18) << std::left << 
"||Constraint||";
   857   hist << std::setw(18) << std::left << 
"||Search dir||";
   858   hist << std::setw(18) << std::left << 
"search_ctl_norm";
   859   hist << std::setw(18) << std::left << 
"search_sim_norm";
   860   hist << std::setw(18) << std::left << 
"search_adj_norm";
   861   hist << std::setw(18) << std::left << 
"n_kkt_iter";
   862   hist << std::setw(18) << std::left << 
"n_linesearches";
   863   hist << std::setw(18) << std::left << 
"n_grad";
   864   hist << std::setw(18) << std::left << 
"n_vmult";
   865   hist << std::setw(18) << std::left << 
"dRdW_form";
   866   hist << std::setw(18) << std::left << 
"dRdW_mult";
   867   hist << std::setw(18) << std::left << 
"dRdX_mult";
   868   hist << std::setw(18) << std::left << 
"d2R_mult";
   873 template <
class Real>
   876   std::stringstream hist;
   878   hist << 
"********************************************************" << std::endl;
   879   hist << 
"Biros and Ghattas Full-space method...";
   881   const auto &design_variable_simopt = 
dynamic_cast<const Vector_SimOpt<Real>&
>(*design_variable_cloner_);
   883       << design_variable_cloner_->dimension() << 
" design variables: "   884       << design_variable_simopt.get_1()->dimension() << 
" simulation variables and "   885       << design_variable_simopt.get_2()->dimension() << 
" control variables."   889   hist << 
" satisfying " << ECurvatureConditionToString(
econd_) << 
"\n";
   890   hist << 
"********************************************************" << std::endl;
   894 template <
class Real>
   904   std::stringstream hist;
   905   if ( algo_state.iter == 0 ) {
   909   if ( algo_state.iter == 0 ) {
   914   if ( algo_state.iter == 0 ) {
   918     hist << std::setw(18) << std::left << algo_state.iter;
   919     hist << std::setw(18) << std::left << algo_state.value;
   920     hist << std::setw(18) << std::left << algo_state.gnorm;
   921     hist << std::setw(18) << std::left << algo_state.cnorm;
   922     hist << std::setw(18) << std::left << algo_state.snorm;
   926     hist << std::setw(18) << std::left << step_state->SPiter;
   927     hist << std::setw(18) << std::left << step_state->nfval;
   928     hist << std::setw(18) << std::left << step_state->ngrad;
   929     hist << std::setw(18) << std::left << n_vmult;
   930     hist << std::setw(18) << std::left << dRdW_form;
   931     hist << std::setw(18) << std::left << dRdW_mult;
   932     hist << std::setw(18) << std::left << dRdX_mult;
   933     hist << std::setw(18) << std::left << d2R_mult;
 Real penalty_value_
Penalty value of the augmented Lagrangian. 
std::string print(AlgorithmState< Real > &algo_state, bool print_header=false) const override
Print iterate status. 
virtual void initialize(Vector< Real > &design_variables, const Vector< Real > &gradient, Vector< Real > &lagrange_mult, const Vector< Real > &equal_constraints_values, Objective< Real > &objective, Constraint< Real > &equal_constraints, AlgorithmState< Real > &algo_state) override
Initialize with objective and equality constraints. 
ROL::Ptr< Secant< Real > > secant_
Secant object (used for quasi-Newton preconditioner). 
ROL::Ptr< LineSearch< Real > > lineSearch_
Line-search object for globalization. 
std::string preconditioner_name_
Preconditioner name. 
ROL::Ptr< Vector< Real > > lagrange_mult_search_direction_
Lagrange multipliers search direction. 
double search_adj_norm
Norm of the adjoint search direction. 
void reinit(const dealiiSolverVectorWrappingROL &model_vector, const bool leave_elements_uninitialized=false)
ROL::Ptr< Vector< Real > > lagrange_variable_cloner_
Vector used to clone a vector with the Lagrange variables' / constraints' size and parallel distribution.
Wrap the ROL vector into a vector that can be used by deal.II's solver. 
bool acceptLastAlpha_
Whether the last line search's step length is accepted when the maximum number of iterations is reached.
ROL::Ptr< Objective< Real > > merit_function_
Merit function used within the line search. 
int verbosity_
Print verbosity. 
std::string printHeader(void) const override
Print iterate header. 
std::string secantName_
Name of secant used as a reduced-Hessian preconditioner. 
std::vector< double > solve_linear(MatrixType &matrix_A, VectorType &right_hand_side, VectorType &solution, PreconditionerType &preconditioner)
Solve a linear system using deal.II's F/GMRES solver. 
std::string lineSearchName_
Line search name. 
Uses DGBase as a simulation constraint through ROL::Constraint_SimOpt. 
void compute(Vector< Real > &search_direction, const Vector< Real > &design_variables, const Vector< Real > &lagrange_mult, Objective< Real > &objective, Constraint< Real > &equal_constraints, AlgorithmState< Real > &algo_state) override
Computes the search directions. 
ROL::ParameterList parlist_
Parameter list. 
static std::shared_ptr< BirosGhattasPreconditioner< Real > > create_KKT_preconditioner(ROL::ParameterList &parlist, ROL::Objective< Real > &objective, ROL::Constraint< Real > &equal_constraints, const ROL::Vector< Real > &design_variables, const ROL::Vector< Real > &lagrange_mult, const ROL::Ptr< ROL::Secant< Real > > secant_)
Creates a derived preconditioner object, but returns it as BirosGhattasPreconditioner. 
ESecant esec_
Enum determines type of secant to use as reduced Hessian preconditioner. 
dealii::ConditionalOStream pcout
Parallel std::cout that only outputs on mpi_rank==0. 
FullSpace_BirosGhattas(ROL::ParameterList &parlist, const ROL::Ptr< LineSearch< Real > > &lineSearch=ROL::nullPtr, const ROL::Ptr< Secant< Real > > &secant=ROL::nullPtr)
Constructor. See base class. 
int n_linesearches
Number of line searches used in the last design cycle. 
bool use_approximate_full_space_preconditioner_
Use the Tilde{P} version of Biros and Ghattas' preconditioner. 
ROL::Ptr< Vector< Real > > previous_reduced_gradient_
Store previous gradient for secant method. 
ECurvatureCondition econd_
Enum determines type of curvature condition. 
std::vector< Real > solve_KKT_system(Vector< Real > &search_direction, Vector< Real > &lag_search_direction, const Vector< Real > &design_variables, const Vector< Real > &lagrange_mult, Objective< Real > &objective, Constraint< Real > &equal_constraints)
Setup and solve the large KKT system. 
KKT_Operator to be used with dealii::SolverBase class. 
Real computeAugmentedLagrangianPenalty(const Vector< Real > &search_direction, const Vector< Real > &lagrange_mult_search_direction, const Vector< Real > &design_variables, const Vector< Real > &objective_gradient, const Vector< Real > &equal_constraints_values, const Vector< Real > &adjoint_jacobian_lagrange, Constraint< Real > &equal_constraints, const Real offset)
Evaluates the penalty of the augmented Lagrangian function using Biros and Ghattas' lower bound...
void computeInitialLagrangeMultiplier(Vector< Real > &lagrange_mult, const Vector< Real > &design_variables, const Vector< Real > &objective_gradient, Constraint< Real > &equal_constraints) const
void computeLagrangianGradient(Vector< Real > &lagrangian_gradient, const Vector< Real > &design_variables, const Vector< Real > &lagrange_mult, const Vector< Real > &objective_gradient, Constraint< Real > &equal_constraints) const
double search_ctl_norm
Norm of the control search direction. 
double search_sim_norm
Norm of the simulation search direction. 
std::string printName(void) const override
Print step name. 
void update(Vector< Real > &design_variables, Vector< Real > &lagrange_mult, const Vector< Real > &search_direction, Objective< Real > &objective, Constraint< Real > &equal_constraints, AlgorithmState< Real > &algo_state) override
Update step, if successful. 
ROL::Ptr< Vector< Real > > design_variable_cloner_
Vector used to clone a vector like the design variables' size and parallel distribution. 
ELineSearch els_
Enum determines type of line search.