// nothing to optimize if no gates are stored
if ( gates.size() == 0 ) {
    return;
}

// construct an initial guess if none was provided
if ( solution_guess.size() == 0 ) {
    // ...
}
// determine the number of outer iteration loops (random restarts)
int iteration_loops_max;
if ( config.count("max_iteration_loops_grad_descend") > 0 ) {
    long long value;
    config["max_iteration_loops_grad_descend"].get_property( value );
    iteration_loops_max = (int) value;
}
else if ( config.count("max_iteration_loops") > 0 ) {
    long long value;
    config["max_iteration_loops"].get_property( value );
    iteration_loops_max = (int) value;
}
else {
    iteration_loops_max = 1;
}
// random distribution used to perturb the solution guess between restarts
std::uniform_real_distribution<> distrib_real(0.0, 2*M_PI);

// the maximal number of inner iterations of the gradient descend engine
long long max_inner_iterations_loc;
if ( config.count("max_inner_iterations_grad_descend") > 0 ) {
    config["max_inner_iterations_grad_descend"].get_property( max_inner_iterations_loc );
}
else if ( config.count("max_inner_iterations") > 0 ) {
    config["max_inner_iterations"].get_property( max_inner_iterations_loc );
}
else {
    max_inner_iterations_loc = max_inner_iterations;
}
// periodicity of writing out the intermediate cost function values
int output_periodicity;
if ( config.count("output_periodicity_cosine") > 0 ) {
    long long value;
    config["output_periodicity_cosine"].get_property( value );
    output_periodicity = (int) value;
}
else if ( config.count("output_periodicity") > 0 ) {
    long long value;
    config["output_periodicity"].get_property( value );
    output_periodicity = (int) value;
}
else {
    output_periodicity = 0;
}
// optimization loop: repeated gradient descend runs with random restarts
for ( long long idx=0; idx<iteration_loops_max; idx++ ) {

    // ... (set up the gradient descend engine cGrad_Descend for this run)
    double f = cGrad_Descend.Start_Optimization( solution_guess, max_inner_iterations_loc );

    if ( current_minimum > f ) {
        // improved result: store it and perturb the guess only slightly
        // ...
        for ( int jdx=0; jdx<num_of_parameters; jdx++ ) {
            solution_guess[jdx] = solution_guess[jdx] + distrib_real(gen)/100;
        }
    }
    else {
        // no improvement: apply a larger random perturbation before the next run
        for ( int jdx=0; jdx<num_of_parameters; jdx++ ) {
            solution_guess[jdx] = solution_guess[jdx] + distrib_real(gen);
        }
    }

    // write out the current cost function periodically
    if ( output_periodicity>0 && idx % output_periodicity == 0 ) {
        export_current_cost_fnc( current_minimum );
    }
}
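Taken together, the listing above implements gradient descend with random restarts: each outer loop runs one full inner optimization, the best parameter set found so far is kept, and the guess is perturbed before the next run (slightly when the run improved the minimum, more strongly otherwise). The sketch below illustrates only this restart pattern on a toy quadratic cost function; toy_descend and every other name in it are hypothetical stand-ins, not part of the library.

#include <cmath>
#include <cstddef>
#include <iostream>
#include <limits>
#include <random>
#include <vector>

// Hypothetical inner engine: plain gradient steps on the toy cost
// f(x) = sum_i (x_i - 1)^2. Modifies x in place and returns the final cost.
static double toy_descend(std::vector<double>& x, long max_iter) {
    const double lr = 0.1;
    double f = 0.0;
    for (long it = 0; it < max_iter; it++) {
        f = 0.0;
        for (std::size_t i = 0; i < x.size(); i++) {
            double grad = 2.0 * (x[i] - 1.0);
            x[i] -= lr * grad;
            f += (x[i] - 1.0) * (x[i] - 1.0);
        }
    }
    return f;
}

int main() {
    std::mt19937 gen(42);
    std::uniform_real_distribution<> distrib_real(0.0, 2 * M_PI);

    const int num_of_parameters = 4;
    const int iteration_loops_max = 5;          // number of restarts
    const long max_inner_iterations_loc = 100;  // inner iterations per restart

    std::vector<double> solution_guess(num_of_parameters);
    for (double& p : solution_guess) p = distrib_real(gen);

    std::vector<double> optimized_parameters = solution_guess;
    double current_minimum = std::numeric_limits<double>::max();

    for (int idx = 0; idx < iteration_loops_max; idx++) {
        double f = toy_descend(solution_guess, max_inner_iterations_loc);

        if (current_minimum > f) {
            // improved: keep the parameters, perturb the guess only slightly
            current_minimum = f;
            optimized_parameters = solution_guess;
            for (double& p : solution_guess) p += distrib_real(gen) / 100;
        }
        else {
            // no improvement: apply a larger perturbation before the next run
            for (double& p : solution_guess) p += distrib_real(gen);
        }
    }

    std::cout << "best cost found: " << current_minimum
              << " at p[0]=" << optimized_parameters[0] << std::endl;
    return 0;
}

The two perturbation scales mirror the distrib_real(gen)/100 and distrib_real(gen) updates in the listing: a small kick keeps the search near an improving basin, while a large kick pushes it away from a stagnating one.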
Cross references for the symbols used in the listing above:

void export_current_cost_fnc(double current_minimum)
Call to print out into a file the current cost function and the second Rényi entropy on the subsystem...
A class implementing the BFGS iterations.
double current_minimum
The current minimum of the optimization problem.
int get_accelerator_num()
Get the number of accelerators to be reserved on DFEs on the user's demand.
void solve_layer_optimization_problem_GRAD_DESCEND(int num_of_parameters, Matrix_real &solution_guess)
Call to solve the optimization problem layer by layer via GRAD_DESCEND (line search in the direction...).
scalar * get_data() const
Call to get the pointer to the stored data.
std::vector< Gate * > gates
The list of stored gates.
double Start_Optimization(Matrix_real &x, long maximal_iterations_in=5001)
Call this method to start the optimization.
std::map< int, int > iteration_loops
A map of <int n: int num> indicating the number of iterations in each step of the decomposition.
int size() const
Call to get the number of allocated elements.
static void optimization_problem_combined(Matrix_real parameters, void *void_instance, double *f0, Matrix_real &grad)
Call to calculate both the cost function and its gradient components.
std::map< std::string, Config_Element > config
Config metadata utilized during the optimization.
Header file for the parallelized calculation of the cost function of the final optimization problem (s...
int qbit_num
Number of qubits spanning the matrix of the operation.
Header file for DFE support in unitary simulation.
int max_inner_iterations
The maximal number of iterations for which an optimization engine tries to solve the optimization problem.
Matrix_real optimized_parameters_mtx
The optimized parameters for the gates.
Class to store data of complex arrays and their properties.
std::mt19937 gen
Standard mersenne_twister_engine seeded with rd().
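The signatures listed above also document the callback contract between the decomposition class and the optimization engine: optimization_problem_combined is a static function that receives the current parameters together with an opaque void* pointer back to the instance, and it writes the cost into *f0 and the gradient components into grad, while Start_Optimization drives the iterations. The sketch below illustrates this static-callback-plus-void-pointer pattern under simplified assumptions: std::vector<double> stands in for Matrix_real, and ToyProblem, start_optimization and CombinedFnc are hypothetical names, not the library's API.

#include <cmath>
#include <cstddef>
#include <vector>

// Stand-in for Matrix_real: a plain parameter vector.
using Params = std::vector<double>;

// Callback contract mirroring optimization_problem_combined:
// the cost goes to *f0, the gradient components go to grad.
using CombinedFnc = void (*)(const Params& parameters, void* void_instance,
                             double* f0, Params& grad);

// Toy instance playing the role of the decomposition class.
struct ToyProblem {
    double target = 1.0;

    static void cost_and_grad(const Params& p, void* void_instance,
                              double* f0, Params& grad) {
        // recover the owning instance from the opaque pointer
        ToyProblem* self = static_cast<ToyProblem*>(void_instance);
        *f0 = 0.0;
        grad.assign(p.size(), 0.0);
        for (std::size_t i = 0; i < p.size(); i++) {
            double d = p[i] - self->target;
            *f0 += d * d;
            grad[i] = 2.0 * d;
        }
    }
};

// Toy engine playing the role of Start_Optimization:
// plain gradient steps for a fixed number of iterations.
static double start_optimization(CombinedFnc fnc, void* instance,
                                 Params& x, long maximal_iterations_in = 5001) {
    double f = 0.0;
    Params grad;
    for (long it = 0; it < maximal_iterations_in; it++) {
        fnc(x, instance, &f, grad);
        for (std::size_t i = 0; i < x.size(); i++) {
            x[i] -= 0.1 * grad[i];
        }
    }
    return f;
}

int main() {
    ToyProblem problem;
    Params x = {0.3, 2.5, -1.0};
    double f = start_optimization(&ToyProblem::cost_and_grad, &problem, x, 200);
    return f < 1e-6 ? 0 : 1;
}

The void* back-pointer is what lets a C-style engine evaluate an instance-bound cost function without templates or std::function; the static member simply casts the pointer back to the owning class.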