54 if (
gates.size() == 0 ) {
60 if (solution_guess.
size() == 0 ) {
70 int random_shift_count = 0;
71 long long sub_iter_idx = 0;
75 tbb::tick_count adam_start = tbb::tick_count::now();
85 memcpy(solution_guess_tmp.
get_data(), solution_guess.
get_data(), num_of_parameters*
sizeof(double) );
96 int randomization_successful = 0;
99 long long max_inner_iterations_loc;
100 if (
config.count(
"max_inner_iterations_adam") > 0 ) {
101 config[
"max_inner_iterations_adam"].get_property( max_inner_iterations_loc );
103 else if (
config.count(
"max_inner_iterations") > 0 ) {
104 config[
"max_inner_iterations"].get_property( max_inner_iterations_loc );
110 long long iteration_threshold_of_randomization_loc;
111 if (
config.count(
"randomization_threshold_adam") > 0 ) {
112 config[
"randomization_threshold_adam"].get_property( iteration_threshold_of_randomization_loc );
114 else if (
config.count(
"randomization_threshold") > 0 ) {
115 config[
"randomization_threshold"].get_property( iteration_threshold_of_randomization_loc );
118 iteration_threshold_of_randomization_loc = 2500000;
121 long long export_circuit_2_binary_loc;
122 if (
config.count(
"export_circuit_2_binary_adam") > 0 ) {
123 config[
"export_circuit_2_binary_adam"].get_property( export_circuit_2_binary_loc );
125 else if (
config.count(
"export_circuit_2_binary") > 0 ) {
126 config[
"export_circuit_2_binary"].get_property( export_circuit_2_binary_loc );
129 export_circuit_2_binary_loc = 0;
133 double optimization_tolerance_loc;
134 if (
config.count(
"optimization_tolerance_adam") > 0 ) {
135 config[
"optimization_tolerance_adam"].get_property( optimization_tolerance_loc );
137 else if (
config.count(
"optimization_tolerance") > 0 ) {
138 config[
"optimization_tolerance"].get_property( optimization_tolerance_loc );
145 bool adaptive_eta_loc;
146 if (
config.count(
"adaptive_eta_adam") > 0 ) {
148 config[
"adaptive_eta_adam"].get_property( tmp );
149 adaptive_eta_loc = (
bool)tmp;
151 if (
config.count(
"adaptive_eta") > 0 ) {
153 config[
"adaptive_eta"].get_property( tmp );
154 adaptive_eta_loc = (
bool)tmp;
162 if (
config.count(
"eta_adam") > 0 ) {
163 config[
"eta_adam"].get_property( eta_loc );
165 if (
config.count(
"eta") > 0 ) {
166 config[
"eta"].get_property( eta_loc );
171 optimizer.
eta = eta_loc;
176 int output_periodicity;
177 if (
config.count(
"output_periodicity_cosine") > 0 ) {
179 config[
"output_periodicity_cosine"].get_property( value );
180 output_periodicity = (
int) value;
182 if (
config.count(
"output_periodicity") > 0 ) {
184 config[
"output_periodicity"].get_property( value );
185 output_periodicity = (
int) value;
188 output_periodicity = 0;
193 std::stringstream sstream;
194 sstream <<
"max_inner_iterations: " << max_inner_iterations_loc <<
", randomization threshold: " << iteration_threshold_of_randomization_loc << std::endl;
198 for (
long long iter_idx=0; iter_idx<max_inner_iterations_loc; iter_idx++ ) {
205 if (sub_iter_idx == 1 ) {
206 current_minimum_hold = f0;
208 if ( adaptive_eta_loc ) {
209 optimizer.
eta = optimizer.
eta > 1e-3 ? optimizer.
eta : 1e-3;
216 if ((
cost_fnc !=
VQE) && (current_minimum_hold*0.95 > f0 || (current_minimum_hold*0.97 > f0 && f0 < 1e-3) || (current_minimum_hold*0.99 > f0 && f0 < 1e-4) )) {
218 current_minimum_hold = f0;
226 if ( adaptive_eta_loc ) {
227 double new_eta = 1e-3 * f0;
228 optimizer.
eta = new_eta > 1e-6 ? new_eta : 1e-6;
229 optimizer.
eta = new_eta < 1e-1 ? new_eta : 1e-1;
232 randomization_successful = 1;
235 if ( output_periodicity>0 && iter_idx % output_periodicity == 0 ) {
239 if ( iter_idx % 5000 == 0 ) {
242 std::stringstream sstream;
243 sstream <<
"ADAM: processed iterations " << (double)iter_idx/max_inner_iterations_loc*100 <<
"\%, current minimum:" <<
current_minimum <<
", current cost function:" <<
optimization_problem(solution_guess_tmp) <<
", sub_iter_idx:" << sub_iter_idx <<std::endl;
247 std::stringstream sstream;
248 sstream <<
"ADAM: processed iterations " << (double)iter_idx/max_inner_iterations_loc*100 <<
"\%, current minimum:" <<
current_minimum <<
", sub_iter_idx:" << sub_iter_idx <<std::endl;
251 if ( export_circuit_2_binary_loc > 0 ) {
252 std::string
filename(
"initial_circuit_iteration.binary");
270 norm += grad_mtx[grad_idx]*grad_mtx[grad_idx];
272 norm = std::sqrt(norm);
296 if ( sub_iter_idx> iteration_threshold_of_randomization_loc || ADAM_status != 0 ) {
300 random_shift_count++;
305 std::stringstream sstream;
306 if ( ADAM_status == 0 ) {
307 sstream <<
"ADAM: initiate randomization at " << f0 <<
", gradient norm " << norm << std::endl;
310 sstream <<
"ADAM: leaving local minimum " << f0 <<
", gradient norm " << norm <<
" eta: " << optimizer.
eta << std::endl;
315 randomization_successful = 0;
327 ADAM_status = optimizer.
update(solution_guess_tmp, grad_mtx, f0);
337 tbb::tick_count adam_end = tbb::tick_count::now();
339 sstream <<
"adam time: " <<
CPU_time <<
" " << f0 << std::endl;
int update(Matrix_real ¶meters, Matrix_real &grad, const double &f0)
Call to set the number of gate blocks to be optimized in one shot.
bool adaptive_eta
logical variable indicating whether adaptive learning rate is used in the ADAM algorithm ...
Header file for a class containing basic methods for the decomposition process.
void export_current_cost_fnc(double current_minimum)
Call to print out into a file the current cost function and the second Rényi entropy on the subsyste...
void print(const std::stringstream &sstream, int verbose_level=1) const
Call to print output messages in the function of the verbosity level.
double current_minimum
The current minimum of the optimization problem.
cost_function_type cost_fnc
The chosen variant of the cost function.
int get_accelerator_num()
Get the number of accelerators to be reserved on DFEs on the user's demand.
double optimization_problem(double *parameters)
Evaluate the optimization problem of the optimization.
double prev_cost_fnv_val
the previous value of the cost function to be used to evaluate bitflip errors in the cost function (see...
scalar * get_data() const
Call to get the pointer to the stored data.
void initialize_moment_and_variance(int parameter_num)
Presumably initializes the ADAM first-moment and variance accumulators for the given number of parameters — undocumented in source; verify against the implementation.
std::vector< Gate * > gates
The list of stored gates.
std::string project_name
the name of the project
double optimization_tolerance
The maximal allowed error of the optimization problem (The error of the decomposition would scale wit...
void reset()
Presumably resets the optimizer's internal state (moments, iteration counters) — undocumented in source; verify against the implementation.
double CPU_time
time spent on optimization
int verbose
Set the verbosity level of the output messages.
int size() const
Call to get the number of the allocated elements.
static void optimization_problem_combined(Matrix_real parameters, void *void_instance, double *f0, Matrix_real &grad)
Call to calculate both the cost function and its gradient components.
std::map< std::string, Config_Element > config
config metadata utilized during the optimization
void solve_layer_optimization_problem_ADAM(int num_of_parameters, Matrix_real &solution_guess)
Call to solve layer by layer the optimization problem via ADAM algorithm.
A class for Adam optimization according to https://towardsdatascience.com/how-to-implement-an-adam-op...
Header file for the parallelized calculation of the cost function of the final optimization problem (s...
void export_gate_list_to_binary(Matrix_real ¶meters, Gates_block *gates_block, const std::string &filename, int verbosity)
Presumably serializes the gate list of the given gates block, together with its parameters, into the named binary file — undocumented in source; verify against the implementation.
int qbit_num
number of qubits spanning the matrix of the operation
Header file for DFE support in unitary simulation.
void randomize_parameters(Matrix_real &input, Matrix_real &output, const double &f0)
Call to randomize the parameter.
int max_inner_iterations
the maximal number of iterations for which an optimization engine tries to solve the optimization pro...
int random_shift_count_max
the maximal number of parameter randomization tries to escape a local minimum.
Matrix_real optimized_parameters_mtx
The optimized parameters for the gates.
Class to store data of complex arrays and its properties.