Sequential Quantum Gate Decomposer  v1.9.3
Powerful decomposition of general unitaries into one- and two-qubit gates
common/Adam.cpp
Go to the documentation of this file.
1 /*
2 
3 Copyright 2020 Peter Rakyta, Ph.D.
4 
5 Licensed under the Apache License, Version 2.0 (the "License");
6 you may not use this file except in compliance with the License.
7 You may obtain a copy of the License at
8 
9  http://www.apache.org/licenses/LICENSE-2.0
10 
11 Unless required by applicable law or agreed to in writing, software
12 distributed under the License is distributed on an "AS IS" BASIS,
13 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 See the License for the specific language governing permissions and
15 limitations under the License.
16 
17 @author: Peter Rakyta, Ph.D.
18 */
23 #include "Adam.h"
24 #include "tbb/tbb.h"
25 
26 #include <cfloat>
27 
32 
33  beta1 = 0.68;
34  beta2 = 0.8;
35  epsilon = 1e-4;
36  eta = 0.001;
37 
38 
39 
40  reset();
41 
42 #if CBLAS==1
43  num_threads = mkl_get_max_threads();
44 #elif CBLAS==2
45  num_threads = openblas_get_num_threads();
46 #endif
47 
48 }
49 
50 
58 Adam::Adam( double beta1_in, double beta2_in, double epsilon_in, double eta_in ) {
59 
60 
61  beta1 = beta1_in;
62  beta2 = beta2_in;
63  epsilon = epsilon_in;
64  eta = eta_in;
65 
66 
67  reset();
68 
69 
70 #if CBLAS==1
71  num_threads = mkl_get_max_threads();
72 #elif CBLAS==2
73  num_threads = openblas_get_num_threads();
74 #endif
75 
76 }
77 
83 }
84 
85 
86 
90 void Adam::reset() {
91 
92 
93  mom = Matrix_real(0,0);
94  var = Matrix_real(0,0);
95 
96  iter_t = 0;
97  beta1_t = 1.0;
98  beta2_t = 1.0;
99 
100  // vector stroing the lates values of cost function to test local minimum
101  f0_vec = Matrix_real(1, 100);
102  memset( f0_vec.get_data(), 0.0, f0_vec.size()*sizeof(double) );
103  f0_mean = 0.0;
104  f0_idx = 0;
105 
106 
107  // decreasing_test
109  memset( decreasing_vec.get_data(), -1, decreasing_vec.size()*sizeof(int) );
110  decreasing_idx = 0;
111  decreasing_test = -1.0;
112 
113  // previous value of the cost function
114  f0_prev = DBL_MAX;
115 
116 
117 }
118 
119 
124 
125  mom = Matrix_real(parameter_num,1);
126  var = Matrix_real(parameter_num,1);
127 
128  memset( mom.get_data(), 0.0, mom.size()*sizeof(double) );
129  memset( var.get_data(), 0.0, var.size()*sizeof(double) );
130 }
131 
132 
138 int Adam::update( Matrix_real& parameters, Matrix_real& grad, const double& f0 ) {
139 
140  int parameter_num = parameters.size();
141  if ( parameter_num != grad.size() ) {
142  std::string error("Adam::update: number of parameters shoulod be equal to the number of elements in gradient vector");
143  throw error;
144  }
145 
146  if ( mom.size() == 0 ) {
147  initialize_moment_and_variance( parameter_num );
148  }
149 
150  if ( parameter_num != mom.size() ) {
151  std::string error("Adam::update: number of parameters shoulod be equal to the number of elements in momentum vector");
152  throw error;
153  }
154 
155 
156  // test local minimum convergence
157  f0_mean = f0_mean + (f0 - f0_vec[ f0_idx ])/f0_vec.size();
158  f0_vec[ f0_idx ] = f0;
159  f0_idx = (f0_idx + 1) % f0_vec.size();
160 
161  double var_f0 = 0.0;
162  for (int idx=0; idx<f0_vec.size(); idx++) {
163  var_f0 = var_f0 + (f0_vec[idx]-f0_mean)*(f0_vec[idx]-f0_mean);
164  }
165  var_f0 = std::sqrt(var_f0)/f0_vec.size();
166 
167 
168  if ( f0 < f0_prev ) {
169  if ( decreasing_vec[ decreasing_idx ] == 1 ) {
170  // the decresing test did not changed
171  }
172  else {
173  // element in decreasing vec changed from -1 to 1
175  }
176 
178  }
179  else {
180  if ( decreasing_vec[ decreasing_idx ] == 1 ) {
181  // element in decreasing vec changed from 1 to -1
183  }
184  else {
185  // the decresing test did not changed
186  }
187 
189  }
190 
191 
193 
194  f0_prev = f0;
195 
196 
197 
198 
199  // test barren plateau
200  double grad_var = 0.0;
201  for( int idx=0; idx<parameter_num; idx++ ) {
202  grad_var += var[idx];
203  }
204 
205  int barren_plateau = 0;
206  if ( grad_var < epsilon && decreasing_test > 0.7 ) {
207  // barren plateau
208  barren_plateau = 1;
209  }
210 
211 
212  double* mom_data = mom.get_data();
213  double* var_data = var.get_data();
214  double* grad_data = grad.get_data();
215  double* param_data = parameters.get_data();
216 
217 tbb::task_arena ta(4);
218 ta.execute( [&](){
219  tbb::parallel_for( 0, parameter_num, 1, [&](int idx) {
220  //for (int idx=0; idx<parameter_num; idx++) {
221  mom_data[idx] = beta1 * mom_data[idx] + (1-beta1) * grad_data[idx];
222  var_data[idx] = beta2 * var_data[idx] + (1-beta2) * grad_data[idx] * grad_data[idx];
223 
224  // bias correction step
225  beta1_t = beta1_t * beta1;
226  double mom_bias_corr = mom_data[idx]/(1-beta1_t);
227 
228  beta2_t = beta2_t * beta2;
229  double var_bias_corr = var_data[idx]/(1-beta2_t);
230 
231  // update parameters
232  if ( barren_plateau ) {
233  param_data[idx] = param_data[idx] - eta * mom_bias_corr/(sqrt(var_bias_corr) + epsilon/100);
234  }
235  else {
236  param_data[idx] = param_data[idx] - eta * mom_bias_corr/(sqrt(var_bias_corr) + epsilon);
237  }
238  /*
239  if ( std::abs(eta * mom_bias_corr/(sqrt(var_bias_corr) + epsilon)) > 1e-3 ) {
240  std::cout << std::abs(eta * mom_bias_corr/(sqrt(var_bias_corr) + epsilon)) << std::endl;
241  }
242  */
243 
244  //}
245  });
246 
247 });
248 
249 
250  iter_t++;
251 
252 
253 
254  int ADAM_status = 0;
255  if ( std::abs( f0_mean - f0) < 1e-6 && decreasing_test <= 0.7 && var_f0/f0_mean < 1e-6 ) {
256  // local minimum
257  ADAM_status = 1;
258  }
259  else {
260  ADAM_status = 0;
261  }
262 
263  return ADAM_status;
264 
265 
266 
267 }
268 
269 
274 
275  return decreasing_test;
276 
277 }
278 
int update(Matrix_real &parameters, Matrix_real &grad, const double &f0)
Call to perform one Adam update step on the parameter vector.
Header file for a class containing basic methods for the decomposition process.
int64_t iter_t
iteration index
Definition: Adam.h:61
double f0_prev
previous value of the cost function
Definition: Adam.h:82
parameter_num
[set adaptive gate structure]
int decreasing_idx
current index in the decreasing_vec array
Definition: Adam.h:78
Matrix_real f0_vec
vector storing the latest cost function values to test local minimum
Definition: Adam.h:69
matrix_base< int > decreasing_vec
vector containing 1 if cost function decreased from previous value, and -1 if it increased ...
Definition: Adam.h:76
virtual ~Adam()
Destructor of the class.
Definition: common/Adam.cpp:82
double beta2_t
beta2^t
Definition: Adam.h:66
double f0_mean
Mean of the latest cost function values to test local minimum.
Definition: Adam.h:71
scalar * get_data() const
Call to get the pointer to the stored data.
double epsilon
Definition: Adam.h:54
void initialize_moment_and_variance(int parameter_num)
Call to allocate and zero-initialize the momentum and variance vectors.
double beta1_t
beta1^t
Definition: Adam.h:64
double get_decreasing_test()
Call to retrieve the current value of the decreasing test.
Matrix_real var
variance parameter of the Adam algorithm
Definition: Adam.h:59
void reset()
Call to reset the internal state of the Adam optimizer.
Definition: common/Adam.cpp:90
double beta2
parameter beta2 of the Adam algorithm
Definition: Adam.h:52
double eta
Definition: Adam.h:42
Adam()
Nullary constructor of the class.
Definition: common/Adam.cpp:31
Matrix_real mom
momentum parameter of the Adam algorithm
Definition: Adam.h:57
double beta1
parameter beta1 of the Adam algorithm
Definition: Adam.h:50
double decreasing_test
decreasing_test
Definition: Adam.h:80
int f0_idx
current index in the f0_vec array
Definition: Adam.h:73
int size() const
Call to get the number of the allocated elements.
Class to store data of complex arrays and its properties.
Definition: matrix_real.h:39
int num_threads
Store the number of OpenMP threads. (During the calculations OpenMP multithreading is turned off...
Definition: Adam.h:48