Line data Source code
1 : /* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
2 : Copyright (c) 2017-2020 The plumed team
3 : (see the PEOPLE file at the root of the distribution for a list of names)
4 :
5 : See http://www.plumed.org for more information.
6 :
7 : This file is part of plumed, version 2.
8 :
9 : plumed is free software: you can redistribute it and/or modify
10 : it under the terms of the GNU Lesser General Public License as published by
11 : the Free Software Foundation, either version 3 of the License, or
12 : (at your option) any later version.
13 :
14 : plumed is distributed in the hope that it will be useful,
15 : but WITHOUT ANY WARRANTY; without even the implied warranty of
16 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 : GNU Lesser General Public License for more details.
18 :
19 : You should have received a copy of the GNU Lesser General Public License
20 : along with plumed. If not, see <http://www.gnu.org/licenses/>.
21 : +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
22 : #include "MetainferenceBase.h"
23 : #include "tools/File.h"
24 : #include <cmath>
25 : #include <ctime>
26 : #include <numeric>
27 :
28 : using namespace std;
29 :
30 : #ifndef M_PI
31 : #define M_PI 3.14159265358979323846
32 : #endif
33 :
34 : namespace PLMD {
35 : namespace isdb {
36 :
37 93 : void MetainferenceBase::registerKeywords( Keywords& keys ) {
38 93 : Action::registerKeywords(keys);
39 93 : ActionAtomistic::registerKeywords(keys);
40 93 : ActionWithValue::registerKeywords(keys);
41 93 : ActionWithArguments::registerKeywords(keys);
42 93 : componentsAreNotOptional(keys);
43 186 : keys.use("ARG");
44 279 : keys.addFlag("DOSCORE",false,"activate metainference");
45 279 : keys.addFlag("NOENSEMBLE",false,"don't perform any replica-averaging");
46 279 : keys.addFlag("REWEIGHT",false,"simple REWEIGHT using the ARG as energy");
47 372 : keys.add("optional","AVERAGING", "Stride for calculation of averaged weights and sigma_mean");
48 465 : keys.add("compulsory","NOISETYPE","MGAUSS","functional form of the noise (GAUSS,MGAUSS,OUTLIERS,MOUTLIERS,GENERIC)");
49 465 : keys.add("compulsory","LIKELIHOOD","GAUSS","the likelihood for the GENERIC metainference model, GAUSS or LOGN");
50 465 : keys.add("compulsory","DFTILDE","0.1","fraction of sigma_mean used to evolve ftilde");
51 279 : keys.addFlag("SCALEDATA",false,"Set to TRUE if you want to sample a scaling factor common to all values and replicas");
52 465 : keys.add("compulsory","SCALE0","1.0","initial value of the scaling factor");
53 465 : keys.add("compulsory","SCALE_PRIOR","FLAT","either FLAT or GAUSSIAN");
54 372 : keys.add("optional","SCALE_MIN","minimum value of the scaling factor");
55 372 : keys.add("optional","SCALE_MAX","maximum value of the scaling factor");
56 372 : keys.add("optional","DSCALE","maximum MC move of the scaling factor");
57 279 : keys.addFlag("ADDOFFSET",false,"Set to TRUE if you want to sample an offset common to all values and replicas");
58 465 : keys.add("compulsory","OFFSET0","0.0","initial value of the offset");
59 465 : keys.add("compulsory","OFFSET_PRIOR","FLAT","either FLAT or GAUSSIAN");
60 372 : keys.add("optional","OFFSET_MIN","minimum value of the offset");
61 372 : keys.add("optional","OFFSET_MAX","maximum value of the offset");
62 372 : keys.add("optional","DOFFSET","maximum MC move of the offset");
63 372 : keys.add("optional","REGRES_ZERO","stride for regression with zero offset");
64 465 : keys.add("compulsory","SIGMA0","1.0","initial value of the uncertainty parameter");
65 465 : keys.add("compulsory","SIGMA_MIN","0.0","minimum value of the uncertainty parameter");
66 465 : keys.add("compulsory","SIGMA_MAX","10.","maximum value of the uncertainty parameter");
67 372 : keys.add("optional","DSIGMA","maximum MC move of the uncertainty parameter");
68 465 : keys.add("compulsory","OPTSIGMAMEAN","NONE","Set to NONE/SEM to manually set sigma mean, or to estimate it on the fly");
69 372 : keys.add("optional","SIGMA_MEAN0","starting value for the uncertainty in the mean estimate");
70 372 : keys.add("optional","TEMP","the system temperature - this is only needed if code doesn't pass the temperature to plumed");
71 372 : keys.add("optional","MC_STEPS","number of MC steps");
72 372 : keys.add("optional","MC_STRIDE","MC stride");
73 372 : keys.add("optional","MC_CHUNKSIZE","MC chunksize");
74 372 : keys.add("optional","STATUS_FILE","write a file with all the data useful for restart/continuation of Metainference");
75 465 : keys.add("compulsory","WRITE_STRIDE","10000","write the status to a file every N steps, this can be used for restart/continuation");
76 372 : keys.add("optional","SELECTOR","name of selector");
77 372 : keys.add("optional","NSELECT","range of values for selector [0, N-1]");
78 186 : keys.use("RESTART");
79 93 : useCustomisableComponents(keys);
80 372 : keys.addOutputComponent("sigma", "default", "uncertainty parameter");
81 372 : keys.addOutputComponent("sigmaMean", "default", "uncertainty in the mean estimate");
82 372 : keys.addOutputComponent("acceptSigma", "default", "MC acceptance");
83 372 : keys.addOutputComponent("acceptScale", "SCALEDATA", "MC acceptance");
84 372 : keys.addOutputComponent("weight", "REWEIGHT", "weights of the weighted average");
85 372 : keys.addOutputComponent("biasDer", "REWEIGHT", "derivatives with respect to the bias");
86 372 : keys.addOutputComponent("scale", "SCALEDATA", "scale parameter");
87 372 : keys.addOutputComponent("offset", "ADDOFFSET", "offset parameter");
88 372 : keys.addOutputComponent("ftilde", "GENERIC", "ensemble average estimator");
89 93 : }
90 :
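 : // Example (hypothetical) of how the keywords registered above appear in a
 : // plumed input; the action name and values are purely illustrative, but any
 : // ISDB action derived from this base class exposes the same keywords:
 : //
 : //   cv: DISTANCE ATOMS=1,2
 : //   mi: SOME_ISDB_ACTION ARG=cv DOSCORE NOISETYPE=MGAUSS SIGMA0=1.0 SIGMA_MEAN0=0.1 TEMP=300
 :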
91 86 : MetainferenceBase::MetainferenceBase(const ActionOptions&ao):
92 : Action(ao),
93 : ActionAtomistic(ao),
94 : ActionWithArguments(ao),
95 : ActionWithValue(ao),
96 : doscore_(false),
97 : write_stride_(0),
98 : narg(0),
99 : doscale_(false),
100 : scale_(1.),
101 : scale_mu_(0),
102 : scale_min_(1),
103 : scale_max_(-1),
104 : Dscale_(-1),
105 : dooffset_(false),
106 : offset_(0.),
107 : offset_mu_(0),
108 : offset_min_(1),
109 : offset_max_(-1),
110 : Doffset_(-1),
111 : doregres_zero_(false),
112 : nregres_zero_(0),
113 : Dftilde_(0.1),
114 : random(3),
115 : MCsteps_(1),
116 : MCstride_(1),
117 : MCaccept_(0),
118 : MCacceptScale_(0),
119 : MCacceptFT_(0),
120 : MCtrial_(0),
121 : MCchunksize_(0),
122 : firstTime(true),
123 : do_reweight_(false),
124 : do_optsigmamean_(0),
125 : nsel_(1),
126 : iselect(0),
127 : optsigmamean_stride_(0),
128 1118 : decay_w_(1.)
129 : {
130 172 : parseFlag("DOSCORE", doscore_);
131 :
132 86 : bool noensemble = false;
133 172 : parseFlag("NOENSEMBLE", noensemble);
134 :
135 : // set up replica stuff
136 86 : master = (comm.Get_rank()==0);
137 86 : if(master) {
138 55 : nrep_ = multi_sim_comm.Get_size();
139 55 : replica_ = multi_sim_comm.Get_rank();
140 55 : if(noensemble) nrep_ = 1;
141 : } else {
142 31 : nrep_ = 0;
143 31 : replica_ = 0;
144 : }
145 86 : comm.Sum(&nrep_,1);
146 86 : comm.Sum(&replica_,1);
147 :
148 172 : parse("SELECTOR", selector_);
149 172 : parse("NSELECT", nsel_);
150 : // do checks
151 86 : if(selector_.length()>0 && nsel_<=1) error("With SELECTOR active, NSELECT must be greater than 1");
152 86 : if(selector_.length()==0 && nsel_>1) error("With NSELECT greater than 1, you must specify SELECTOR");
153 :
154 : // initialise firstTimeW
155 86 : firstTimeW.resize(nsel_, true);
156 :
157 : // reweight implies a different number of arguments (the last one must always be the bias)
158 172 : parseFlag("REWEIGHT", do_reweight_);
159 108 : if(do_reweight_&&getNumberOfArguments()!=1) error("To REWEIGHT one must provide one single bias as an argument");
160 86 : if(do_reweight_&&nrep_<2) error("REWEIGHT can only be used in parallel with 2 or more replicas");
161 258 : if(!getRestart()) average_weights_.resize(nsel_, vector<double> (nrep_, 1./static_cast<double>(nrep_)));
162 0 : else average_weights_.resize(nsel_, vector<double> (nrep_, 0.));
163 :
164 86 : unsigned averaging=0;
165 172 : parse("AVERAGING", averaging);
166 86 : if(averaging>0) {
167 0 : decay_w_ = 1./static_cast<double> (averaging);
168 0 : optsigmamean_stride_ = averaging;
169 : }
170 :
171 : string stringa_noise;
172 172 : parse("NOISETYPE",stringa_noise);
173 86 : if(stringa_noise=="GAUSS") noise_type_ = GAUSS;
174 79 : else if(stringa_noise=="MGAUSS") noise_type_ = MGAUSS;
175 10 : else if(stringa_noise=="OUTLIERS") noise_type_ = OUTLIERS;
176 9 : else if(stringa_noise=="MOUTLIERS") noise_type_ = MOUTLIERS;
177 1 : else if(stringa_noise=="GENERIC") noise_type_ = GENERIC;
178 0 : else error("Unknown noise type!");
179 :
180 86 : if(noise_type_== GENERIC) {
181 : string stringa_like;
182 2 : parse("LIKELIHOOD",stringa_like);
183 1 : if(stringa_like=="GAUSS") gen_likelihood_ = LIKE_GAUSS;
184 0 : else if(stringa_like=="LOGN") gen_likelihood_ = LIKE_LOGN;
185 0 : else error("Unknown likelihood type!");
186 :
187 2 : parse("DFTILDE",Dftilde_);
188 : }
189 :
190 172 : parse("WRITE_STRIDE",write_stride_);
191 172 : parse("STATUS_FILE",status_file_name_);
192 258 : if(status_file_name_=="") status_file_name_ = "MISTATUS"+getLabel();
193 0 : else status_file_name_ = status_file_name_+getLabel();
194 :
195 : string stringa_optsigma;
196 172 : parse("OPTSIGMAMEAN", stringa_optsigma);
197 86 : if(stringa_optsigma=="NONE") do_optsigmamean_=0;
198 4 : else if(stringa_optsigma=="SEM") do_optsigmamean_=1;
199 :
200 : vector<double> read_sigma_mean_;
201 172 : parseVector("SIGMA_MEAN0",read_sigma_mean_);
202 223 : if(!do_optsigmamean_ && read_sigma_mean_.size()==0 && !getRestart() && doscore_)
203 0 : error("If you don't use OPTSIGMAMEAN and you are not RESTARTING then you MUST SET SIGMA_MEAN0");
204 :
205 86 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
206 78 : if(read_sigma_mean_.size()>0) {
207 23 : sigma_mean2_.resize(read_sigma_mean_.size());
208 138 : for(unsigned i=0; i<read_sigma_mean_.size(); i++) sigma_mean2_[i]=read_sigma_mean_[i]*read_sigma_mean_[i];
209 : } else {
210 55 : sigma_mean2_.resize(1,0.000001);
211 : }
212 : } else {
213 8 : if(read_sigma_mean_.size()==1) {
214 8 : sigma_mean2_.resize(1, read_sigma_mean_[0]*read_sigma_mean_[0]);
215 0 : } else if(read_sigma_mean_.size()==0) {
216 0 : sigma_mean2_.resize(1, 0.000001);
217 : } else {
218 0 : error("If you want to use more than one SIGMA_MEAN0 you should use NOISETYPE=MGAUSS|MOUTLIERS");
219 : }
220 : }
221 :
222 172 : parseFlag("SCALEDATA", doscale_);
223 86 : if(doscale_) {
224 : string stringa_noise;
225 24 : parse("SCALE_PRIOR",stringa_noise);
226 12 : if(stringa_noise=="GAUSSIAN") scale_prior_ = SC_GAUSS;
227 12 : else if(stringa_noise=="FLAT") scale_prior_ = SC_FLAT;
228 0 : else error("Unknown SCALE_PRIOR type!");
229 24 : parse("SCALE0",scale_);
230 24 : parse("DSCALE",Dscale_);
231 12 : if(Dscale_<0.) error("DSCALE must be set when using SCALEDATA");
232 12 : if(scale_prior_==SC_GAUSS) {
233 0 : scale_mu_=scale_;
234 : } else {
235 24 : parse("SCALE_MIN",scale_min_);
236 24 : parse("SCALE_MAX",scale_max_);
237 12 : if(scale_max_<scale_min_) error("SCALE_MAX and SCALE_MIN must be set when using SCALE_PRIOR=FLAT");
238 : }
239 : }
240 :
241 172 : parseFlag("ADDOFFSET", dooffset_);
242 86 : if(dooffset_) {
243 : string stringa_noise;
244 4 : parse("OFFSET_PRIOR",stringa_noise);
245 2 : if(stringa_noise=="GAUSSIAN") offset_prior_ = SC_GAUSS;
246 2 : else if(stringa_noise=="FLAT") offset_prior_ = SC_FLAT;
247 0 : else error("Unknown OFFSET_PRIOR type!");
248 4 : parse("OFFSET0",offset_);
249 4 : parse("DOFFSET",Doffset_);
250 2 : if(offset_prior_==SC_GAUSS) {
251 0 : offset_mu_=offset_;
252 0 : if(Doffset_<0.) error("DOFFSET must be set when using OFFSET_PRIOR=GAUSS");
253 : } else {
254 4 : parse("OFFSET_MIN",offset_min_);
255 4 : parse("OFFSET_MAX",offset_max_);
256 2 : if(Doffset_<0) Doffset_ = 0.05*(offset_max_ - offset_min_);
257 2 : if(offset_max_<offset_min_) error("OFFSET_MAX and OFFSET_MIN must be set when using OFFSET_PRIOR=FLAT");
258 : }
259 : }
260 :
261 : // regression with zero intercept
262 172 : parse("REGRES_ZERO", nregres_zero_);
263 86 : if(nregres_zero_>0) {
264 : // set flag
265 0 : doregres_zero_=true;
266 : // check if already sampling scale and offset
267 0 : if(doscale_) error("REGRES_ZERO and SCALEDATA are mutually exclusive");
268 0 : if(dooffset_) error("REGRES_ZERO and ADDOFFSET are mutually exclusive");
269 : }
270 :
271 : vector<double> readsigma;
272 172 : parseVector("SIGMA0",readsigma);
273 94 : if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&readsigma.size()>1)
274 0 : error("If you want to use more than one SIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
275 86 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
276 78 : sigma_.resize(readsigma.size());
277 78 : sigma_=readsigma;
278 8 : } else sigma_.resize(1, readsigma[0]);
279 :
280 : vector<double> readsigma_min;
281 172 : parseVector("SIGMA_MIN",readsigma_min);
282 94 : if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&readsigma_min.size()>1)
283 0 : error("If you want to use more than one SIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
284 86 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
285 78 : sigma_min_.resize(readsigma_min.size());
286 78 : sigma_min_=readsigma_min;
287 8 : } else sigma_min_.resize(1, readsigma_min[0]);
288 :
289 : vector<double> readsigma_max;
290 172 : parseVector("SIGMA_MAX",readsigma_max);
291 94 : if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&readsigma_max.size()>1)
292 0 : error("If you want to use more than one SIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
293 86 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
294 78 : sigma_max_.resize(readsigma_max.size());
295 78 : sigma_max_=readsigma_max;
296 8 : } else sigma_max_.resize(1, readsigma_max[0]);
297 :
298 86 : if(sigma_max_.size()!=sigma_min_.size()) error("The number of values for SIGMA_MIN and SIGMA_MAX must be the same");
299 :
300 : vector<double> read_dsigma;
301 172 : parseVector("DSIGMA",read_dsigma);
302 94 : if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&read_dsigma.size()>1)
303 0 : error("If you want to use more than one DSIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
304 86 : if(read_dsigma.size()>0) {
305 31 : Dsigma_.resize(read_dsigma.size());
306 31 : Dsigma_=read_dsigma;
307 : } else {
308 55 : Dsigma_.resize(sigma_max_.size());
309 385 : for(unsigned i=0; i<sigma_max_.size(); i++) Dsigma_[i] = 0.05*(sigma_max_[i] - sigma_min_[i]);
310 : }
311 :
312 : // Monte Carlo stuff
313 172 : parse("MC_STEPS",MCsteps_);
314 172 : parse("MC_STRIDE",MCstride_);
315 172 : parse("MC_CHUNKSIZE", MCchunksize_);
316 : // get temperature
317 86 : double temp=0.0;
318 172 : parse("TEMP",temp);
319 117 : if(temp>0.0) kbt_=plumed.getAtoms().getKBoltzmann()*temp;
320 110 : else kbt_=plumed.getAtoms().getKbT();
321 86 : if(kbt_==0.0&&doscore_) error("Unless the MD engine passes the temperature to plumed, you must specify it using TEMP");
322 :
323 : // initialize random seed
324 : unsigned iseed;
325 86 : if(master) iseed = time(NULL)+replica_;
326 31 : else iseed = 0;
327 86 : comm.Sum(&iseed, 1);
328 86 : random[0].setSeed(-iseed);
329 : // Random chunk
330 86 : if(master) iseed = time(NULL)+replica_;
331 31 : else iseed = 0;
332 86 : comm.Sum(&iseed, 1);
333 86 : random[2].setSeed(-iseed);
334 86 : if(doscale_||dooffset_) {
335 : // in this case we want the same seed everywhere
336 14 : iseed = time(NULL);
337 14 : if(master&&nrep_>1) multi_sim_comm.Bcast(iseed,0);
338 14 : comm.Bcast(iseed,0);
339 14 : random[1].setSeed(-iseed);
340 : }
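 :
 : // note: the scale/offset move in doMonteCarlo() below is collective, i.e. the
 : // proposed-move energies are summed over all replicas, so random[1] must
 : // generate the same sequence on every replica; hence the single broadcast seed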
341 :
342 : // outfile stuff
343 86 : if(write_stride_>0&&doscore_) {
344 31 : sfile_.link(*this);
345 31 : sfile_.open(status_file_name_);
346 : }
347 :
348 86 : }
349 :
350 516 : MetainferenceBase::~MetainferenceBase()
351 : {
352 86 : if(sfile_.isOpen()) sfile_.close();
353 86 : }
354 :
355 31 : void MetainferenceBase::Initialise(const unsigned input)
356 : {
357 : setNarg(input);
358 62 : if(narg!=parameters.size()) {
359 0 : std::string num1; Tools::convert(parameters.size(),num1);
360 0 : std::string num2; Tools::convert(narg,num2);
361 0 : std::string msg = "The number of experimental values " + num1 +" must be the same of the calculated values " + num2;
362 0 : error(msg);
363 : }
364 :
365 : // resize vector for sigma_mean history
366 31 : sigma_mean2_last_.resize(nsel_);
367 93 : for(unsigned i=0; i<nsel_; i++) sigma_mean2_last_[i].resize(narg);
368 31 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
369 23 : if(sigma_mean2_.size()==1) {
370 23 : double tmp = sigma_mean2_[0];
371 23 : sigma_mean2_.resize(narg, tmp);
372 0 : } else if(sigma_mean2_.size()>1&&sigma_mean2_.size()!=narg) {
373 0 : error("SIGMA_MEAN0 can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
374 : }
375 : // set the initial value for the history
376 2516 : for(unsigned i=0; i<nsel_; i++) for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j].push_back(sigma_mean2_[j]);
377 : } else {
378 : // set the initial value for the history
379 43 : for(unsigned i=0; i<nsel_; i++) for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j].push_back(sigma_mean2_[0]);
380 : }
381 :
382 : // set sigma_bias
383 31 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
384 23 : if(sigma_.size()==1) {
385 23 : double tmp = sigma_[0];
386 23 : sigma_.resize(narg, tmp);
387 0 : } else if(sigma_.size()>1&&sigma_.size()!=narg) {
388 0 : error("SIGMA0 can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
389 : }
390 23 : if(sigma_min_.size()==1) {
391 23 : double tmp = sigma_min_[0];
392 23 : sigma_min_.resize(narg, tmp);
393 0 : } else if(sigma_min_.size()>1&&sigma_min_.size()!=narg) {
394 0 : error("SIGMA_MIN can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
395 : }
396 23 : if(sigma_max_.size()==1) {
397 23 : double tmp = sigma_max_[0];
398 23 : sigma_max_.resize(narg, tmp);
399 0 : } else if(sigma_max_.size()>1&&sigma_max_.size()!=narg) {
400 0 : error("SIGMA_MAX can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
401 : }
402 23 : if(Dsigma_.size()==1) {
403 23 : double tmp = Dsigma_[0];
404 23 : Dsigma_.resize(narg, tmp);
405 0 : } else if(Dsigma_.size()>1&&Dsigma_.size()!=narg) {
406 0 : error("DSIGMA can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
407 : }
408 : }
409 :
410 62 : IFile restart_sfile;
411 31 : restart_sfile.link(*this);
412 62 : if(getRestart()&&restart_sfile.FileExist(status_file_name_)) {
413 0 : firstTime = false;
414 0 : for(unsigned i=0; i<nsel_; i++) firstTimeW[i] = false;
415 0 : restart_sfile.open(status_file_name_);
416 0 : log.printf(" Restarting from %s\n", status_file_name_.c_str());
417 : double dummy;
418 0 : if(restart_sfile.scanField("time",dummy)) {
419 : // nsel
420 0 : for(unsigned i=0; i<sigma_mean2_last_.size(); i++) {
421 : std::string msg_i;
422 0 : Tools::convert(i,msg_i);
423 : // narg
424 0 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
425 0 : for(unsigned j=0; j<narg; ++j) {
426 : std::string msg_j;
427 0 : Tools::convert(j,msg_j);
428 0 : std::string msg = msg_i+"_"+msg_j;
429 : double read_sm;
430 0 : restart_sfile.scanField("sigmaMean_"+msg,read_sm);
431 0 : sigma_mean2_last_[i][j][0] = read_sm*read_sm;
432 : }
433 : }
434 0 : if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
435 : double read_sm;
436 : std::string msg_j;
437 0 : Tools::convert(0,msg_j);
438 0 : std::string msg = msg_i+"_"+msg_j;
439 0 : restart_sfile.scanField("sigmaMean_"+msg,read_sm);
440 0 : for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j][0] = read_sm*read_sm;
441 : }
442 : }
443 :
444 0 : for(unsigned i=0; i<sigma_.size(); ++i) {
445 : std::string msg;
446 0 : Tools::convert(i,msg);
447 0 : restart_sfile.scanField("sigma_"+msg,sigma_[i]);
448 : }
449 0 : if(noise_type_==GENERIC) {
450 0 : for(unsigned i=0; i<ftilde_.size(); ++i) {
451 : std::string msg;
452 0 : Tools::convert(i,msg);
453 0 : restart_sfile.scanField("ftilde_"+msg,ftilde_[i]);
454 : }
455 : }
456 0 : restart_sfile.scanField("scale0_",scale_);
457 0 : restart_sfile.scanField("offset0_",offset_);
458 :
459 0 : for(unsigned i=0; i<nsel_; i++) {
460 : std::string msg;
461 0 : Tools::convert(i,msg);
462 : double tmp_w;
463 0 : restart_sfile.scanField("weight_"+msg,tmp_w);
464 0 : if(master) {
465 0 : average_weights_[i][replica_] = tmp_w;
466 0 : if(nrep_>1) multi_sim_comm.Sum(&average_weights_[i][0], nrep_);
467 : }
468 0 : comm.Sum(&average_weights_[i][0], nrep_);
469 : }
470 :
471 : }
472 0 : restart_sfile.scanField();
473 0 : restart_sfile.close();
474 : }
475 :
476 62 : addComponentWithDerivatives("score");
477 62 : componentIsNotPeriodic("score");
478 62 : valueScore=getPntrToComponent("score");
479 :
480 31 : if(do_reweight_) {
481 44 : addComponent("biasDer");
482 44 : componentIsNotPeriodic("biasDer");
483 44 : addComponent("weight");
484 44 : componentIsNotPeriodic("weight");
485 : }
486 :
487 31 : if(doscale_ || doregres_zero_) {
488 24 : addComponent("scale");
489 24 : componentIsNotPeriodic("scale");
490 24 : valueScale=getPntrToComponent("scale");
491 : }
492 :
493 31 : if(dooffset_) {
494 4 : addComponent("offset");
495 4 : componentIsNotPeriodic("offset");
496 4 : valueOffset=getPntrToComponent("offset");
497 : }
498 :
499 31 : if(dooffset_||doscale_) {
500 28 : addComponent("acceptScale");
501 28 : componentIsNotPeriodic("acceptScale");
502 28 : valueAcceptScale=getPntrToComponent("acceptScale");
503 : }
504 :
505 31 : if(noise_type_==GENERIC) {
506 2 : addComponent("acceptFT");
507 2 : componentIsNotPeriodic("acceptFT");
508 2 : valueAcceptFT=getPntrToComponent("acceptFT");
509 : }
510 :
511 62 : addComponent("acceptSigma");
512 62 : componentIsNotPeriodic("acceptSigma");
513 62 : valueAccept=getPntrToComponent("acceptSigma");
514 :
515 31 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
516 7456 : for(unsigned i=0; i<sigma_mean2_.size(); ++i) {
517 2470 : std::string num; Tools::convert(i,num);
518 7410 : addComponent("sigmaMean_"+num); componentIsNotPeriodic("sigmaMean_"+num);
519 7410 : valueSigmaMean.push_back(getPntrToComponent("sigmaMean_"+num));
520 7410 : getPntrToComponent("sigmaMean_"+num)->set(sqrt(sigma_mean2_[i]));
521 7410 : addComponent("sigma_"+num); componentIsNotPeriodic("sigma_"+num);
522 7410 : valueSigma.push_back(getPntrToComponent("sigma_"+num));
523 7410 : getPntrToComponent("sigma_"+num)->set(sigma_[i]);
524 2470 : if(noise_type_==GENERIC) {
525 6 : addComponent("ftilde_"+num); componentIsNotPeriodic("ftilde_"+num);
526 6 : valueFtilde.push_back(getPntrToComponent("ftilde_"+num));
527 : }
528 : }
529 : } else {
530 24 : addComponent("sigmaMean"); componentIsNotPeriodic("sigmaMean");
531 24 : valueSigmaMean.push_back(getPntrToComponent("sigmaMean"));
532 24 : getPntrToComponent("sigmaMean")->set(sqrt(sigma_mean2_[0]));
533 24 : addComponent("sigma"); componentIsNotPeriodic("sigma");
534 24 : valueSigma.push_back(getPntrToComponent("sigma"));
535 24 : getPntrToComponent("sigma")->set(sigma_[0]);
536 : }
537 :
538 31 : switch(noise_type_) {
539 1 : case GENERIC:
540 1 : log.printf(" with general metainference ");
541 1 : if(gen_likelihood_==LIKE_GAUSS) log.printf(" and a gaussian likelihood\n");
542 0 : else if(gen_likelihood_==LIKE_LOGN) log.printf(" and a log-normal likelihood\n");
543 1 : log.printf(" ensemble average parameter sampled with a step %lf of sigma_mean\n", Dftilde_);
544 : break;
545 7 : case GAUSS:
546 7 : log.printf(" with gaussian noise and a single noise parameter for all the data\n");
547 : break;
548 14 : case MGAUSS:
549 14 : log.printf(" with gaussian noise and a noise parameter for each data point\n");
550 : break;
551 1 : case OUTLIERS:
552 1 : log.printf(" with long tailed gaussian noise and a single noise parameter for all the data\n");
553 : break;
554 8 : case MOUTLIERS:
555 8 : log.printf(" with long tailed gaussian noise and a noise parameter for each data point\n");
556 : break;
557 : }
558 :
559 31 : if(doscale_) {
560 12 : log.printf(" sampling a common scaling factor with:\n");
561 12 : log.printf(" initial scale parameter %f\n",scale_);
562 12 : if(scale_prior_==SC_GAUSS) {
563 0 : log.printf(" gaussian prior with mean %f and width %f\n",scale_mu_,Dscale_);
564 : }
565 12 : if(scale_prior_==SC_FLAT) {
566 12 : log.printf(" flat prior between %f - %f\n",scale_min_,scale_max_);
567 12 : log.printf(" maximum MC move of scale parameter %f\n",Dscale_);
568 : }
569 : }
570 :
571 31 : if(dooffset_) {
572 2 : log.printf(" sampling a common offset with:\n");
573 2 : log.printf(" initial offset parameter %f\n",offset_);
574 2 : if(offset_prior_==SC_GAUSS) {
575 0 : log.printf(" gaussian prior with mean %f and width %f\n",offset_mu_,Doffset_);
576 : }
577 2 : if(offset_prior_==SC_FLAT) {
578 2 : log.printf(" flat prior between %f - %f\n",offset_min_,offset_max_);
579 2 : log.printf(" maximum MC move of offset parameter %f\n",Doffset_);
580 : }
581 : }
582 :
583 31 : log.printf(" number of experimental data points %u\n",narg);
584 31 : log.printf(" number of replicas %u\n",nrep_);
585 31 : log.printf(" initial data uncertainties");
586 7496 : for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f", sigma_[i]);
587 31 : log.printf("\n");
588 31 : log.printf(" minimum data uncertainties");
589 7496 : for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f",sigma_min_[i]);
590 31 : log.printf("\n");
591 31 : log.printf(" maximum data uncertainties");
592 7496 : for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f",sigma_max_[i]);
593 31 : log.printf("\n");
594 31 : log.printf(" maximum MC move of data uncertainties");
595 7496 : for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f",Dsigma_[i]);
596 31 : log.printf("\n");
597 31 : log.printf(" temperature of the system %f\n",kbt_);
598 31 : log.printf(" MC steps %u\n",MCsteps_);
599 31 : log.printf(" MC stride %u\n",MCstride_);
600 31 : log.printf(" initial standard errors of the mean");
601 7496 : for(unsigned i=0; i<sigma_mean2_.size(); ++i) log.printf(" %f", sqrt(sigma_mean2_[i]));
602 31 : log.printf("\n");
603 :
604 : // resize the vector of metainference derivatives and the vector of back-calculated data
605 31 : metader_.resize(narg, 0.);
606 31 : calc_data_.resize(narg, 0.);
607 :
608 93 : log<<" Bibliography "<<plumed.cite("Bonomi, Camilloni, Cavalli, Vendruscolo, Sci. Adv. 2, e150117 (2016)");
609 75 : if(do_reweight_) log<<plumed.cite("Bonomi, Camilloni, Vendruscolo, Sci. Rep. 6, 31232 (2016)");
610 39 : if(do_optsigmamean_>0) log<<plumed.cite("Loehr, Jussupow, Camilloni, J. Chem. Phys. 146, 165102 (2017)");
611 93 : log<<plumed.cite("Bonomi, Camilloni, Bioinformatics, 33, 3999 (2017)");
612 31 : log<<"\n";
613 31 : }
614 :
615 0 : void MetainferenceBase::Selector()
616 : {
617 0 : iselect = 0;
618 : // set the value of selector for REM-like stuff
619 0 : if(selector_.length()>0) iselect = static_cast<unsigned>(plumed.passMap[selector_]);
620 0 : }
621 :
622 12 : double MetainferenceBase::getEnergySP(const vector<double> &mean, const vector<double> &sigma,
623 : const double scale, const double offset)
624 : {
625 12 : const double scale2 = scale*scale;
626 12 : const double sm2 = sigma_mean2_[0];
627 12 : const double ss2 = sigma[0]*sigma[0] + scale2*sm2;
628 12 : const double sss = sigma[0]*sigma[0] + sm2;
629 :
630 : double ene = 0.0;
631 36 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
632 : {
633 24 : #pragma omp for reduction( + : ene)
634 24 : for(unsigned i=0; i<narg; ++i) {
635 252 : const double dev = scale*mean[i]-parameters[i]+offset;
636 84 : const double a2 = 0.5*dev*dev + ss2;
637 84 : ene += std::log(2.0*a2/(1.0-exp(-a2/sm2)));
638 : }
639 : }
640 : // add one single Jeffreys prior and one normalisation per data point
641 12 : ene += 0.5*std::log(sss) + static_cast<double>(narg)*0.5*std::log(0.5*M_PI*M_PI/ss2);
642 12 : if(doscale_ || doregres_zero_) ene += 0.5*std::log(sss);
643 12 : if(dooffset_) ene += 0.5*std::log(sss);
644 12 : return kbt_ * ene;
645 : }
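 :
 : // In the notation above, the OUTLIERS score computed by getEnergySP() is
 : //   E = kT * [ sum_i log( 2*a2_i / (1 - exp(-a2_i/sm2)) ) + priors ]
 : // with dev_i = scale*mean_i - parameters_i + offset and a2_i = 0.5*dev_i^2 + ss2,
 : // i.e. a long-tailed (outlier-tolerant) likelihood with a single uncertainty
 : // parameter shared by all data points.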
646 :
647 5328 : double MetainferenceBase::getEnergySPE(const vector<double> &mean, const vector<double> &sigma,
648 : const double scale, const double offset)
649 : {
650 5328 : const double scale2 = scale*scale;
651 : double ene = 0.0;
652 15984 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
653 : {
654 10656 : #pragma omp for reduction( + : ene)
655 10656 : for(unsigned i=0; i<narg; ++i) {
656 42624 : const double sm2 = sigma_mean2_[i];
657 42624 : const double ss2 = sigma[i]*sigma[i] + scale2*sm2;
658 21312 : const double sss = sigma[i]*sigma[i] + sm2;
659 63936 : const double dev = scale*mean[i]-parameters[i]+offset;
660 21312 : const double a2 = 0.5*dev*dev + ss2;
661 21312 : ene += 0.5*std::log(sss) + 0.5*std::log(0.5*M_PI*M_PI/ss2) + std::log(2.0*a2/(1.0-exp(-a2/sm2)));
662 42624 : if(doscale_ || doregres_zero_) ene += 0.5*std::log(sss);
663 21312 : if(dooffset_) ene += 0.5*std::log(sss);
664 : }
665 : }
666 5328 : return kbt_ * ene;
667 : }
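 :
 : // getEnergySPE() is the same long-tailed model as getEnergySP(), but with one
 : // sigma (and one sigma_mean) per data point, so the normalisation and the
 : // Jeffreys terms enter inside the loop rather than once at the end.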
668 :
669 48 : double MetainferenceBase::getEnergyMIGEN(const vector<double> &mean, const vector<double> &ftilde, const vector<double> &sigma,
670 : const double scale, const double offset)
671 : {
672 : double ene = 0.0;
673 144 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
674 : {
675 96 : #pragma omp for reduction( + : ene)
676 96 : for(unsigned i=0; i<narg; ++i) {
677 192 : const double inv_sb2 = 1./(sigma[i]*sigma[i]);
678 96 : const double inv_sm2 = 1./sigma_mean2_[i];
679 : double devb = 0;
680 384 : if(gen_likelihood_==LIKE_GAUSS) devb = scale*ftilde[i]-parameters[i]+offset;
681 0 : else if(gen_likelihood_==LIKE_LOGN) devb = std::log(scale*ftilde[i]/parameters[i]);
682 288 : double devm = mean[i] - ftilde[i];
683 : // deviation + normalisation + jeffrey
684 : double normb = 0.;
685 192 : if(gen_likelihood_==LIKE_GAUSS) normb = -0.5*std::log(0.5/M_PI*inv_sb2);
686 0 : else if(gen_likelihood_==LIKE_LOGN) normb = -0.5*std::log(0.5/M_PI*inv_sb2/(parameters[i]*parameters[i]));
687 96 : const double normm = -0.5*std::log(0.5/M_PI*inv_sm2);
688 96 : const double jeffreys = -0.5*std::log(2.*inv_sb2);
689 96 : ene += 0.5*devb*devb*inv_sb2 + 0.5*devm*devm*inv_sm2 + normb + normm + jeffreys;
690 96 : if(doscale_ || doregres_zero_) ene += jeffreys;
691 96 : if(dooffset_) ene += jeffreys;
692 : }
693 : }
694 48 : return kbt_ * ene;
695 : }
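 :
 : // In GENERIC metainference each data point is scored through the auxiliary
 : // variable ftilde: devb couples ftilde to the experimental value (Gaussian or
 : // log-normal likelihood), devm couples it to the replica-averaged mean, and
 : // each term carries its own normalisation plus a Jeffreys prior on sigma.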
696 :
697 554 : double MetainferenceBase::getEnergyGJ(const vector<double> &mean, const vector<double> &sigma,
698 : const double scale, const double offset)
699 : {
700 554 : const double scale2 = scale*scale;
701 1108 : const double inv_s2 = 1./(sigma[0]*sigma[0] + scale2*sigma_mean2_[0]);
702 554 : const double inv_sss = 1./(sigma[0]*sigma[0] + sigma_mean2_[0]);
703 :
704 : double ene = 0.0;
705 1662 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
706 : {
707 1108 : #pragma omp for reduction( + : ene)
708 1108 : for(unsigned i=0; i<narg; ++i) {
709 4878 : double dev = scale*mean[i]-parameters[i]+offset;
710 1626 : ene += 0.5*dev*dev*inv_s2;
711 : }
712 : }
713 554 : const double normalisation = -0.5*std::log(0.5/M_PI*inv_s2);
714 554 : const double jeffreys = -0.5*std::log(2.*inv_sss);
715 : // add the Jeffreys prior in case of one sigma for all data points + one normalisation per data point
716 554 : ene += jeffreys + static_cast<double>(narg)*normalisation;
717 554 : if(doscale_ || doregres_zero_) ene += jeffreys;
718 554 : if(dooffset_) ene += jeffreys;
719 :
720 554 : return kbt_ * ene;
721 : }
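 :
 : // For GAUSS noise the score is a plain chi-square plus constants,
 : //   E = kT * [ 0.5 * sum_i dev_i^2 * inv_s2 + narg*normalisation + jeffreys ],
 : // where 1/inv_s2 = sigma^2 + scale^2*sigma_mean^2 combines the sampled
 : // uncertainty with the (scaled) error of the replica-averaged mean.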
722 :
723 368 : double MetainferenceBase::getEnergyGJE(const vector<double> &mean, const vector<double> &sigma,
724 : const double scale, const double offset)
725 : {
726 368 : const double scale2 = scale*scale;
727 :
728 : double ene = 0.0;
729 1104 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
730 : {
731 736 : #pragma omp for reduction( + : ene)
732 736 : for(unsigned i=0; i<narg; ++i) {
733 23712 : const double inv_s2 = 1./(sigma[i]*sigma[i] + scale2*sigma_mean2_[i]);
734 7904 : const double inv_sss = 1./(sigma[i]*sigma[i] + sigma_mean2_[i]);
735 23712 : double dev = scale*mean[i]-parameters[i]+offset;
736 : // deviation + normalisation + jeffrey
737 7904 : const double normalisation = -0.5*std::log(0.5/M_PI*inv_s2);
738 7904 : const double jeffreys = -0.5*std::log(2.*inv_sss);
739 7904 : ene += 0.5*dev*dev*inv_s2 + normalisation + jeffreys;
740 8480 : if(doscale_ || doregres_zero_) ene += jeffreys;
741 7904 : if(dooffset_) ene += jeffreys;
742 : }
743 : }
744 368 : return kbt_ * ene;
745 : }
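 :
 : // getEnergyGJE() is the per-data-point version of getEnergyGJ(): every point
 : // has its own inv_s2 and contributes its own normalisation and Jeffreys terms.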
746 :
747 2225 : void MetainferenceBase::doMonteCarlo(const vector<double> &mean_)
748 : {
749 2225 : if(getStep()%MCstride_!=0||getExchangeStep()) return;
750 :
751 : // calculate old energy with the updated coordinates
752 : double old_energy=0.;
753 :
754 2225 : switch(noise_type_) {
755 271 : case GAUSS:
756 271 : old_energy = getEnergyGJ(mean_,sigma_,scale_,offset_);
757 : break;
758 160 : case MGAUSS:
759 160 : old_energy = getEnergyGJE(mean_,sigma_,scale_,offset_);
760 : break;
761 6 : case OUTLIERS:
762 6 : old_energy = getEnergySP(mean_,sigma_,scale_,offset_);
763 : break;
764 1776 : case MOUTLIERS:
765 1776 : old_energy = getEnergySPE(mean_,sigma_,scale_,offset_);
766 : break;
767 12 : case GENERIC:
768 12 : old_energy = getEnergyMIGEN(mean_,ftilde_,sigma_,scale_,offset_);
769 : break;
770 : }
771 :
772 : // Create vector of random sigma indices
773 : vector<unsigned> indices;
774 2225 : if (MCchunksize_ > 0) {
775 0 : for (unsigned j=0; j<sigma_.size(); j++) {
776 0 : indices.push_back(j);
777 : }
778 0 : random[2].Shuffle(indices);
779 : }
780 : bool breaknow = false;
781 :
782 : // cycle on MC steps
783 6675 : for(unsigned i=0; i<MCsteps_; ++i) {
784 :
785 2225 : MCtrial_++;
786 :
787 : // propose move for ftilde
788 2225 : vector<double> new_ftilde(sigma_.size());
789 2225 : new_ftilde = ftilde_;
790 :
791 2225 : if(noise_type_==GENERIC) {
792 : // change all sigmas
793 96 : for(unsigned j=0; j<sigma_.size(); j++) {
794 24 : const double r3 = random[0].Gaussian();
795 48 : const double ds3 = Dftilde_*sqrt(sigma_mean2_[j])*r3;
796 48 : new_ftilde[j] = ftilde_[j] + ds3;
797 : }
798 : // calculate new energy
799 12 : double new_energy = getEnergyMIGEN(mean_,new_ftilde,sigma_,scale_,offset_);
800 :
801 : // accept or reject
802 12 : const double delta = ( new_energy - old_energy ) / kbt_;
803 : // if delta is negative always accept move
804 12 : if( delta <= 0.0 ) {
805 : old_energy = new_energy;
806 0 : ftilde_ = new_ftilde;
807 0 : MCacceptFT_++;
808 : // otherwise extract random number
809 : } else {
810 12 : const double s = random[0].RandU01();
811 12 : if( s < exp(-delta) ) {
812 : old_energy = new_energy;
813 0 : ftilde_ = new_ftilde;
814 0 : MCacceptFT_++;
815 : }
816 : }
817 : }
818 :
819 : // propose move for scale and/or offset
820 2225 : double new_scale = scale_;
821 2225 : double new_offset = offset_;
822 2225 : if(doscale_||dooffset_) {
823 1848 : if(doscale_) {
824 1824 : if(scale_prior_==SC_FLAT) {
825 1824 : const double r1 = random[1].Gaussian();
826 1824 : const double ds1 = Dscale_*r1;
827 1824 : new_scale += ds1;
828 : // check boundaries
829 1824 : if(new_scale > scale_max_) {new_scale = 2.0 * scale_max_ - new_scale;}
830 1824 : if(new_scale < scale_min_) {new_scale = 2.0 * scale_min_ - new_scale;}
831 : } else {
832 0 : const double r1 = random[1].Gaussian();
833 0 : const double ds1 = 0.5*(scale_mu_-new_scale)+Dscale_*exp(1)/M_PI*r1;
834 0 : new_scale += ds1;
835 : }
836 : }
837 :
838 1848 : if(dooffset_) {
839 24 : if(offset_prior_==SC_FLAT) {
840 24 : const double r1 = random[1].Gaussian();
841 24 : const double ds1 = Doffset_*r1;
842 24 : new_offset += ds1;
843 : // check boundaries
844 24 : if(new_offset > offset_max_) {new_offset = 2.0 * offset_max_ - new_offset;}
845 24 : if(new_offset < offset_min_) {new_offset = 2.0 * offset_min_ - new_offset;}
846 : } else {
847 0 : const double r1 = random[1].Gaussian();
848 0 : const double ds1 = 0.5*(offset_mu_-new_offset)+Doffset_*exp(1)/M_PI*r1;
849 0 : new_offset += ds1;
850 : }
851 : }
852 :
853 : // calculate new energy
854 : double new_energy = 0.;
855 :
856 1848 : switch(noise_type_) {
857 12 : case GAUSS:
858 12 : new_energy = getEnergyGJ(mean_,sigma_,new_scale,new_offset);
859 : break;
860 48 : case MGAUSS:
861 48 : new_energy = getEnergyGJE(mean_,sigma_,new_scale,new_offset);
862 : break;
863 0 : case OUTLIERS:
864 0 : new_energy = getEnergySP(mean_,sigma_,new_scale,new_offset);
865 : break;
866 1776 : case MOUTLIERS:
867 1776 : new_energy = getEnergySPE(mean_,sigma_,new_scale,new_offset);
868 : break;
869 12 : case GENERIC:
870 12 : new_energy = getEnergyMIGEN(mean_,ftilde_,sigma_,new_scale,new_offset);
871 : break;
872 : }
873 : // for the scale we need to consider the total energy
874 1848 : vector<double> totenergies(2);
875 1848 : if(master) {
876 936 : totenergies[0] = old_energy;
877 936 : totenergies[1] = new_energy;
878 936 : if(nrep_>1) multi_sim_comm.Sum(totenergies);
879 : } else {
880 912 : totenergies[0] = 0;
881 912 : totenergies[1] = 0;
882 : }
883 1848 : comm.Sum(totenergies);
884 :
885 : // accept or reject
886 1848 : const double delta = ( totenergies[1] - totenergies[0] ) / kbt_;
887 : // if delta is negative always accept move
888 1848 : if( delta <= 0.0 ) {
889 : old_energy = new_energy;
890 1836 : scale_ = new_scale;
891 1836 : offset_ = new_offset;
892 1836 : MCacceptScale_++;
893 : // otherwise extract random number
894 : } else {
895 12 : double s = random[1].RandU01();
896 12 : if( s < exp(-delta) ) {
897 : old_energy = new_energy;
898 0 : scale_ = new_scale;
899 0 : offset_ = new_offset;
900 0 : MCacceptScale_++;
901 : }
902 : }
903 : }
904 :
905 : // propose move for sigma
906 2225 : vector<double> new_sigma(sigma_.size());
907 2225 : new_sigma = sigma_;
908 :
909 : // change MCchunksize_ sigmas
910 2225 : if (MCchunksize_ > 0) {
911 0 : if ((MCchunksize_ * i) >= sigma_.size()) {
912 : // This means we are not moving any sigma, so we should break immediately
913 : breaknow = true;
914 : }
915 :
916 : // change random sigmas
917 0 : for(unsigned j=0; j<MCchunksize_; j++) {
918 0 : const unsigned shuffle_index = j + MCchunksize_ * i;
919 0 : if (shuffle_index >= sigma_.size()) {
920 : // Going any further will segfault but we should still evaluate the sigmas we changed
921 : break;
922 : }
923 0 : const unsigned index = indices[shuffle_index];
924 0 : const double r2 = random[0].Gaussian();
925 0 : const double ds2 = Dsigma_[index]*r2;
926 0 : new_sigma[index] = sigma_[index] + ds2;
927 : // check boundaries
928 0 : if(new_sigma[index] > sigma_max_[index]) {new_sigma[index] = 2.0 * sigma_max_[index] - new_sigma[index];}
929 0 : if(new_sigma[index] < sigma_min_[index]) {new_sigma[index] = 2.0 * sigma_min_[index] - new_sigma[index];}
930 : }
931 : } else {
932 : // change all sigmas
933 38233 : for(unsigned j=0; j<sigma_.size(); j++) {
934 11261 : const double r2 = random[0].Gaussian();
935 11261 : const double ds2 = Dsigma_[j]*r2;
936 22522 : new_sigma[j] = sigma_[j] + ds2;
937 : // check boundaries
938 22522 : if(new_sigma[j] > sigma_max_[j]) {new_sigma[j] = 2.0 * sigma_max_[j] - new_sigma[j];}
939 22522 : if(new_sigma[j] < sigma_min_[j]) {new_sigma[j] = 2.0 * sigma_min_[j] - new_sigma[j];}
940 : }
941 : }
942 :
943 2225 : if (breaknow) {
944 : // We didn't move any sigmas, so there is no sense in evaluating anything
945 : break;
946 : }
947 :
948 : // calculate new energy
949 : double new_energy=0.;
950 2225 : switch(noise_type_) {
951 271 : case GAUSS:
952 271 : new_energy = getEnergyGJ(mean_,new_sigma,scale_,offset_);
953 : break;
954 160 : case MGAUSS:
955 160 : new_energy = getEnergyGJE(mean_,new_sigma,scale_,offset_);
956 : break;
957 6 : case OUTLIERS:
958 6 : new_energy = getEnergySP(mean_,new_sigma,scale_,offset_);
959 : break;
960 1776 : case MOUTLIERS:
961 1776 : new_energy = getEnergySPE(mean_,new_sigma,scale_,offset_);
962 : break;
963 12 : case GENERIC:
964 12 : new_energy = getEnergyMIGEN(mean_,ftilde_,new_sigma,scale_,offset_);
965 : break;
966 : }
967 :
968 : // accept or reject
969 2225 : const double delta = ( new_energy - old_energy ) / kbt_;
970 : // if delta is negative always accept move
971 2225 : if( delta <= 0.0 ) {
972 : old_energy = new_energy;
973 2213 : sigma_ = new_sigma;
974 2213 : MCaccept_++;
975 : // otherwise extract random number
976 : } else {
977 12 : const double s = random[0].RandU01();
978 12 : if( s < exp(-delta) ) {
979 : old_energy = new_energy;
980 0 : sigma_ = new_sigma;
981 0 : MCaccept_++;
982 : }
983 : }
984 :
985 : }
986 : /* save the result of the sampling */
987 2225 : double accept = static_cast<double>(MCaccept_) / static_cast<double>(MCtrial_);
988 2225 : valueAccept->set(accept);
989 2225 : if(doscale_ || doregres_zero_) valueScale->set(scale_);
990 2225 : if(dooffset_) valueOffset->set(offset_);
991 2225 : if(doscale_||dooffset_) {
992 1848 : accept = static_cast<double>(MCacceptScale_) / static_cast<double>(MCtrial_);
993 1848 : valueAcceptScale->set(accept);
994 : }
995 60755 : for(unsigned i=0; i<sigma_.size(); i++) valueSigma[i]->set(sigma_[i]);
996 2225 : if(noise_type_==GENERIC) {
997 12 : accept = static_cast<double>(MCacceptFT_) / static_cast<double>(MCtrial_);
998 12 : valueAcceptFT->set(accept);
999 144 : for(unsigned i=0; i<sigma_.size(); i++) valueFtilde[i]->set(ftilde_[i]);
1000 : }
1001 : }
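 :
 : // All three accept/reject blocks above implement the standard Metropolis
 : // criterion: a move with energy change dE is accepted with probability
 : // min(1, exp(-dE/kbt_)), i.e. always when dE <= 0 and otherwise only when a
 : // uniform random number falls below exp(-dE/kbt_).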
1002 :
1003 : /*
1004 : In the following energy-force functions we don't add the normalisation and the Jeffreys priors
1005 : because they are not needed for the forces; the correct Metainference energy is the one calculated
1006 : in the Monte Carlo routine above
1007 : */
1008 :
1009 6 : double MetainferenceBase::getEnergyForceSP(const vector<double> &mean, const vector<double> &dmean_x,
1010 : const vector<double> &dmean_b)
1011 : {
1012 6 : const double scale2 = scale_*scale_;
1013 6 : const double sm2 = sigma_mean2_[0];
1014 6 : const double ss2 = sigma_[0]*sigma_[0] + scale2*sm2;
1015 6 : vector<double> f(narg+1,0);
1016 :
1017 6 : if(master) {
1018 : double omp_ene=0.;
1019 18 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(omp_ene)
1020 : {
1021 12 : #pragma omp for reduction( + : omp_ene)
1022 12 : for(unsigned i=0; i<narg; ++i) {
1023 126 : const double dev = scale_*mean[i]-parameters[i]+offset_;
1024 42 : const double a2 = 0.5*dev*dev + ss2;
1025 42 : const double t = exp(-a2/sm2);
1026 42 : const double dt = 1./t;
1027 42 : const double it = 1./(1.-t);
1028 42 : const double dit = 1./(1.-dt);
1029 42 : omp_ene += std::log(2.*a2*it);
1030 84 : f[i] = -scale_*dev*(dit/sm2 + 1./a2);
1031 : }
1032 : }
1033 12 : f[narg] = omp_ene;
1034 : // collect contribution to forces and energy from other replicas
1035 6 : if(nrep_>1) multi_sim_comm.Sum(&f[0],narg+1);
1036 : }
1037 : // intra-replica summation
1038 12 : comm.Sum(&f[0],narg+1);
1039 :
1040 12 : const double ene = f[narg];
1041 : double w_tmp = 0.;
1042 90 : for(unsigned i=0; i<narg; ++i) {
1043 126 : setMetaDer(i, -kbt_*f[i]*dmean_x[i]);
1044 126 : w_tmp += kbt_*f[i]*dmean_b[i];
1045 : }
1046 :
1047 6 : if(do_reweight_) {
1048 0 : setArgDerivatives(valueScore, -w_tmp);
1049 0 : getPntrToComponent("biasDer")->set(-w_tmp);
1050 : }
1051 :
1052 12 : return kbt_*ene;
1053 : }
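 :
 : // In getEnergyForceSP() f[i] is -d(ene_i)/d(mean_i): with ene_i = log(2*a2*it)
 : // and d(a2)/d(mean_i) = scale*dev, the chain rule gives
 : //   f[i] = -scale*dev*(1/a2 + dit/sm2),
 : // which is propagated to the arguments via dmean_x and to the bias via dmean_b.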
1054 :
1055 1776 : double MetainferenceBase::getEnergyForceSPE(const vector<double> &mean, const vector<double> &dmean_x,
1056 : const vector<double> &dmean_b)
1057 : {
1058 1776 : const double scale2 = scale_*scale_;
1059 1776 : vector<double> f(narg+1,0);
1060 :
1061 1776 : if(master) {
1062 : double omp_ene = 0;
1063 2664 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(omp_ene)
1064 : {
1065 1776 : #pragma omp for reduction( + : omp_ene)
1066 1776 : for(unsigned i=0; i<narg; ++i) {
1067 7104 : const double sm2 = sigma_mean2_[i];
1068 3552 : const double ss2 = sigma_[i]*sigma_[i] + scale2*sm2;
1069 10656 : const double dev = scale_*mean[i]-parameters[i]+offset_;
1070 3552 : const double a2 = 0.5*dev*dev + ss2;
1071 3552 : const double t = exp(-a2/sm2);
1072 3552 : const double dt = 1./t;
1073 3552 : const double it = 1./(1.-t);
1074 3552 : const double dit = 1./(1.-dt);
1075 3552 : omp_ene += std::log(2.*a2*it);
1076 7104 : f[i] = -scale_*dev*(dit/sm2 + 1./a2);
1077 : }
1078 : }
1079 1776 : f[narg] = omp_ene;
1080 : // collect contribution to forces and energy from other replicas
1081 1776 : if(nrep_>1) multi_sim_comm.Sum(&f[0],narg+1);
1082 : }
1083 3552 : comm.Sum(&f[0],narg+1);
1084 :
1085 3552 : const double ene = f[narg];
1086 : double w_tmp = 0.;
1087 15984 : for(unsigned i=0; i<narg; ++i) {
1088 21312 : setMetaDer(i, -kbt_ * dmean_x[i] * f[i]);
1089 21312 : w_tmp += kbt_ * dmean_b[i] *f[i];
1090 : }
1091 :
1092 1776 : if(do_reweight_) {
1093 1776 : setArgDerivatives(valueScore, -w_tmp);
1094 3552 : getPntrToComponent("biasDer")->set(-w_tmp);
1095 : }
1096 :
1097 3552 : return kbt_*ene;
1098 : }
1099 :
1100 271 : double MetainferenceBase::getEnergyForceGJ(const vector<double> &mean, const vector<double> &dmean_x,
1101 : const vector<double> &dmean_b)
1102 : {
1103 271 : const double scale2 = scale_*scale_;
1104 271 : double inv_s2=0.;
1105 :
1106 271 : if(master) {
1107 458 : inv_s2 = 1./(sigma_[0]*sigma_[0] + scale2*sigma_mean2_[0]);
1108 229 : if(nrep_>1) multi_sim_comm.Sum(inv_s2);
1109 : }
1110 271 : comm.Sum(inv_s2);
1111 :
1112 : double ene = 0.;
1113 271 : double w_tmp = 0.;
1114 813 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene,w_tmp)
1115 : {
1116 542 : #pragma omp for reduction( + : ene,w_tmp)
1117 542 : for(unsigned i=0; i<narg; ++i) {
1118 2403 : const double dev = scale_*mean[i]-parameters[i]+offset_;
1119 801 : const double mult = dev*scale_*inv_s2;
1120 801 : ene += 0.5*dev*dev*inv_s2;
1121 1602 : setMetaDer(i, kbt_*dmean_x[i]*mult);
1122 1602 : w_tmp += kbt_*dmean_b[i]*mult;
1123 : }
1124 : }
1125 :
1126 271 : if(do_reweight_) {
1127 84 : setArgDerivatives(valueScore, w_tmp);
1128 168 : getPntrToComponent("biasDer")->set(w_tmp);
1129 : }
1130 :
1131 271 : return kbt_*ene;
1132 : }
1133 :
1134 160 : double MetainferenceBase::getEnergyForceGJE(const vector<double> &mean, const vector<double> &dmean_x,
1135 : const vector<double> &dmean_b)
1136 : {
1137 160 : const double scale2 = scale_*scale_;
1138 320 : vector<double> inv_s2(sigma_.size(),0.);
1139 :
1140 160 : if(master) {
1141 9944 : for(unsigned i=0; i<sigma_.size(); ++i) inv_s2[i] = 1./(sigma_[i]*sigma_[i] + scale2*sigma_mean2_[i]);
1142 184 : if(nrep_>1) multi_sim_comm.Sum(&inv_s2[0],sigma_.size());
1143 : }
1144 480 : comm.Sum(&inv_s2[0],sigma_.size());
1145 :
1146 : double ene = 0.;
1147 160 : double w_tmp = 0.;
1148 480 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene,w_tmp)
1149 : {
1150 320 : #pragma omp for reduction( + : ene,w_tmp)
1151 320 : for(unsigned i=0; i<narg; ++i) {
1152 11568 : const double dev = scale_*mean[i]-parameters[i]+offset_;
1153 7712 : const double mult = dev*scale_*inv_s2[i];
1154 3856 : ene += 0.5*dev*dev*inv_s2[i];
1155 7712 : setMetaDer(i, kbt_*dmean_x[i]*mult);
1156 7712 : w_tmp += kbt_*dmean_b[i]*mult;
1157 : }
1158 : }
1159 :
1160 160 : if(do_reweight_) {
1161 76 : setArgDerivatives(valueScore, w_tmp);
1162 152 : getPntrToComponent("biasDer")->set(w_tmp);
1163 : }
1164 :
1165 320 : return kbt_*ene;
1166 : }
1167 :
1168 12 : double MetainferenceBase::getEnergyForceMIGEN(const vector<double> &mean, const vector<double> &dmean_x, const vector<double> &dmean_b)
1169 : {
1170 24 : vector<double> inv_s2(sigma_.size(),0.);
1171 24 : vector<double> dev(sigma_.size(),0.);
1172 24 : vector<double> dev2(sigma_.size(),0.);
1173 :
1174 96 : for(unsigned i=0; i<sigma_.size(); ++i) {
1175 48 : inv_s2[i] = 1./sigma_mean2_[i];
1176 24 : if(master) {
1177 72 : dev[i] = (mean[i]-ftilde_[i]);
1178 48 : dev2[i] = dev[i]*dev[i];
1179 : }
1180 : }
1181 12 : if(master&&nrep_>1) {
1182 0 : multi_sim_comm.Sum(&dev[0],dev.size());
1183 0 : multi_sim_comm.Sum(&dev2[0],dev2.size());
1184 : }
1185 24 : comm.Sum(&dev[0],dev.size());
1186 24 : comm.Sum(&dev2[0],dev2.size());
1187 :
1188 12 : double dene_b = 0.;
1189 : double ene = 0.;
1190 36 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene,dene_b)
1191 : {
1192 24 : #pragma omp for reduction( + : ene,dene_b)
1193 24 : for(unsigned i=0; i<narg; ++i) {
1194 96 : const double dene_x = kbt_*inv_s2[i]*dmean_x[i]*dev[i];
1195 48 : dene_b += kbt_*inv_s2[i]*dmean_b[i]*dev[i];
1196 48 : ene += 0.5*dev2[i]*inv_s2[i];
1197 : setMetaDer(i, dene_x);
1198 : }
1199 : }
1200 :
1201 12 : if(do_reweight_) {
1202 0 : setArgDerivatives(valueScore, dene_b);
1203 0 : getPntrToComponent("biasDer")->set(dene_b);
1204 : }
1205 :
1206 24 : return kbt_*ene;
1207 : }
1208 :
1209 2225 : void MetainferenceBase::get_weights(double &fact, double &var_fact)
1210 : {
1211 2225 : const double dnrep = static_cast<double>(nrep_);
1212 2225 : const double ave_fact = 1.0/dnrep;
1213 :
1214 : double norm = 0.0;
1215 :
1216 : // calculate the weights either from BIAS
1217 2225 : if(do_reweight_) {
1218 1936 : vector<double> bias(nrep_,0);
1219 1936 : if(master) {
1220 1960 : bias[replica_] = getArgument(0);
1221 1960 : if(nrep_>1) multi_sim_comm.Sum(&bias[0], nrep_);
1222 : }
1223 3872 : comm.Sum(&bias[0], nrep_);
1224 :
1225 1936 : const double maxbias = *(std::max_element(bias.begin(), bias.end()));
1226 9680 : for(unsigned i=0; i<nrep_; ++i) {
1227 7744 : bias[i] = exp((bias[i]-maxbias)/kbt_);
1228 3872 : norm += bias[i];
1229 : }
1230 :
1231 : // accumulate weights
1232 3872 : if(!firstTimeW[iselect]) {
1233 9570 : for(unsigned i=0; i<nrep_; ++i) {
1234 11484 : const double delta=bias[i]/norm-average_weights_[iselect][i];
1235 3828 : average_weights_[iselect][i]+=decay_w_*delta;
1236 : }
1237 : } else {
1238 : firstTimeW[iselect] = false;
1239 110 : for(unsigned i=0; i<nrep_; ++i) {
1240 132 : average_weights_[iselect][i] = bias[i]/norm;
1241 : }
1242 : }
1243 :
1244 : // set average back into bias and set norm to one
1245 9680 : for(unsigned i=0; i<nrep_; ++i) bias[i] = average_weights_[iselect][i];
1246 : // set local weight, norm and weight variance
1247 3872 : fact = bias[replica_];
1248 : norm = 1.0;
1249 9680 : for(unsigned i=0; i<nrep_; ++i) var_fact += (bias[i]/norm-ave_fact)*(bias[i]/norm-ave_fact);
1250 3872 : getPntrToComponent("weight")->set(fact);
1251 : } else {
1252 : // or arithmetic ones
1253 : norm = dnrep;
1254 289 : fact = 1.0/norm;
1255 : }
1256 2225 : }
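 :
 : // With REWEIGHT the per-replica weights are Boltzmann factors of the bias,
 : // accumulated as an exponential moving average,
 : //   w_i <- w_i + decay_w_ * (b_i/norm - w_i),
 : // so AVERAGING=N (decay_w_ = 1/N) gives a memory of roughly N steps; without
 : // REWEIGHT all replicas simply get the uniform weight 1/nrep_.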
1257 :
1258 2225 : void MetainferenceBase::get_sigma_mean(const double fact, const double var_fact, const vector<double> &mean)
1259 : {
1260 2225 : const double dnrep = static_cast<double>(nrep_);
1261 2225 : const double ave_fact = 1.0/dnrep;
1262 :
1263 2225 : vector<double> sigma_mean2_tmp(sigma_mean2_.size());
1264 :
1265 2225 : if(do_optsigmamean_>0) {
1266 : // remove first entry of the history vector
1267 168 : if(sigma_mean2_last_[iselect][0].size()==optsigmamean_stride_&&optsigmamean_stride_>0)
1268 0 : for(unsigned i=0; i<narg; ++i) sigma_mean2_last_[iselect][i].erase(sigma_mean2_last_[iselect][i].begin());
1269 : /* this is the current estimate of sigma mean for each argument;
1270 : there is one per argument in any case, because with
1271 : GAUSS/OUTLIERS the maximum among them is used */
1272 84 : vector<double> sigma_mean2_now(narg,0);
1273 84 : if(do_reweight_) {
1274 0 : if(master) {
1275 0 : for(unsigned i=0; i<narg; ++i) {
1276 0 : double tmp1 = (fact*getCalcData(i)-ave_fact*mean[i])*(fact*getCalcData(i)-ave_fact*mean[i]);
1277 0 : double tmp2 = -2.*mean[i]*(fact-ave_fact)*(fact*getCalcData(i)-ave_fact*mean[i]);
1278 0 : sigma_mean2_now[i] = tmp1 + tmp2;
1279 : }
1280 0 : if(nrep_>1) multi_sim_comm.Sum(&sigma_mean2_now[0], narg);
1281 : }
1282 0 : comm.Sum(&sigma_mean2_now[0], narg);
1283 0 : for(unsigned i=0; i<narg; ++i) sigma_mean2_now[i] = dnrep/(dnrep-1.)*(sigma_mean2_now[i] + mean[i]*mean[i]*var_fact);
1284 : } else {
1285 84 : if(master) {
1286 1302 : for(unsigned i=0; i<narg; ++i) {
1287 630 : double tmp = getCalcData(i)-mean[i];
1288 1260 : sigma_mean2_now[i] = fact*tmp*tmp;
1289 : }
1290 84 : if(nrep_>1) multi_sim_comm.Sum(&sigma_mean2_now[0], narg);
1291 : }
1292 168 : comm.Sum(&sigma_mean2_now[0], narg);
1293 2604 : for(unsigned i=0; i<narg; ++i) sigma_mean2_now[i] /= dnrep;
1294 : }
1295 :
1296 : // add sigma_mean2 to history
1297 84 : if(optsigmamean_stride_>0) {
1298 0 : for(unsigned i=0; i<narg; ++i) sigma_mean2_last_[iselect][i].push_back(sigma_mean2_now[i]);
1299 : } else {
1300 3864 : for(unsigned i=0; i<narg; ++i) if(sigma_mean2_now[i] > sigma_mean2_last_[iselect][i][0]) sigma_mean2_last_[iselect][i][0] = sigma_mean2_now[i];
1301 : }
1302 :
1303 84 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
1304 2604 : for(unsigned i=0; i<narg; ++i) {
1305 : /* set to the maximum in history vector */
1306 2520 : sigma_mean2_tmp[i] = *max_element(sigma_mean2_last_[iselect][i].begin(), sigma_mean2_last_[iselect][i].end());
1307 : /* the standard error of the mean */
1308 2520 : valueSigmaMean[i]->set(sqrt(sigma_mean2_tmp[i]));
1309 1260 : if(noise_type_==GENERIC) {
1310 0 : sigma_min_[i] = sqrt(sigma_mean2_tmp[i]);
1311 0 : if(sigma_[i] < sigma_min_[i]) sigma_[i] = sigma_min_[i];
1312 : }
1313 : }
1314 0 : } else if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
1315 : // find maximum for each data point
1316 : vector <double> max_values;
1317 0 : for(unsigned i=0; i<narg; ++i) max_values.push_back(*max_element(sigma_mean2_last_[iselect][i].begin(), sigma_mean2_last_[iselect][i].end()));
1318 : // find maximum across data points
1319 0 : const double max_now = *max_element(max_values.begin(), max_values.end());
1320 : // set new value
1321 0 : sigma_mean2_tmp[0] = max_now;
1322 0 : valueSigmaMean[0]->set(sqrt(sigma_mean2_tmp[0]));
1323 : }
1324 : // endif sigma optimization
1325 : } else {
1326 2141 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
1327 21312 : for(unsigned i=0; i<narg; ++i) {
1328 19448 : sigma_mean2_tmp[i] = sigma_mean2_last_[iselect][i][0];
1329 19448 : valueSigmaMean[i]->set(sqrt(sigma_mean2_tmp[i]));
1330 : }
1331 277 : } else if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
1332 554 : sigma_mean2_tmp[0] = sigma_mean2_last_[iselect][0][0];
1333 554 : valueSigmaMean[0]->set(sqrt(sigma_mean2_tmp[0]));
1334 : }
1335 : }
1336 :
1337 2225 : sigma_mean2_ = sigma_mean2_tmp;
1338 2225 : }
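 :
 : // With OPTSIGMAMEAN=SEM the quantity sigma_mean2_now[i] computed above is the
 : // squared standard error of the (weighted) mean, roughly Var_i/nrep_; keeping
 : // the maximum over the history makes the estimate monotonically conservative
 : // (or windowed over the last AVERAGING steps when a stride is set).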
1339 :
1340 2225 : void MetainferenceBase::replica_averaging(const double fact, vector<double> &mean, vector<double> &dmean_b)
1341 : {
1342 2225 : if(master) {
1343 19962 : for(unsigned i=0; i<narg; ++i) mean[i] = fact*calc_data_[i];
1344 2249 : if(nrep_>1) multi_sim_comm.Sum(&mean[0], narg);
1345 : }
1346 4450 : comm.Sum(&mean[0], narg);
1347 : // set the derivative of the mean with respect to the bias
1348 49533 : for(unsigned i=0; i<narg; ++i) dmean_b[i] = fact/kbt_*(calc_data_[i]-mean[i])*decay_w_;
1349 :
1350 : // this is only for generic metainference
1351 2225 : if(firstTime) {ftilde_ = mean; firstTime = false;}
1352 2225 : }
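 :
 : // The replica average is the weighted mean mean_i = sum_r fact_r * x_i(r),
 : // assembled with one MPI sum across replicas plus one inside each replica;
 : // dmean_b stores its derivative with respect to the bias through the weights.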
1353 :
1354 0 : void MetainferenceBase::do_regression_zero(const vector<double> &mean)
1355 : {
1356 : // parameters[i] = scale_ * mean[i]: find scale_ with linear regression
1357 : double num = 0.0;
1358 : double den = 0.0;
1359 0 : for(unsigned i=0; i<parameters.size(); ++i) {
1360 0 : num += mean[i] * parameters[i];
1361 0 : den += mean[i] * mean[i];
1362 : }
1363 0 : if(den>0) {
1364 0 : scale_ = num / den;
1365 : } else {
1366 0 : scale_ = 1.0;
1367 : }
1368 0 : }
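 :
 : // This is least squares through the origin: minimising
 : //   chi2(s) = sum_i ( parameters[i] - s*mean[i] )^2
 : // in s gives s = sum_i mean_i*parameters_i / sum_i mean_i^2, the value assigned
 : // to scale_ above (with a fallback to 1 for a vanishing denominator).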
1369 :
1370 2225 : double MetainferenceBase::getScore()
1371 : {
1372 : /* Metainference */
1373 : /* 1) collect weights */
1374 2225 : double fact = 0.;
1375 2225 : double var_fact = 0.;
1376 2225 : get_weights(fact, var_fact);
1377 :
1378 : /* 2) calculate average */
1379 4450 : vector<double> mean(getNarg(),0);
1380 : // this is the derivative of the mean with respect to the argument
1381 2225 : vector<double> dmean_x(getNarg(),fact);
1382 : // this is the derivative of the mean with respect to the bias
1383 4450 : vector<double> dmean_b(getNarg(),0);
1384 : // calculate it
1385 2225 : replica_averaging(fact, mean, dmean_b);
1386 :
1387 : /* 3) calculate parameters */
1388 2225 : get_sigma_mean(fact, var_fact, mean);
1389 :
1390 : // in case of regression with zero intercept, calculate scale
1391 2225 : if(doregres_zero_ && getStep()%nregres_zero_==0) do_regression_zero(mean);
1392 :
1393 : /* 4) run monte carlo */
1394 2225 : doMonteCarlo(mean);
1395 :
1396 : // calculate bias and forces
1397 : double ene = 0;
1398 2225 : switch(noise_type_) {
1399 271 : case GAUSS:
1400 271 : ene = getEnergyForceGJ(mean, dmean_x, dmean_b);
1401 : break;
1402 160 : case MGAUSS:
1403 160 : ene = getEnergyForceGJE(mean, dmean_x, dmean_b);
1404 : break;
1405 6 : case OUTLIERS:
1406 6 : ene = getEnergyForceSP(mean, dmean_x, dmean_b);
1407 : break;
1408 1776 : case MOUTLIERS:
1409 1776 : ene = getEnergyForceSPE(mean, dmean_x, dmean_b);
1410 : break;
1411 12 : case GENERIC:
1412 12 : ene = getEnergyForceMIGEN(mean, dmean_x, dmean_b);
1413 : break;
1414 : }
1415 :
1416 2225 : return ene;
1417 : }
1418 :
1419 86 : void MetainferenceBase::writeStatus()
1420 : {
1421 86 : if(!doscore_) return;
1422 31 : sfile_.rewind();
1423 62 : sfile_.printField("time",getTimeStep()*getStep());
1424 : //nsel
1425 155 : for(unsigned i=0; i<sigma_mean2_last_.size(); i++) {
1426 : std::string msg_i,msg_j;
1427 31 : Tools::convert(i,msg_i);
1428 : vector <double> max_values;
1429 : //narg
1430 5025 : for(unsigned j=0; j<narg; ++j) {
1431 2497 : Tools::convert(j,msg_j);
1432 4994 : std::string msg = msg_i+"_"+msg_j;
1433 2497 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
1434 7410 : sfile_.printField("sigmaMean_"+msg,sqrt(*max_element(sigma_mean2_last_[i][j].begin(), sigma_mean2_last_[i][j].end())));
1435 : } else {
1436 : // find maximum for each data point
1437 54 : max_values.push_back(*max_element(sigma_mean2_last_[i][j].begin(), sigma_mean2_last_[i][j].end()));
1438 : }
1439 : }
1440 31 : if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
1441 : // find maximum across data points
1442 8 : const double max_now = sqrt(*max_element(max_values.begin(), max_values.end()));
1443 8 : Tools::convert(0,msg_j);
1444 16 : std::string msg = msg_i+"_"+msg_j;
1445 16 : sfile_.printField("sigmaMean_"+msg, max_now);
1446 : }
1447 : }
1448 7496 : for(unsigned i=0; i<sigma_.size(); ++i) {
1449 : std::string msg;
1450 2478 : Tools::convert(i,msg);
1451 4956 : sfile_.printField("sigma_"+msg,sigma_[i]);
1452 : }
1453 31 : if(noise_type_==GENERIC) {
1454 8 : for(unsigned i=0; i<ftilde_.size(); ++i) {
1455 : std::string msg;
1456 2 : Tools::convert(i,msg);
1457 4 : sfile_.printField("ftilde_"+msg,ftilde_[i]);
1458 : }
1459 : }
1460 62 : sfile_.printField("scale0_",scale_);
1461 62 : sfile_.printField("offset0_",offset_);
1462 155 : for(unsigned i=0; i<average_weights_.size(); i++) {
1463 : std::string msg_i;
1464 31 : Tools::convert(i,msg_i);
1465 93 : sfile_.printField("weight_"+msg_i,average_weights_[i][replica_]);
1466 : }
1467 31 : sfile_.printField();
1468 31 : sfile_.flush();
1469 : }
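 :
 : // The fields written here (sigmaMean_*, sigma_*, ftilde_*, scale0_, offset0_,
 : // weight_*) mirror exactly what Initialise() reads back when RESTART is active,
 : // so a simulation can be continued from the STATUS_FILE produced by this method.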
1470 :
1471 : }
1472 5517 : }
1473 :
|