LCOV - code coverage report
Current view: top level - isdb - MetainferenceBase.cpp (source / functions) Hit Total Coverage
Test: plumed test coverage Lines: 696 819 85.0 %
Date: 2020-11-18 11:20:57 Functions: 32 36 88.9 %

          Line data    Source code
       1             : /* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
       2             :    Copyright (c) 2017-2019 The plumed team
       3             :    (see the PEOPLE file at the root of the distribution for a list of names)
       4             : 
       5             :    See http://www.plumed.org for more information.
       6             : 
       7             :    This file is part of plumed, version 2.
       8             : 
       9             :    plumed is free software: you can redistribute it and/or modify
      10             :    it under the terms of the GNU Lesser General Public License as published by
      11             :    the Free Software Foundation, either version 3 of the License, or
      12             :    (at your option) any later version.
      13             : 
      14             :    plumed is distributed in the hope that it will be useful,
      15             :    but WITHOUT ANY WARRANTY; without even the implied warranty of
      16             :    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      17             :    GNU Lesser General Public License for more details.
      18             : 
      19             :    You should have received a copy of the GNU Lesser General Public License
      20             :    along with plumed.  If not, see <http://www.gnu.org/licenses/>.
      21             : +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
      22             : #include "MetainferenceBase.h"
      23             : #include "tools/File.h"
      24             : #include <cmath>
      25             : #include <ctime>
      26             : #include <numeric>
      27             : 
      28             : using namespace std;
      29             : 
      30             : #ifndef M_PI
      31             : #define M_PI           3.14159265358979323846
      32             : #endif
      33             : 
      34             : namespace PLMD {
      35             : namespace isdb {
      36             : 
                     : // Declare every input keyword, flag, and output component understood by
                     : // actions derived from MetainferenceBase. Called once per registered action
                     : // type; `keys` accumulates the declarations used for input parsing and docs.
       37          77 : void MetainferenceBase::registerKeywords( Keywords& keys ) {
                     :   // Pull in the keywords of all the base Action classes first.
       38          77 :   Action::registerKeywords(keys);
       39          77 :   ActionAtomistic::registerKeywords(keys);
       40          77 :   ActionWithValue::registerKeywords(keys);
       41          77 :   ActionWithArguments::registerKeywords(keys);
       42          77 :   componentsAreNotOptional(keys);
       43         154 :   keys.use("ARG");
                     :   // Core switches: scoring, replica averaging, reweighting.
       44         231 :   keys.addFlag("DOSCORE",false,"activate metainference");
       45         231 :   keys.addFlag("NOENSEMBLE",false,"don't perform any replica-averaging");
       46         231 :   keys.addFlag("REWEIGHT",false,"simple REWEIGHT using the ARG as energy");
       47         308 :   keys.add("optional","AVERAGING", "Stride for calculation of averaged weights and sigma_mean");
                     :   // Noise model selection and the GENERIC-model likelihood/ftilde controls.
       48         385 :   keys.add("compulsory","NOISETYPE","MGAUSS","functional form of the noise (GAUSS,MGAUSS,OUTLIERS,MOUTLIERS,GENERIC)");
       49         385 :   keys.add("compulsory","LIKELIHOOD","GAUSS","the likelihood for the GENERIC metainference model, GAUSS or LOGN");
       50         385 :   keys.add("compulsory","DFTILDE","0.1","fraction of sigma_mean used to evolve ftilde");
                     :   // Optional sampled scaling factor (SCALEDATA) and its prior/bounds/MC step.
       51         231 :   keys.addFlag("SCALEDATA",false,"Set to TRUE if you want to sample a scaling factor common to all values and replicas");
       52         385 :   keys.add("compulsory","SCALE0","1.0","initial value of the scaling factor");
       53         385 :   keys.add("compulsory","SCALE_PRIOR","FLAT","either FLAT or GAUSSIAN");
       54         308 :   keys.add("optional","SCALE_MIN","minimum value of the scaling factor");
       55         308 :   keys.add("optional","SCALE_MAX","maximum value of the scaling factor");
       56         308 :   keys.add("optional","DSCALE","maximum MC move of the scaling factor");
                     :   // Optional sampled offset (ADDOFFSET) and its prior/bounds/MC step.
       57         231 :   keys.addFlag("ADDOFFSET",false,"Set to TRUE if you want to sample an offset common to all values and replicas");
       58         385 :   keys.add("compulsory","OFFSET0","0.0","initial value of the offset");
       59         385 :   keys.add("compulsory","OFFSET_PRIOR","FLAT","either FLAT or GAUSSIAN");
       60         308 :   keys.add("optional","OFFSET_MIN","minimum value of the offset");
       61         308 :   keys.add("optional","OFFSET_MAX","maximum value of the offset");
       62         308 :   keys.add("optional","DOFFSET","maximum MC move of the offset");
                     :   // Uncertainty parameter sigma: initial value, bounds, MC step, and the
                     :   // sigma_mean estimation mode.
       63         385 :   keys.add("compulsory","SIGMA0","1.0","initial value of the uncertainty parameter");
       64         385 :   keys.add("compulsory","SIGMA_MIN","0.0","minimum value of the uncertainty parameter");
       65         385 :   keys.add("compulsory","SIGMA_MAX","10.","maximum value of the uncertainty parameter");
       66         308 :   keys.add("optional","DSIGMA","maximum MC move of the uncertainty parameter");
       67         385 :   keys.add("compulsory","OPTSIGMAMEAN","NONE","Set to NONE/SEM to manually set sigma mean, or to estimate it on the fly");
       68         308 :   keys.add("optional","SIGMA_MEAN0","starting value for the uncertainty in the mean estimate");
       69         308 :   keys.add("optional","TEMP","the system temperature - this is only needed if code doesnt' pass the temperature to plumed");
                     :   // Monte Carlo controls, status-file output, and selector support.
       70         308 :   keys.add("optional","MC_STEPS","number of MC steps");
       71         308 :   keys.add("optional","MC_STRIDE","MC stride");
       72         308 :   keys.add("optional","MC_CHUNKSIZE","MC chunksize");
       73         308 :   keys.add("optional","STATUS_FILE","write a file with all the data usefull for restart/continuation of Metainference");
       74         385 :   keys.add("compulsory","WRITE_STRIDE","1000","write the status to a file every N steps, this can be used for restart/continuation");
       75         308 :   keys.add("optional","SELECTOR","name of selector");
       76         308 :   keys.add("optional","NSELECT","range of values for selector [0, N-1]");
       77         154 :   keys.use("RESTART");
       78          77 :   useCustomisableComponents(keys);
                     :   // Output components; the second field names the flag/mode that enables each.
       79         308 :   keys.addOutputComponent("sigma",        "default",      "uncertainty parameter");
       80         308 :   keys.addOutputComponent("sigmaMean",    "default",      "uncertainty in the mean estimate");
       81         308 :   keys.addOutputComponent("acceptSigma",  "default",      "MC acceptance");
       82         308 :   keys.addOutputComponent("acceptScale",  "SCALEDATA",    "MC acceptance");
       83         308 :   keys.addOutputComponent("weight",       "REWEIGHT",     "weights of the weighted average");
       84         308 :   keys.addOutputComponent("biasDer",      "REWEIGHT",     "derivatives wrt the bias");
       85         308 :   keys.addOutputComponent("scale",        "SCALEDATA",    "scale parameter");
       86         308 :   keys.addOutputComponent("offset",       "ADDOFFSET",    "offset parameter");
       87         308 :   keys.addOutputComponent("ftilde",       "GENERIC",      "ensemble average estimator");
       88          77 : }
      89             : 
                     : // Constructor: parses all metainference keywords, sets up inter-replica
                     : // communication (nrep_/replica_), validates the selector/reweight options,
                     : // initialises the noise model, scale/offset priors, sigma bounds, the three
                     : // Monte Carlo random generators, and (if scoring) opens the status file.
      90          70 : MetainferenceBase::MetainferenceBase(const ActionOptions&ao):
      91             :   Action(ao),
      92             :   ActionAtomistic(ao),
      93             :   ActionWithArguments(ao),
      94             :   ActionWithValue(ao),
      95             :   doscore_(false),
      96             :   write_stride_(0),
      97             :   narg(0),
      98             :   doscale_(false),
      99             :   scale_(1.),
     100             :   scale_mu_(0),
                     :   // NOTE: min>max defaults (1,-1) mark the scale/offset bounds as "unset"
                     :   // until parsed below.
     101             :   scale_min_(1),
     102             :   scale_max_(-1),
     103             :   Dscale_(-1),
     104             :   dooffset_(false),
     105             :   offset_(0.),
     106             :   offset_mu_(0),
     107             :   offset_min_(1),
     108             :   offset_max_(-1),
     109             :   Doffset_(-1),
     110             :   Dftilde_(0.1),
                     :   // Three independent RNG streams: [0] sigma moves, [1] scale/offset moves
                     :   // (seeded identically on all replicas, see below), [2] MC chunk selection.
     111             :   random(3),
     112             :   MCsteps_(1),
     113             :   MCstride_(1),
     114             :   MCaccept_(0),
     115             :   MCacceptScale_(0),
     116             :   MCacceptFT_(0),
     117             :   MCtrial_(0),
     118             :   MCchunksize_(0),
     119             :   firstTime(true),
     120             :   do_reweight_(false),
     121             :   do_optsigmamean_(0),
     122             :   nsel_(1),
     123             :   iselect(0),
     124             :   optsigmamean_stride_(0),
     125         910 :   decay_w_(1.)
     126             : {
     127         140 :   parseFlag("DOSCORE", doscore_);
     128             : 
     129          70 :   bool noensemble = false;
     130         140 :   parseFlag("NOENSEMBLE", noensemble);
     131             : 
     132             :   // set up replica stuff
                     :   // Only the intra-replica rank-0 process queries multi_sim_comm; the
                     :   // comm.Sum calls below then distribute nrep_/replica_ to the other ranks.
     133          70 :   master = (comm.Get_rank()==0);
     134          70 :   if(master) {
     135          47 :     nrep_    = multi_sim_comm.Get_size();
     136          47 :     replica_ = multi_sim_comm.Get_rank();
     137          47 :     if(noensemble) nrep_ = 1;
     138             :   } else {
     139          23 :     nrep_    = 0;
     140          23 :     replica_ = 0;
     141             :   }
     142          70 :   comm.Sum(&nrep_,1);
     143          70 :   comm.Sum(&replica_,1);
     144             : 
     145         140 :   parse("SELECTOR", selector_);
     146         140 :   parse("NSELECT", nsel_);
     147             :   // do checks
     148          70 :   if(selector_.length()>0 && nsel_<=1) error("With SELECTOR active, NSELECT must be greater than 1");
     149          70 :   if(selector_.length()==0 && nsel_>1) error("With NSELECT greater than 1, you must specify SELECTOR");
     150             : 
     151             :   // initialise firstTimeW
     152          70 :   firstTimeW.resize(nsel_, true);
     153             : 
     154             :   // reweight implies a different number of arguments (the latest one must always be the bias)
     155         140 :   parseFlag("REWEIGHT", do_reweight_);
     156          86 :   if(do_reweight_&&getNumberOfArguments()!=1) error("To REWEIGHT one must provide a bias as an argument");
     157          70 :   if(do_reweight_&&nrep_<2) error("REWEIGHT can only be used in parallel with 2 or more replicas");
                     :   // Fresh run: uniform weights 1/nrep_; on restart they are zeroed here and
                     :   // later read back from the status file.
     158         210 :   if(!getRestart()) average_weights_.resize(nsel_, vector<double> (nrep_, 1./static_cast<double>(nrep_)));
     159           0 :   else average_weights_.resize(nsel_, vector<double> (nrep_, 0.));
     160             : 
     161          70 :   unsigned averaging=0;
     162         140 :   parse("AVERAGING", averaging);
     163          70 :   if(averaging>0) {
                     :     // AVERAGING turns on exponentially-decaying averages with rate 1/stride.
     164           0 :     decay_w_ = 1./static_cast<double> (averaging);
     165           0 :     optsigmamean_stride_    = averaging;
     166             :   }
     167             : 
     168             :   string stringa_noise;
     169         140 :   parse("NOISETYPE",stringa_noise);
     170          70 :   if(stringa_noise=="GAUSS")           noise_type_ = GAUSS;
     171          67 :   else if(stringa_noise=="MGAUSS")     noise_type_ = MGAUSS;
     172          10 :   else if(stringa_noise=="OUTLIERS")   noise_type_ = OUTLIERS;
     173           9 :   else if(stringa_noise=="MOUTLIERS")  noise_type_ = MOUTLIERS;
     174           1 :   else if(stringa_noise=="GENERIC")    noise_type_ = GENERIC;
     175           0 :   else error("Unknown noise type!");
     176             : 
                     :   // LIKELIHOOD and DFTILDE are only meaningful for the GENERIC noise model.
     177          70 :   if(noise_type_== GENERIC) {
     178             :     string stringa_like;
     179           2 :     parse("LIKELIHOOD",stringa_like);
     180           1 :     if(stringa_like=="GAUSS") gen_likelihood_ = LIKE_GAUSS;
     181           0 :     else if(stringa_like=="LOGN") gen_likelihood_ = LIKE_LOGN;
     182           0 :     else error("Unknown likelihood type!");
     183             : 
     184           2 :     parse("DFTILDE",Dftilde_);
     185             :   }
     186             : 
     187         140 :   parse("WRITE_STRIDE",write_stride_);
     188         140 :   parse("STATUS_FILE",status_file_name_);
                     :   // The action label is always appended so each action gets its own file.
     189         210 :   if(status_file_name_=="") status_file_name_ = "MISTATUS"+getLabel();
     190           0 :   else                      status_file_name_ = status_file_name_+getLabel();
     191             : 
     192             :   string stringa_optsigma;
     193         140 :   parse("OPTSIGMAMEAN", stringa_optsigma);
     194          70 :   if(stringa_optsigma=="NONE")      do_optsigmamean_=0;
     195           4 :   else if(stringa_optsigma=="SEM")  do_optsigmamean_=1;
     196             : 
     197             :   vector<double> read_sigma_mean_;
     198         140 :   parseVector("SIGMA_MEAN0",read_sigma_mean_);
     199         181 :   if(!do_optsigmamean_ && read_sigma_mean_.size()==0 && !getRestart() && doscore_)
     200           0 :     error("If you don't use OPTSIGMAMEAN and you are not RESTARTING then you MUST SET SIGMA_MEAN0");
     201             : 
                     :   // sigma_mean is stored SQUARED (sigma_mean2_). Multi-sigma noise models
                     :   // keep one entry per value; single-sigma models keep exactly one entry.
     202          70 :   if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
     203          66 :     if(read_sigma_mean_.size()>0) {
     204          21 :       sigma_mean2_.resize(read_sigma_mean_.size());
     205         126 :       for(unsigned i=0; i<read_sigma_mean_.size(); i++) sigma_mean2_[i]=read_sigma_mean_[i]*read_sigma_mean_[i];
     206             :     } else {
     207          45 :       sigma_mean2_.resize(1,0.000001);
     208             :     }
     209             :   } else {
     210           4 :     if(read_sigma_mean_.size()==1) {
     211           4 :       sigma_mean2_.resize(1, read_sigma_mean_[0]*read_sigma_mean_[0]);
     212           0 :     } else if(read_sigma_mean_.size()==0) {
     213           0 :       sigma_mean2_.resize(1, 0.000001);
     214             :     } else {
     215           0 :       error("If you want to use more than one SIGMA_MEAN0 you should use NOISETYPE=MGAUSS|MOUTLIERS");
     216             :     }
     217             :   }
     218             : 
                     :   // Sampled scaling factor: GAUSS prior centred on SCALE0, or FLAT prior
                     :   // requiring explicit [SCALE_MIN, SCALE_MAX] bounds.
     219         140 :   parseFlag("SCALEDATA", doscale_);
     220          70 :   if(doscale_) {
     221             :     string stringa_noise;
     222          24 :     parse("SCALE_PRIOR",stringa_noise);
     223          12 :     if(stringa_noise=="GAUSSIAN")  scale_prior_ = SC_GAUSS;
     224          12 :     else if(stringa_noise=="FLAT") scale_prior_ = SC_FLAT;
     225           0 :     else error("Unknown SCALE_PRIOR type!");
     226          24 :     parse("SCALE0",scale_);
     227          24 :     parse("DSCALE",Dscale_);
     228          12 :     if(Dscale_<0.) error("DSCALE must be set when using SCALEDATA");
     229          12 :     if(scale_prior_==SC_GAUSS) {
     230           0 :       scale_mu_=scale_;
     231             :     } else {
     232          24 :       parse("SCALE_MIN",scale_min_);
     233          24 :       parse("SCALE_MAX",scale_max_);
     234          12 :       if(scale_max_<scale_min_) error("SCALE_MAX and SCALE_MIN must be set when using SCALE_PRIOR=FLAT");
     235             :     }
     236             :   }
     237             : 
                     :   // Sampled offset, mirroring the scale handling above; with a FLAT prior an
                     :   // unset DOFFSET defaults to 5% of the allowed range.
     238         140 :   parseFlag("ADDOFFSET", dooffset_);
     239          70 :   if(dooffset_) {
     240             :     string stringa_noise;
     241           4 :     parse("OFFSET_PRIOR",stringa_noise);
     242           2 :     if(stringa_noise=="GAUSSIAN")  offset_prior_ = SC_GAUSS;
     243           2 :     else if(stringa_noise=="FLAT") offset_prior_ = SC_FLAT;
     244           0 :     else error("Unknown OFFSET_PRIOR type!");
     245           4 :     parse("OFFSET0",offset_);
     246           4 :     parse("DOFFSET",Doffset_);
     247           2 :     if(offset_prior_==SC_GAUSS) {
     248           0 :       offset_mu_=offset_;
     249           0 :       if(Doffset_<0.) error("DOFFSET must be set when using OFFSET_PRIOR=GAUSS");
     250             :     } else {
     251           4 :       parse("OFFSET_MIN",offset_min_);
     252           4 :       parse("OFFSET_MAX",offset_max_);
     253           2 :       if(Doffset_<0) Doffset_ = 0.05*(offset_max_ - offset_min_);
     254           2 :       if(offset_max_<offset_min_) error("OFFSET_MAX and OFFSET_MIN must be set when using OFFSET_PRIOR=FLAT");
     255             :     }
     256             :   }
     257             : 
                     :   // Initial sigma values: per-datapoint for MGAUSS/MOUTLIERS/GENERIC,
                     :   // a single shared value otherwise.
     258             :   vector<double> readsigma;
     259         140 :   parseVector("SIGMA0",readsigma);
     260          74 :   if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&readsigma.size()>1)
     261           0 :     error("If you want to use more than one SIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
     262          70 :   if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
     263          66 :     sigma_.resize(readsigma.size());
     264          66 :     sigma_=readsigma;
     265           4 :   } else sigma_.resize(1, readsigma[0]);
     266             : 
                     :   // Sigma bounds and MC step; an unset DSIGMA defaults to 5% of the range.
                     :   // (These size-1 vectors are expanded to sigma_.size() in Initialise().)
     267             :   double read_smin_;
     268         140 :   parse("SIGMA_MIN",read_smin_);
     269          70 :   sigma_min_.resize(1,read_smin_);
     270             :   double read_smax_;
     271         140 :   parse("SIGMA_MAX",read_smax_);
     272          70 :   sigma_max_.resize(1,read_smax_);
     273          70 :   double read_dsigma_=-1.;
     274         140 :   parse("DSIGMA",read_dsigma_);
     275          70 :   if(read_dsigma_<0) read_dsigma_ = 0.05*(read_smax_ - read_smin_);
     276          70 :   Dsigma_.resize(1,read_dsigma_);
     277             : 
     278             :   // monte carlo stuff
     279         140 :   parse("MC_STEPS",MCsteps_);
     280         140 :   parse("MC_STRIDE",MCstride_);
     281         140 :   parse("MC_CHUNKSIZE", MCchunksize_);
     282             :   // get temperature
                     :   // Prefer an explicit TEMP keyword; otherwise fall back on the kT passed in
                     :   // by the MD engine. Scoring cannot proceed without a temperature.
     283          70 :   double temp=0.0;
     284         140 :   parse("TEMP",temp);
     285          95 :   if(temp>0.0) kbt_=plumed.getAtoms().getKBoltzmann()*temp;
     286          90 :   else kbt_=plumed.getAtoms().getKbT();
     287          70 :   if(kbt_==0.0&&doscore_) error("Unless the MD engine passes the temperature to plumed, you must specify it using TEMP");
     288             : 
     289             :   // initialize random seed
                     :   // Each replica derives a distinct seed (time + replica index) on its
                     :   // master rank, then comm.Sum shares it with the other intra-replica ranks.
     290             :   unsigned iseed;
     291          70 :   if(master) iseed = time(NULL)+replica_;
     292          23 :   else iseed = 0;
     293          70 :   comm.Sum(&iseed, 1);
     294          70 :   random[0].setSeed(-iseed);
     295             :   // Random chunk
     296          70 :   if(master) iseed = time(NULL)+replica_;
     297          23 :   else iseed = 0;
     298          70 :   comm.Sum(&iseed, 1);
     299          70 :   random[2].setSeed(-iseed);
     300          70 :   if(doscale_||dooffset_) {
     301             :     // in this case we want the same seed everywhere
                     :     // (scale/offset are common to all replicas, so random[1] is broadcast
                     :     // from replica 0 to keep their MC moves synchronized)
     302          14 :     iseed = time(NULL);
     303          14 :     if(master&&nrep_>1) multi_sim_comm.Bcast(iseed,0);
     304          14 :     comm.Bcast(iseed,0);
     305          14 :     random[1].setSeed(-iseed);
     306             :   }
     307             : 
     308             :   // outfile stuff
                     :   // Open the restart/status file only when scoring is active and writing is
                     :   // requested; closed by the destructor.
     309          70 :   if(write_stride_>0&&doscore_) {
     310          25 :     sfile_.link(*this);
     311          25 :     sfile_.open(status_file_name_);
     312             :   }
     313             : 
     314          70 : }
     315             : 
                     : // Destructor: flush and close the status file if it was opened in the
                     : // constructor (write_stride_>0 && doscore_).
      316         420 : MetainferenceBase::~MetainferenceBase()
      317             : {
      318          70 :   if(sfile_.isOpen()) sfile_.close();
      319          70 : }
     320             : 
     321          25 : void MetainferenceBase::Initialise(const unsigned input)
     322             : {
     323             :   setNarg(input);
     324          50 :   if(narg!=parameters.size()) {
     325           0 :     std::string num1; Tools::convert(parameters.size(),num1);
     326           0 :     std::string num2; Tools::convert(narg,num2);
     327           0 :     std::string msg = "The number of experimental values " + num1 +" must be the same of the calculated values " + num2;
     328           0 :     error(msg);
     329             :   }
     330             : 
     331             :   // resize vector for sigma_mean history
     332          25 :   sigma_mean2_last_.resize(nsel_);
     333          75 :   for(unsigned i=0; i<nsel_; i++) sigma_mean2_last_[i].resize(narg);
     334          25 :   if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
     335          21 :     if(sigma_mean2_.size()==1) {
     336          21 :       double tmp = sigma_mean2_[0];
     337          21 :       sigma_mean2_.resize(narg, tmp);
     338           0 :     } else if(sigma_mean2_.size()>1&&sigma_mean2_.size()!=narg) {
     339           0 :       error("SIGMA_MEAN0 can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
     340             :     }
     341             :     // set the initial value for the history
     342        2508 :     for(unsigned i=0; i<nsel_; i++) for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j].push_back(sigma_mean2_[j]);
     343             :   } else {
     344             :     // set the initial value for the history
     345          23 :     for(unsigned i=0; i<nsel_; i++) for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j].push_back(sigma_mean2_[0]);
     346             :   }
     347             : 
     348             :   // set sigma_bias
     349          25 :   if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
     350          21 :     if(sigma_.size()==1) {
     351          21 :       double tmp = sigma_[0];
     352          21 :       sigma_.resize(narg, tmp);
     353           0 :     } else if(sigma_.size()>1&&sigma_.size()!=narg) {
     354           0 :       error("SIGMA0 can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
     355             :     }
     356             :   }
     357             : 
     358          25 :   double tmp_min = sigma_min_[0];
     359          50 :   sigma_min_.resize(sigma_.size(),tmp_min);
     360          25 :   double tmp_max = sigma_max_[0];
     361          50 :   sigma_max_.resize(sigma_.size(),tmp_max);
     362          25 :   double tmp_ds = Dsigma_[0];
     363          50 :   Dsigma_.resize(sigma_.size(),tmp_ds);
     364             : 
     365          50 :   IFile restart_sfile;
     366          25 :   restart_sfile.link(*this);
     367          50 :   if(getRestart()&&restart_sfile.FileExist(status_file_name_)) {
     368           0 :     firstTime = false;
     369           0 :     for(unsigned i=0; i<nsel_; i++) firstTimeW[i] = false;
     370           0 :     restart_sfile.open(status_file_name_);
     371           0 :     log.printf("  Restarting from %s\n", status_file_name_.c_str());
     372             :     double dummy;
     373           0 :     if(restart_sfile.scanField("time",dummy)) {
     374             :       // nsel
     375           0 :       for(unsigned i=0; i<sigma_mean2_last_.size(); i++) {
     376             :         std::string msg_i;
     377           0 :         Tools::convert(i,msg_i);
     378             :         // narg
     379           0 :         if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
     380           0 :           for(unsigned j=0; j<narg; ++j) {
     381             :             std::string msg_j;
     382           0 :             Tools::convert(j,msg_j);
     383           0 :             std::string msg = msg_i+"_"+msg_j;
     384             :             double read_sm;
     385           0 :             restart_sfile.scanField("sigma_mean_"+msg,read_sm);
     386           0 :             sigma_mean2_last_[i][j][0] = read_sm*read_sm;
     387             :           }
     388             :         }
     389           0 :         if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
     390             :           double read_sm;
     391           0 :           restart_sfile.scanField("sigma_mean_0_"+msg_i,read_sm);
     392           0 :           for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j][0] = read_sm*read_sm;
     393             :         }
     394             :       }
     395             : 
     396           0 :       for(unsigned i=0; i<sigma_.size(); ++i) {
     397             :         std::string msg;
     398           0 :         Tools::convert(i,msg);
     399           0 :         restart_sfile.scanField("sigma_"+msg,sigma_[i]);
     400             :       }
     401           0 :       if(noise_type_==GENERIC) {
     402           0 :         for(unsigned i=0; i<ftilde_.size(); ++i) {
     403             :           std::string msg;
     404           0 :           Tools::convert(i,msg);
     405           0 :           restart_sfile.scanField("ftilde_"+msg,ftilde_[i]);
     406             :         }
     407             :       }
     408           0 :       restart_sfile.scanField("scale0_",scale_);
     409           0 :       restart_sfile.scanField("offset0_",offset_);
     410             : 
     411           0 :       for(unsigned i=0; i<nsel_; i++) {
     412             :         std::string msg;
     413           0 :         Tools::convert(i,msg);
     414             :         double tmp_w;
     415           0 :         restart_sfile.scanField("weight_"+msg,tmp_w);
     416           0 :         if(master) {
     417           0 :           average_weights_[i][replica_] = tmp_w;
     418           0 :           if(nrep_>1) multi_sim_comm.Sum(&average_weights_[i][0], nrep_);
     419             :         }
     420           0 :         comm.Sum(&average_weights_[i][0], nrep_);
     421             :       }
     422             : 
     423             :     }
     424           0 :     restart_sfile.scanField();
     425           0 :     restart_sfile.close();
     426             :   }
     427             : 
     428          50 :   addComponentWithDerivatives("score");
     429          50 :   componentIsNotPeriodic("score");
     430          50 :   valueScore=getPntrToComponent("score");
     431             : 
     432          25 :   if(do_reweight_) {
     433          32 :     addComponent("biasDer");
     434          32 :     componentIsNotPeriodic("biasDer");
     435          32 :     addComponent("weight");
     436          32 :     componentIsNotPeriodic("weight");
     437             :   }
     438             : 
     439          25 :   if(doscale_) {
     440          24 :     addComponent("scale");
     441          24 :     componentIsNotPeriodic("scale");
     442          24 :     valueScale=getPntrToComponent("scale");
     443             :   }
     444             : 
     445          25 :   if(dooffset_) {
     446           4 :     addComponent("offset");
     447           4 :     componentIsNotPeriodic("offset");
     448           4 :     valueOffset=getPntrToComponent("offset");
     449             :   }
     450             : 
     451          25 :   if(dooffset_||doscale_) {
     452          28 :     addComponent("acceptScale");
     453          28 :     componentIsNotPeriodic("acceptScale");
     454          28 :     valueAcceptScale=getPntrToComponent("acceptScale");
     455             :   }
     456             : 
     457          25 :   if(noise_type_==GENERIC) {
     458           2 :     addComponent("acceptFT");
     459           2 :     componentIsNotPeriodic("acceptFT");
     460           2 :     valueAcceptFT=getPntrToComponent("acceptFT");
     461             :   }
     462             : 
     463          50 :   addComponent("acceptSigma");
     464          50 :   componentIsNotPeriodic("acceptSigma");
     465          50 :   valueAccept=getPntrToComponent("acceptSigma");
     466             : 
     467          25 :   if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
     468        7440 :     for(unsigned i=0; i<sigma_mean2_.size(); ++i) {
     469        2466 :       std::string num; Tools::convert(i,num);
     470        7398 :       addComponent("sigmaMean_"+num); componentIsNotPeriodic("sigmaMean_"+num);
     471        7398 :       valueSigmaMean.push_back(getPntrToComponent("sigmaMean_"+num));
     472        7398 :       getPntrToComponent("sigmaMean_"+num)->set(sqrt(sigma_mean2_[i]));
     473        7398 :       addComponent("sigma_"+num); componentIsNotPeriodic("sigma_"+num);
     474        7398 :       valueSigma.push_back(getPntrToComponent("sigma_"+num));
     475        7398 :       getPntrToComponent("sigma_"+num)->set(sigma_[i]);
     476        2466 :       if(noise_type_==GENERIC) {
     477           6 :         addComponent("ftilde_"+num); componentIsNotPeriodic("ftilde_"+num);
     478           6 :         valueFtilde.push_back(getPntrToComponent("ftilde_"+num));
     479             :       }
     480             :     }
     481             :   } else {
     482          12 :     addComponent("sigmaMean"); componentIsNotPeriodic("sigmaMean");
     483          12 :     valueSigmaMean.push_back(getPntrToComponent("sigmaMean"));
     484          12 :     getPntrToComponent("sigmaMean")->set(sqrt(sigma_mean2_[0]));
     485          12 :     addComponent("sigma"); componentIsNotPeriodic("sigma");
     486          12 :     valueSigma.push_back(getPntrToComponent("sigma"));
     487          12 :     getPntrToComponent("sigma")->set(sigma_[0]);
     488             :   }
     489             : 
     490          25 :   switch(noise_type_) {
     491           1 :   case GENERIC:
     492           1 :     log.printf("  with general metainference ");
     493           1 :     if(gen_likelihood_==LIKE_GAUSS) log.printf(" and a gaussian likelihood\n");
     494           0 :     else if(gen_likelihood_==LIKE_LOGN) log.printf(" and a log-normal likelihood\n");
     495           1 :     log.printf("  ensemble average parameter sampled with a step %lf of sigma_mean\n", Dftilde_);
     496             :     break;
     497           3 :   case GAUSS:
     498           3 :     log.printf("  with gaussian noise and a single noise parameter for all the data\n");
     499             :     break;
     500          12 :   case MGAUSS:
     501          12 :     log.printf("  with gaussian noise and a noise parameter for each data point\n");
     502             :     break;
     503           1 :   case OUTLIERS:
     504           1 :     log.printf("  with long tailed gaussian noise and a single noise parameter for all the data\n");
     505             :     break;
     506           8 :   case MOUTLIERS:
     507           8 :     log.printf("  with long tailed gaussian noise and a noise parameter for each data point\n");
     508             :     break;
     509             :   }
     510             : 
     511          25 :   if(doscale_) {
     512          12 :     log.printf("  sampling a common scaling factor with:\n");
     513          12 :     log.printf("    initial scale parameter %f\n",scale_);
     514          12 :     if(scale_prior_==SC_GAUSS) {
     515           0 :       log.printf("    gaussian prior with mean %f and width %f\n",scale_mu_,Dscale_);
     516             :     }
     517          12 :     if(scale_prior_==SC_FLAT) {
     518          12 :       log.printf("    flat prior between %f - %f\n",scale_min_,scale_max_);
     519          12 :       log.printf("    maximum MC move of scale parameter %f\n",Dscale_);
     520             :     }
     521             :   }
     522             : 
     523          25 :   if(dooffset_) {
     524           2 :     log.printf("  sampling a common offset with:\n");
     525           2 :     log.printf("    initial offset parameter %f\n",offset_);
     526           2 :     if(offset_prior_==SC_GAUSS) {
     527           0 :       log.printf("    gaussian prior with mean %f and width %f\n",offset_mu_,Doffset_);
     528             :     }
     529           2 :     if(offset_prior_==SC_FLAT) {
     530           2 :       log.printf("    flat prior between %f - %f\n",offset_min_,offset_max_);
     531           2 :       log.printf("    maximum MC move of offset parameter %f\n",Doffset_);
     532             :     }
     533             :   }
     534             : 
     535          25 :   log.printf("  number of experimental data points %u\n",narg);
     536          25 :   log.printf("  number of replicas %u\n",nrep_);
     537          25 :   log.printf("  initial data uncertainties");
     538        7460 :   for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f", sigma_[i]);
     539          25 :   log.printf("\n");
     540          50 :   log.printf("  minimum data uncertainty %f\n",sigma_min_[0]);
     541          50 :   log.printf("  maximum data uncertainty %f\n",sigma_max_[0]);
     542          50 :   log.printf("  maximum MC move of data uncertainty %f\n",Dsigma_[0]);
     543          25 :   log.printf("  temperature of the system %f\n",kbt_);
     544          25 :   log.printf("  MC steps %u\n",MCsteps_);
     545          25 :   log.printf("  MC stride %u\n",MCstride_);
     546          25 :   log.printf("  initial standard errors of the mean");
     547        7460 :   for(unsigned i=0; i<sigma_mean2_.size(); ++i) log.printf(" %f", sqrt(sigma_mean2_[i]));
     548          25 :   log.printf("\n");
     549             : 
     550             :   //resize the number of metainference derivatives and the number of back-calculated data
     551          25 :   metader_.resize(narg, 0.);
     552          25 :   calc_data_.resize(narg, 0.);
     553             : 
     554          75 :   log<<"  Bibliography "<<plumed.cite("Bonomi, Camilloni, Cavalli, Vendruscolo, Sci. Adv. 2, e150117 (2016)");
     555          57 :   if(do_reweight_) log<<plumed.cite("Bonomi, Camilloni, Vendruscolo, Sci. Rep. 6, 31232 (2016)");
     556          33 :   if(do_optsigmamean_>0) log<<plumed.cite("Loehr, Jussupow, Camilloni, J. Chem. Phys. 146, 165102 (2017)");
     557          75 :   log<<plumed.cite("Bonomi, Camilloni, Bioinformatics, 33, 3999 (2017)");
     558          25 :   log<<"\n";
     559          25 : }
     560             : 
     561           0 : void MetainferenceBase::Selector()
     562             : {
     563           0 :   iselect = 0;
     564             :   // set the value of selector for  REM-like stuff
     565           0 :   if(selector_.length()>0) iselect = static_cast<unsigned>(plumed.passMap[selector_]);
     566           0 : }
     567             : 
     568          12 : double MetainferenceBase::getEnergySP(const vector<double> &mean, const vector<double> &sigma,
     569             :                                       const double scale, const double offset)
     570             : {
     571          12 :   const double scale2 = scale*scale;
     572          12 :   const double sm2    = sigma_mean2_[0];
     573          12 :   const double ss2    = sigma[0]*sigma[0] + scale2*sm2;
     574          12 :   const double sss    = sigma[0]*sigma[0] + sm2;
     575             : 
     576             :   double ene = 0.0;
     577          36 :   #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
     578             :   {
     579          24 :     #pragma omp for reduction( + : ene)
     580          24 :     for(unsigned i=0; i<narg; ++i) {
     581         252 :       const double dev = scale*mean[i]-parameters[i]+offset;
     582          84 :       const double a2 = 0.5*dev*dev + ss2;
     583          84 :       ene += std::log(2.0*a2/(1.0-exp(-a2/sm2)));
     584             :     }
     585             :   }
     586             :   // add one single Jeffrey's prior and one normalisation per data point
     587          12 :   ene += 0.5*std::log(sss) + static_cast<double>(narg)*0.5*std::log(0.5*M_PI*M_PI/ss2);
     588          12 :   if(doscale_)  ene += 0.5*std::log(sss);
     589          12 :   if(dooffset_) ene += 0.5*std::log(sss);
     590          12 :   return kbt_ * ene;
     591             : }
     592             : 
     593        5328 : double MetainferenceBase::getEnergySPE(const vector<double> &mean, const vector<double> &sigma,
     594             :                                        const double scale, const double offset)
     595             : {
     596        5328 :   const double scale2 = scale*scale;
     597             :   double ene = 0.0;
     598       15984 :   #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
     599             :   {
     600       10656 :     #pragma omp for reduction( + : ene)
     601       10656 :     for(unsigned i=0; i<narg; ++i) {
     602       42624 :       const double sm2 = sigma_mean2_[i];
     603       42624 :       const double ss2 = sigma[i]*sigma[i] + scale2*sm2;
     604       21312 :       const double sss = sigma[i]*sigma[i] + sm2;
     605       63936 :       const double dev = scale*mean[i]-parameters[i]+offset;
     606       21312 :       const double a2  = 0.5*dev*dev + ss2;
     607       21312 :       ene += 0.5*std::log(sss) + 0.5*std::log(0.5*M_PI*M_PI/ss2) + std::log(2.0*a2/(1.0-exp(-a2/sm2)));
     608       42624 :       if(doscale_)  ene += 0.5*std::log(sss);
     609       21312 :       if(dooffset_) ene += 0.5*std::log(sss);
     610             :     }
     611             :   }
     612        5328 :   return kbt_ * ene;
     613             : }
     614             : 
/// Metainference energy for the GENERIC noise model, in which auxiliary
/// variables ftilde decouple the forward model from the experimental data.
/// Each data point contributes two gaussian terms: data vs ftilde (width
/// sigma, gaussian or log-normal likelihood depending on gen_likelihood_)
/// and ftilde vs the replica mean (width sigma_mean).
/// @param mean   replica-averaged calculated observables
/// @param ftilde auxiliary "true value" variables, sampled by MC
/// @param sigma  per-point uncertainty parameters
/// @return energy in units of kbt_
double MetainferenceBase::getEnergyMIGEN(const vector<double> &mean, const vector<double> &ftilde, const vector<double> &sigma,
    const double scale, const double offset)
{
  double ene = 0.0;
  #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
  {
    #pragma omp for reduction( + : ene)
    for(unsigned i=0; i<narg; ++i) {
      const double inv_sb2  = 1./(sigma[i]*sigma[i]);   // inverse variance, data term
      const double inv_sm2  = 1./sigma_mean2_[i];       // inverse variance, mean term
      // deviation of ftilde from the experimental datum, per likelihood type
      double devb = 0;
      if(gen_likelihood_==LIKE_GAUSS)     devb = scale*ftilde[i]-parameters[i]+offset;
      else if(gen_likelihood_==LIKE_LOGN) devb = std::log(scale*ftilde[i]/parameters[i]);
      // deviation of the replica mean from ftilde
      double devm = mean[i] - ftilde[i];
      // deviation + normalisation + jeffrey
      double normb = 0.;
      if(gen_likelihood_==LIKE_GAUSS)     normb = -0.5*std::log(0.5/M_PI*inv_sb2);
      else if(gen_likelihood_==LIKE_LOGN) normb = -0.5*std::log(0.5/M_PI*inv_sb2/(parameters[i]*parameters[i]));
      const double normm         = -0.5*std::log(0.5/M_PI*inv_sm2);
      const double jeffreys      = -0.5*std::log(2.*inv_sb2);
      ene += 0.5*devb*devb*inv_sb2 + 0.5*devm*devm*inv_sm2 + normb + normm + jeffreys;
      // extra Jeffreys priors when scale and/or offset are also sampled
      if(doscale_)  ene += jeffreys;
      if(dooffset_) ene += jeffreys;
    }
  }
  return kbt_ * ene;
}
     642             : 
     643         386 : double MetainferenceBase::getEnergyGJ(const vector<double> &mean, const vector<double> &sigma,
     644             :                                       const double scale, const double offset)
     645             : {
     646         386 :   const double scale2  = scale*scale;
     647         772 :   const double inv_s2  = 1./(sigma[0]*sigma[0] + scale2*sigma_mean2_[0]);
     648         386 :   const double inv_sss = 1./(sigma[0]*sigma[0] + sigma_mean2_[0]);
     649             : 
     650             :   double ene = 0.0;
     651        1158 :   #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
     652             :   {
     653         772 :     #pragma omp for reduction( + : ene)
     654         772 :     for(unsigned i=0; i<narg; ++i) {
     655        3366 :       double dev = scale*mean[i]-parameters[i]+offset;
     656        1122 :       ene += 0.5*dev*dev*inv_s2;
     657             :     }
     658             :   }
     659         386 :   const double normalisation = -0.5*std::log(0.5/M_PI*inv_s2);
     660         386 :   const double jeffreys = -0.5*std::log(2.*inv_sss);
     661             :   // add Jeffrey's prior in case one sigma for all data points + one normalisation per datapoint
     662         386 :   ene += jeffreys + static_cast<double>(narg)*normalisation;
     663         386 :   if(doscale_)  ene += jeffreys;
     664         386 :   if(dooffset_) ene += jeffreys;
     665             : 
     666         386 :   return kbt_ * ene;
     667             : }
     668             : 
     669         320 : double MetainferenceBase::getEnergyGJE(const vector<double> &mean, const vector<double> &sigma,
     670             :                                        const double scale, const double offset)
     671             : {
     672         320 :   const double scale2 = scale*scale;
     673             : 
     674             :   double ene = 0.0;
     675         960 :   #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
     676             :   {
     677         640 :     #pragma omp for reduction( + : ene)
     678         640 :     for(unsigned i=0; i<narg; ++i) {
     679       23424 :       const double inv_s2  = 1./(sigma[i]*sigma[i] + scale2*sigma_mean2_[i]);
     680        7808 :       const double inv_sss = 1./(sigma[i]*sigma[i] + sigma_mean2_[i]);
     681       23424 :       double dev = scale*mean[i]-parameters[i]+offset;
     682             :       // deviation + normalisation + jeffrey
     683        7808 :       const double normalisation = -0.5*std::log(0.5/M_PI*inv_s2);
     684        7808 :       const double jeffreys      = -0.5*std::log(2.*inv_sss);
     685        7808 :       ene += 0.5*dev*dev*inv_s2 + normalisation + jeffreys;
     686        8384 :       if(doscale_)  ene += jeffreys;
     687        7808 :       if(dooffset_) ene += jeffreys;
     688             :     }
     689             :   }
     690         320 :   return kbt_ * ene;
     691             : }
     692             : 
/// Gibbs-like Monte Carlo sampling of the metainference nuisance parameters
/// (ftilde for GENERIC, then scale/offset, then the sigmas), run every
/// MCstride_ MD steps. All replicas draw identical random numbers from the
/// shared generators, so the scale/offset move — which needs the energy
/// summed over replicas — stays synchronised without broadcasting moves.
/// Updates sigma_, scale_, offset_, ftilde_ and the output components.
/// @param mean_ replica-averaged observables at the current MD step
void MetainferenceBase::doMonteCarlo(const vector<double> &mean_)
{
  // skip when not on an MC stride, or while a replica exchange is in flight
  if(getStep()%MCstride_!=0||getExchangeStep()) return;

  // calculate old energy with the updated coordinates
  double old_energy=0.;

  switch(noise_type_) {
  case GAUSS:
    old_energy = getEnergyGJ(mean_,sigma_,scale_,offset_);
    break;
  case MGAUSS:
    old_energy = getEnergyGJE(mean_,sigma_,scale_,offset_);
    break;
  case OUTLIERS:
    old_energy = getEnergySP(mean_,sigma_,scale_,offset_);
    break;
  case MOUTLIERS:
    old_energy = getEnergySPE(mean_,sigma_,scale_,offset_);
    break;
  case GENERIC:
    old_energy = getEnergyMIGEN(mean_,ftilde_,sigma_,scale_,offset_);
    break;
  }

  // Create vector of random sigma indices (only used with MCchunksize_ > 0,
  // where each MC step moves a different random chunk of the sigmas)
  vector<unsigned> indices;
  if (MCchunksize_ > 0) {
    for (unsigned j=0; j<sigma_.size(); j++) {
      indices.push_back(j);
    }
    random[2].Shuffle(indices);
  }
  bool breaknow = false;

  // cycle on MC steps
  for(unsigned i=0; i<MCsteps_; ++i) {

    MCtrial_++;

    // propose move for ftilde
    vector<double> new_ftilde(sigma_.size());
    new_ftilde = ftilde_;

    if(noise_type_==GENERIC) {
      // propose a collective move of all the ftilde variables,
      // each with a step proportional to its sigma_mean
      for(unsigned j=0; j<sigma_.size(); j++) {
        const double r3 = random[0].Gaussian();
        const double ds3 = Dftilde_*sqrt(sigma_mean2_[j])*r3;
        new_ftilde[j] = ftilde_[j] + ds3;
      }
      // calculate new energy
      double new_energy = getEnergyMIGEN(mean_,new_ftilde,sigma_,scale_,offset_);

      // Metropolis accept or reject
      const double delta = ( new_energy - old_energy ) / kbt_;
      // if delta is negative always accept move
      if( delta <= 0.0 ) {
        old_energy = new_energy;
        ftilde_ = new_ftilde;
        MCacceptFT_++;
        // otherwise extract random number
      } else {
        const double s = random[0].RandU01();
        if( s < exp(-delta) ) {
          old_energy = new_energy;
          ftilde_ = new_ftilde;
          MCacceptFT_++;
        }
      }
    }

    // propose move for scale and/or offset
    double new_scale = scale_;
    double new_offset = offset_;
    if(doscale_||dooffset_) {
      if(doscale_) {
        if(scale_prior_==SC_FLAT) {
          const double r1 = random[1].Gaussian();
          const double ds1 = Dscale_*r1;
          new_scale += ds1;
          // reflect at the boundaries to keep the move symmetric
          if(new_scale > scale_max_) {new_scale = 2.0 * scale_max_ - new_scale;}
          if(new_scale < scale_min_) {new_scale = 2.0 * scale_min_ - new_scale;}
        } else {
          // gaussian prior: drift towards the prior mean plus random kick
          const double r1 = random[1].Gaussian();
          const double ds1 = 0.5*(scale_mu_-new_scale)+Dscale_*exp(1)/M_PI*r1;
          new_scale += ds1;
        }
      }

      if(dooffset_) {
        if(offset_prior_==SC_FLAT) {
          const double r1 = random[1].Gaussian();
          const double ds1 = Doffset_*r1;
          new_offset += ds1;
          // reflect at the boundaries to keep the move symmetric
          if(new_offset > offset_max_) {new_offset = 2.0 * offset_max_ - new_offset;}
          if(new_offset < offset_min_) {new_offset = 2.0 * offset_min_ - new_offset;}
        } else {
          // gaussian prior: drift towards the prior mean plus random kick
          const double r1 = random[1].Gaussian();
          const double ds1 = 0.5*(offset_mu_-new_offset)+Doffset_*exp(1)/M_PI*r1;
          new_offset += ds1;
        }
      }

      // calculate new energy
      double new_energy = 0.;

      switch(noise_type_) {
      case GAUSS:
        new_energy = getEnergyGJ(mean_,sigma_,new_scale,new_offset);
        break;
      case MGAUSS:
        new_energy = getEnergyGJE(mean_,sigma_,new_scale,new_offset);
        break;
      case OUTLIERS:
        new_energy = getEnergySP(mean_,sigma_,new_scale,new_offset);
        break;
      case MOUTLIERS:
        new_energy = getEnergySPE(mean_,sigma_,new_scale,new_offset);
        break;
      case GENERIC:
        new_energy = getEnergyMIGEN(mean_,ftilde_,sigma_,new_scale,new_offset);
        break;
      }
      // scale/offset are shared across replicas, so accept/reject must use
      // the total energy summed over all replicas (masters sum across
      // replicas, then each master broadcasts within its own replica)
      vector<double> totenergies(2);
      if(master) {
        totenergies[0] = old_energy;
        totenergies[1] = new_energy;
        if(nrep_>1) multi_sim_comm.Sum(totenergies);
      } else {
        totenergies[0] = 0;
        totenergies[1] = 0;
      }
      comm.Sum(totenergies);

      // Metropolis accept or reject on the replica-summed energies
      const double delta = ( totenergies[1] - totenergies[0] ) / kbt_;
      // if delta is negative always accept move
      if( delta <= 0.0 ) {
        old_energy = new_energy;
        scale_ = new_scale;
        offset_ = new_offset;
        MCacceptScale_++;
        // otherwise extract random number
      } else {
        double s = random[1].RandU01();
        if( s < exp(-delta) ) {
          old_energy = new_energy;
          scale_ = new_scale;
          offset_ = new_offset;
          MCacceptScale_++;
        }
      }
    }

    // propose move for sigma
    vector<double> new_sigma(sigma_.size());
    new_sigma = sigma_;

    // change MCchunksize_ sigmas
    if (MCchunksize_ > 0) {
      if ((MCchunksize_ * i) >= sigma_.size()) {
        // This means we are not moving any sigma, so we should break immediately
        breaknow = true;
      }

      // change random sigmas
      for(unsigned j=0; j<MCchunksize_; j++) {
        const unsigned shuffle_index = j + MCchunksize_ * i;
        if (shuffle_index >= sigma_.size()) {
          // Going any further will segfault but we should still evaluate the sigmas we changed
          break;
        }
        const unsigned index = indices[shuffle_index];
        const double r2 = random[0].Gaussian();
        const double ds2 = Dsigma_[index]*r2;
        new_sigma[index] = sigma_[index] + ds2;
        // reflect at the boundaries to keep the move symmetric
        if(new_sigma[index] > sigma_max_[index]) {new_sigma[index] = 2.0 * sigma_max_[index] - new_sigma[index];}
        if(new_sigma[index] < sigma_min_[index]) {new_sigma[index] = 2.0 * sigma_min_[index] - new_sigma[index];}
      }
    } else {
      // change all sigmas
      for(unsigned j=0; j<sigma_.size(); j++) {
        const double r2 = random[0].Gaussian();
        const double ds2 = Dsigma_[j]*r2;
        new_sigma[j] = sigma_[j] + ds2;
        // reflect at the boundaries to keep the move symmetric
        if(new_sigma[j] > sigma_max_[j]) {new_sigma[j] = 2.0 * sigma_max_[j] - new_sigma[j];}
        if(new_sigma[j] < sigma_min_[j]) {new_sigma[j] = 2.0 * sigma_min_[j] - new_sigma[j];}
      }
    }

    if (breaknow) {
      // We didnt move any sigmas, so no sense in evaluating anything
      break;
    }

    // calculate new energy
    double new_energy=0.;
    switch(noise_type_) {
    case GAUSS:
      new_energy = getEnergyGJ(mean_,new_sigma,scale_,offset_);
      break;
    case MGAUSS:
      new_energy = getEnergyGJE(mean_,new_sigma,scale_,offset_);
      break;
    case OUTLIERS:
      new_energy = getEnergySP(mean_,new_sigma,scale_,offset_);
      break;
    case MOUTLIERS:
      new_energy = getEnergySPE(mean_,new_sigma,scale_,offset_);
      break;
    case GENERIC:
      new_energy = getEnergyMIGEN(mean_,ftilde_,new_sigma,scale_,offset_);
      break;
    }

    // Metropolis accept or reject
    const double delta = ( new_energy - old_energy ) / kbt_;
    // if delta is negative always accept move
    if( delta <= 0.0 ) {
      old_energy = new_energy;
      sigma_ = new_sigma;
      MCaccept_++;
      // otherwise extract random number
    } else {
      const double s = random[0].RandU01();
      if( s < exp(-delta) ) {
        old_energy = new_energy;
        sigma_ = new_sigma;
        MCaccept_++;
      }
    }

  }
  /* save the result of the sampling: acceptance rates and current
     nuisance-parameter values are exposed as output components */
  double accept = static_cast<double>(MCaccept_) / static_cast<double>(MCtrial_);
  valueAccept->set(accept);
  if(doscale_) valueScale->set(scale_);
  if(dooffset_) valueOffset->set(offset_);
  if(doscale_||dooffset_) {
    accept = static_cast<double>(MCacceptScale_) / static_cast<double>(MCtrial_);
    valueAcceptScale->set(accept);
  }
  for(unsigned i=0; i<sigma_.size(); i++) valueSigma[i]->set(sigma_[i]);
  if(noise_type_==GENERIC) {
    accept = static_cast<double>(MCacceptFT_) / static_cast<double>(MCtrial_);
    valueAcceptFT->set(accept);
    for(unsigned i=0; i<sigma_.size(); i++) valueFtilde[i]->set(ftilde_[i]);
  }
}
     948             : 
/*
   The following energy-force functions omit the normalisation terms and the
   Jeffreys priors, because those terms do not contribute to the forces; the
   correct Metainference energy is the one computed in the Monte Carlo routine.
*/
     954             : 
/// Energy and forces for the OUTLIERS noise model (single shared sigma).
/// The master rank computes per-point force prefactors f[i] and the energy
/// (packed in f[narg]); these are summed across replicas and then broadcast
/// within each replica before applying derivatives.
/// @param mean    replica-averaged observables
/// @param dmean_x derivative of the mean w.r.t. the local observable
/// @param dmean_b derivative of the mean w.r.t. the reweighting bias
/// @return energy in units of kbt_ (priors/normalisation omitted, see above)
double MetainferenceBase::getEnergyForceSP(const vector<double> &mean, const vector<double> &dmean_x,
    const vector<double> &dmean_b)
{
  const double scale2 = scale_*scale_;
  const double sm2    = sigma_mean2_[0];
  const double ss2    = sigma_[0]*sigma_[0] + scale2*sm2;
  // f[0..narg-1]: force prefactors; f[narg]: energy (packed for one MPI sum)
  vector<double> f(narg+1,0);

  if(master) {
    double omp_ene=0.;
    #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(omp_ene)
    {
      #pragma omp for reduction( + : omp_ene)
      for(unsigned i=0; i<narg; ++i) {
        const double dev = scale_*mean[i]-parameters[i]+offset_;
        const double a2 = 0.5*dev*dev + ss2;
        // d/d(dev) of log(2*a2/(1-exp(-a2/sm2))), via t = exp(-a2/sm2):
        const double t = exp(-a2/sm2);
        const double dt = 1./t;
        const double it = 1./(1.-t);    // 1/(1-t)
        const double dit = 1./(1.-dt);  // 1/(1-1/t) = -t/(1-t)
        omp_ene += std::log(2.*a2*it);
        f[i] = -scale_*dev*(dit/sm2 + 1./a2);
      }
    }
    f[narg] = omp_ene;
    // collect contribution to forces and energy from other replicas
    if(nrep_>1) multi_sim_comm.Sum(&f[0],narg+1);
  }
  // intra-replica summation
  comm.Sum(&f[0],narg+1);

  const double ene = f[narg];
  double w_tmp = 0.;
  for(unsigned i=0; i<narg; ++i) {
    // chain rule: force on the observable and accumulation of the bias derivative
    setMetaDer(i, -kbt_*f[i]*dmean_x[i]);
    w_tmp += kbt_*f[i]*dmean_b[i];
  }

  if(do_reweight_) {
    setArgDerivatives(valueScore, -w_tmp);
    getPntrToComponent("biasDer")->set(-w_tmp);
  }

  return kbt_*ene;
}
    1000             : 
/// Energy and forces for the MOUTLIERS noise model (one sigma per data
/// point). Same structure as getEnergyForceSP, but the variances are
/// evaluated per point inside the loop.
/// @param mean    replica-averaged observables
/// @param dmean_x derivative of the mean w.r.t. the local observable
/// @param dmean_b derivative of the mean w.r.t. the reweighting bias
/// @return energy in units of kbt_ (priors/normalisation omitted, see above)
double MetainferenceBase::getEnergyForceSPE(const vector<double> &mean, const vector<double> &dmean_x,
    const vector<double> &dmean_b)
{
  const double scale2 = scale_*scale_;
  // f[0..narg-1]: force prefactors; f[narg]: energy (packed for one MPI sum)
  vector<double> f(narg+1,0);

  if(master) {
    double omp_ene = 0;
    #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(omp_ene)
    {
      #pragma omp for reduction( + : omp_ene)
      for(unsigned i=0; i<narg; ++i) {
        const double sm2 = sigma_mean2_[i];
        const double ss2 = sigma_[i]*sigma_[i] + scale2*sm2;
        const double dev = scale_*mean[i]-parameters[i]+offset_;
        const double a2  = 0.5*dev*dev + ss2;
        // d/d(dev) of log(2*a2/(1-exp(-a2/sm2))), via t = exp(-a2/sm2):
        const double t   = exp(-a2/sm2);
        const double dt  = 1./t;
        const double it  = 1./(1.-t);   // 1/(1-t)
        const double dit = 1./(1.-dt);  // 1/(1-1/t) = -t/(1-t)
        omp_ene += std::log(2.*a2*it);
        f[i] = -scale_*dev*(dit/sm2 + 1./a2);
      }
    }
    f[narg] = omp_ene;
    // collect contribution to forces and energy from other replicas
    if(nrep_>1) multi_sim_comm.Sum(&f[0],narg+1);
  }
  // intra-replica summation
  comm.Sum(&f[0],narg+1);

  const double ene = f[narg];
  double w_tmp = 0.;
  for(unsigned i=0; i<narg; ++i) {
    // chain rule: force on the observable and accumulation of the bias derivative
    setMetaDer(i, -kbt_ * dmean_x[i] * f[i]);
    w_tmp += kbt_ * dmean_b[i] *f[i];
  }

  if(do_reweight_) {
    setArgDerivatives(valueScore, -w_tmp);
    getPntrToComponent("biasDer")->set(-w_tmp);
  }

  return kbt_*ene;
}
    1045             : 
    1046         187 : double MetainferenceBase::getEnergyForceGJ(const vector<double> &mean, const vector<double> &dmean_x,
    1047             :     const vector<double> &dmean_b)
    1048             : {
    1049         187 :   const double scale2 = scale_*scale_;
    1050         187 :   double inv_s2=0.;
    1051             : 
    1052         187 :   if(master) {
    1053         374 :     inv_s2 = 1./(sigma_[0]*sigma_[0] + scale2*sigma_mean2_[0]);
    1054         187 :     if(nrep_>1) multi_sim_comm.Sum(inv_s2);
    1055             :   }
    1056         187 :   comm.Sum(inv_s2);
    1057             : 
    1058             :   double ene   = 0.;
    1059         187 :   double w_tmp = 0.;
    1060         561 :   #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene,w_tmp)
    1061             :   {
    1062         374 :     #pragma omp for reduction( + : ene,w_tmp)
    1063         374 :     for(unsigned i=0; i<narg; ++i) {
    1064        1647 :       const double dev = scale_*mean[i]-parameters[i]+offset_;
    1065         549 :       const double mult = dev*scale_*inv_s2;
    1066         549 :       ene += 0.5*dev*dev*inv_s2;
    1067        1098 :       setMetaDer(i, kbt_*dmean_x[i]*mult);
    1068        1098 :       w_tmp += kbt_*dmean_b[i]*mult;
    1069             :     }
    1070             :   }
    1071             : 
    1072         187 :   if(do_reweight_) {
    1073           0 :     setArgDerivatives(valueScore, w_tmp);
    1074           0 :     getPntrToComponent("biasDer")->set(w_tmp);
    1075             :   }
    1076             : 
    1077         187 :   return kbt_*ene;
    1078             : }
    1079             : 
    1080         136 : double MetainferenceBase::getEnergyForceGJE(const vector<double> &mean, const vector<double> &dmean_x,
    1081             :     const vector<double> &dmean_b)
    1082             : {
    1083         136 :   const double scale2 = scale_*scale_;
    1084         272 :   vector<double> inv_s2(sigma_.size(),0.);
    1085             : 
    1086         136 :   if(master) {
    1087        9656 :     for(unsigned i=0; i<sigma_.size(); ++i) inv_s2[i] = 1./(sigma_[i]*sigma_[i] + scale2*sigma_mean2_[i]);
    1088         136 :     if(nrep_>1) multi_sim_comm.Sum(&inv_s2[0],sigma_.size());
    1089             :   }
    1090         408 :   comm.Sum(&inv_s2[0],sigma_.size());
    1091             : 
    1092             :   double ene   = 0.;
    1093         136 :   double w_tmp = 0.;
    1094         408 :   #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene,w_tmp)
    1095             :   {
    1096         272 :     #pragma omp for reduction( + : ene,w_tmp)
    1097         272 :     for(unsigned i=0; i<narg; ++i) {
    1098       11424 :       const double dev  = scale_*mean[i]-parameters[i]+offset_;
    1099        7616 :       const double mult = dev*scale_*inv_s2[i];
    1100        3808 :       ene += 0.5*dev*dev*inv_s2[i];
    1101        7616 :       setMetaDer(i, kbt_*dmean_x[i]*mult);
    1102        7616 :       w_tmp += kbt_*dmean_b[i]*mult;
    1103             :     }
    1104             :   }
    1105             : 
    1106         136 :   if(do_reweight_) {
    1107          52 :     setArgDerivatives(valueScore, w_tmp);
    1108         104 :     getPntrToComponent("biasDer")->set(w_tmp);
    1109             :   }
    1110             : 
    1111         272 :   return kbt_*ene;
    1112             : }
    1113             : 
// Energy and forces for the GENERIC noise model: deviations are measured
// against the auxiliary variables ftilde_ (sampled by Monte Carlo) rather
// than the experimental parameters. Returns kbt_ times the score.
// mean:    replica-averaged forward-model data
// dmean_x: derivative of the mean w.r.t. the arguments
// dmean_b: derivative of the mean w.r.t. the bias
double MetainferenceBase::getEnergyForceMIGEN(const vector<double> &mean, const vector<double> &dmean_x, const vector<double> &dmean_b)
{
  vector<double> inv_s2(sigma_.size(),0.);
  vector<double> dev(sigma_.size(),0.);
  vector<double> dev2(sigma_.size(),0.);

  // precision is 1/sigma_mean^2 on every rank; deviations from ftilde_
  // are computed on the replica masters only, then summed below
  for(unsigned i=0; i<sigma_.size(); ++i) {
    inv_s2[i]   = 1./sigma_mean2_[i];
    if(master) {
      dev[i]  = (mean[i]-ftilde_[i]);
      dev2[i] = dev[i]*dev[i];
    }
  }
  // sum deviations across replicas (masters only), then broadcast to the
  // intra-replica ranks via comm.Sum (non-masters contribute zeros)
  if(master&&nrep_>1) {
    multi_sim_comm.Sum(&dev[0],dev.size());
    multi_sim_comm.Sum(&dev2[0],dev2.size());
  }
  comm.Sum(&dev[0],dev.size());
  comm.Sum(&dev2[0],dev2.size());

  double dene_b = 0.;
  double ene    = 0.;
  #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene,dene_b)
  {
    #pragma omp for reduction( + : ene,dene_b)
    for(unsigned i=0; i<narg; ++i) {
      // harmonic restraint of the mean to ftilde_, plus its derivatives
      const double dene_x  = kbt_*inv_s2[i]*dmean_x[i]*dev[i];
      dene_b += kbt_*inv_s2[i]*dmean_b[i]*dev[i];
      ene += 0.5*dev2[i]*inv_s2[i];
      setMetaDer(i, dene_x);
    }
  }

  // when reweighting, also propagate the derivative w.r.t. the bias argument
  if(do_reweight_) {
    setArgDerivatives(valueScore, dene_b);
    getPntrToComponent("biasDer")->set(dene_b);
  }

  return kbt_*ene;
}
    1154             : 
// Compute this replica's statistical weight (fact) and the variance of the
// weights across replicas (var_fact). With reweighting the weights are
// Boltzmann factors of the bias (argument 0), exponentially averaged in
// time with decay_w_; otherwise all replicas get the uniform weight 1/nrep_.
// NOTE: var_fact is accumulated with += and is expected to be zeroed by the
// caller (getScore does so).
void MetainferenceBase::get_weights(double &fact, double &var_fact)
{
  const double dnrep    = static_cast<double>(nrep_);
  const double ave_fact = 1.0/dnrep;

  double norm = 0.0;

  // calculate the weights either from BIAS
  if(do_reweight_) {
    // gather the bias of every replica: each master fills its own slot,
    // the slots are summed across replicas, then broadcast within a replica
    vector<double> bias(nrep_,0);
    if(master) {
      bias[replica_] = getArgument(0);
      if(nrep_>1) multi_sim_comm.Sum(&bias[0], nrep_);
    }
    comm.Sum(&bias[0], nrep_);

    // subtract the maximum before exponentiating for numerical stability
    const double maxbias = *(std::max_element(bias.begin(), bias.end()));
    for(unsigned i=0; i<nrep_; ++i) {
      bias[i] = exp((bias[i]-maxbias)/kbt_);
      norm   += bias[i];
    }

    // accumulate weights
    if(!firstTimeW[iselect]) {
      // exponential moving average with decay constant decay_w_
      for(unsigned i=0; i<nrep_; ++i) {
        const double delta=bias[i]/norm-average_weights_[iselect][i];
        average_weights_[iselect][i]+=decay_w_*delta;
      }
    } else {
      // first call for this selector: seed the average with the current weights
      firstTimeW[iselect] = false;
      for(unsigned i=0; i<nrep_; ++i) {
        average_weights_[iselect][i] = bias[i]/norm;
      }
    }

    // set average back into bias and set norm to one
    for(unsigned i=0; i<nrep_; ++i) bias[i] = average_weights_[iselect][i];
    // set local weight, norm and weight variance
    fact = bias[replica_];
    norm = 1.0;
    for(unsigned i=0; i<nrep_; ++i) var_fact += (bias[i]/norm-ave_fact)*(bias[i]/norm-ave_fact);
    getPntrToComponent("weight")->set(fact);
  } else {
    // or arithmetic ones
    norm = dnrep;
    fact = 1.0/norm;
  }
}
    1203             : 
// Update sigma_mean2_, the squared standard error of the replica-averaged
// data. With SIGMA_MEAN optimisation enabled the instantaneous estimate is
// accumulated in a history (sigma_mean2_last_) and the maximum is used;
// otherwise the stored first history entry is used unchanged.
// fact/var_fact come from get_weights(); mean from replica_averaging().
void MetainferenceBase::get_sigma_mean(const double fact, const double var_fact, const vector<double> &mean)
{
  const double dnrep    = static_cast<double>(nrep_);
  const double ave_fact = 1.0/dnrep;

  vector<double> sigma_mean2_tmp(sigma_mean2_.size());

  if(do_optsigmamean_>0) {
    // remove first entry of the history vector
    if(sigma_mean2_last_[iselect][0].size()==optsigmamean_stride_&&optsigmamean_stride_>0)
      for(unsigned i=0; i<narg; ++i) sigma_mean2_last_[iselect][i].erase(sigma_mean2_last_[iselect][i].begin());
    /* this is the current estimate of sigma mean for each argument
       there is one of this per argument in any case  because it is
       the maximum among these to be used in case of GAUSS/OUTLIER */
    vector<double> sigma_mean2_now(narg,0);
    if(do_reweight_) {
      // weighted-ensemble variance estimate: combines the spread of the
      // weighted data with the variance of the weights themselves
      if(master) {
        for(unsigned i=0; i<narg; ++i) {
          double tmp1 = (fact*getCalcData(i)-ave_fact*mean[i])*(fact*getCalcData(i)-ave_fact*mean[i]);
          double tmp2 = -2.*mean[i]*(fact-ave_fact)*(fact*getCalcData(i)-ave_fact*mean[i]);
          sigma_mean2_now[i] = tmp1 + tmp2;
        }
        if(nrep_>1) multi_sim_comm.Sum(&sigma_mean2_now[0], narg);
      }
      comm.Sum(&sigma_mean2_now[0], narg);
      // Bessel-style correction and contribution from the weight variance
      for(unsigned i=0; i<narg; ++i) sigma_mean2_now[i] = dnrep/(dnrep-1.)*(sigma_mean2_now[i] + mean[i]*mean[i]*var_fact);
    } else {
      // unweighted case: plain variance of the data around the mean
      if(master) {
        for(unsigned i=0; i<narg; ++i) {
          double tmp  = getCalcData(i)-mean[i];
          sigma_mean2_now[i] = fact*tmp*tmp;
        }
        if(nrep_>1) multi_sim_comm.Sum(&sigma_mean2_now[0], narg);
      }
      comm.Sum(&sigma_mean2_now[0], narg);
      for(unsigned i=0; i<narg; ++i) sigma_mean2_now[i] /= dnrep;
    }

    // add sigma_mean2 to history
    if(optsigmamean_stride_>0) {
      // sliding window: append (oldest entry was removed above)
      for(unsigned i=0; i<narg; ++i) sigma_mean2_last_[iselect][i].push_back(sigma_mean2_now[i]);
    } else {
      // single-entry history: keep the running maximum
      for(unsigned i=0; i<narg; ++i) if(sigma_mean2_now[i] > sigma_mean2_last_[iselect][i][0]) sigma_mean2_last_[iselect][i][0] = sigma_mean2_now[i];
    }

    if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
      // one sigma_mean per data point
      for(unsigned i=0; i<narg; ++i) {
        /* set to the maximum in history vector */
        sigma_mean2_tmp[i] = *max_element(sigma_mean2_last_[iselect][i].begin(), sigma_mean2_last_[iselect][i].end());
        /* the standard error of the mean */
        valueSigmaMean[i]->set(sqrt(sigma_mean2_tmp[i]));
        if(noise_type_==GENERIC) {
          // GENERIC also uses sigma_mean as a lower bound for sigma
          sigma_min_[i] = sqrt(sigma_mean2_tmp[i]);
          if(sigma_[i] < sigma_min_[i]) sigma_[i] = sigma_min_[i];
        }
      }
    } else if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
      // single-sigma models: use the maximum over all data points
      // find maximum for each data point
      vector <double> max_values;
      for(unsigned i=0; i<narg; ++i) max_values.push_back(*max_element(sigma_mean2_last_[iselect][i].begin(), sigma_mean2_last_[iselect][i].end()));
      // find maximum across data points
      const double max_now = *max_element(max_values.begin(), max_values.end());
      // set new value
      sigma_mean2_tmp[0] = max_now;
      valueSigmaMean[0]->set(sqrt(sigma_mean2_tmp[0]));
    }
    // endif sigma optimization
  } else {
    // no optimisation: reuse the stored (e.g. read-from-status) values
    if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
      for(unsigned i=0; i<narg; ++i) {
        sigma_mean2_tmp[i] = sigma_mean2_last_[iselect][i][0];
        valueSigmaMean[i]->set(sqrt(sigma_mean2_tmp[i]));
      }
    } else if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
      sigma_mean2_tmp[0] = sigma_mean2_last_[iselect][0][0];
      valueSigmaMean[0]->set(sqrt(sigma_mean2_tmp[0]));
    }
  }

  sigma_mean2_ = sigma_mean2_tmp;
}
    1285             : 
    1286        2117 : void MetainferenceBase::replica_averaging(const double fact, vector<double> &mean, vector<double> &dmean_b)
    1287             : {
    1288        2117 :   if(master) {
    1289       19374 :     for(unsigned i=0; i<narg; ++i) mean[i] = fact*calc_data_[i];
    1290        2117 :     if(nrep_>1) multi_sim_comm.Sum(&mean[0], narg);
    1291             :   }
    1292        4234 :   comm.Sum(&mean[0], narg);
    1293             :   // set the derivative of the mean with respect to the bias
    1294       48225 :   for(unsigned i=0; i<narg; ++i) dmean_b[i] = fact/kbt_*(calc_data_[i]-mean[i])*decay_w_;
    1295             : 
    1296             :   // this is only for generic metainference
    1297        2117 :   if(firstTime) {ftilde_ = mean; firstTime = false;}
    1298        2117 : }
    1299             : 
    1300        2117 : double MetainferenceBase::getScore()
    1301             : {
    1302             :   /* Metainference */
    1303             :   /* 1) collect weights */
    1304        2117 :   double fact = 0.;
    1305        2117 :   double var_fact = 0.;
    1306        2117 :   get_weights(fact, var_fact);
    1307             : 
    1308             :   /* 2) calculate average */
    1309        4234 :   vector<double> mean(getNarg(),0);
    1310             :   // this is the derivative of the mean with respect to the argument
    1311        2117 :   vector<double> dmean_x(getNarg(),fact);
    1312             :   // this is the derivative of the mean with respect to the bias
    1313        4234 :   vector<double> dmean_b(getNarg(),0);
    1314             :   // calculate it
    1315        2117 :   replica_averaging(fact, mean, dmean_b);
    1316             : 
    1317             :   /* 3) calculates parameters */
    1318        2117 :   get_sigma_mean(fact, var_fact, mean);
    1319             : 
    1320             :   /* 4) run monte carlo */
    1321        2117 :   doMonteCarlo(mean);
    1322             : 
    1323             :   // calculate bias and forces
    1324             :   double ene = 0;
    1325        2117 :   switch(noise_type_) {
    1326         187 :   case GAUSS:
    1327         187 :     ene = getEnergyForceGJ(mean, dmean_x, dmean_b);
    1328             :     break;
    1329         136 :   case MGAUSS:
    1330         136 :     ene = getEnergyForceGJE(mean, dmean_x, dmean_b);
    1331             :     break;
    1332           6 :   case OUTLIERS:
    1333           6 :     ene = getEnergyForceSP(mean, dmean_x, dmean_b);
    1334             :     break;
    1335        1776 :   case MOUTLIERS:
    1336        1776 :     ene = getEnergyForceSPE(mean, dmean_x, dmean_b);
    1337             :     break;
    1338          12 :   case GENERIC:
    1339          12 :     ene = getEnergyForceMIGEN(mean, dmean_x, dmean_b);
    1340             :     break;
    1341             :   }
    1342             : 
    1343        2117 :   return ene;
    1344             : }
    1345             : 
// Dump the restart state (sigma_mean, sigma, ftilde, scale, offset, weights)
// to the status file, rewinding first so the file always holds a single,
// most-recent snapshot. No-op when the score is not active.
void MetainferenceBase::writeStatus()
{
  if(!doscore_) return;
  sfile_.rewind();
  sfile_.printField("time",getTimeStep()*getStep());
  //nsel
  for(unsigned i=0; i<sigma_mean2_last_.size(); i++) {
    std::string msg_i,msg_j;
    Tools::convert(i,msg_i);
    vector <double> max_values;
    //narg
    for(unsigned j=0; j<narg; ++j) {
      Tools::convert(j,msg_j);
      std::string msg = msg_i+"_"+msg_j;
      if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
        // per-datapoint models: write the maximum of the history for each point
        sfile_.printField("sigma_mean_"+msg,sqrt(*max_element(sigma_mean2_last_[i][j].begin(), sigma_mean2_last_[i][j].end())));
      } else {
        // find maximum for each data point
        max_values.push_back(*max_element(sigma_mean2_last_[i][j].begin(), sigma_mean2_last_[i][j].end()));
      }
    }
    if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
      // single-sigma models: one field holding the maximum across data points
      // find maximum across data points
      const double max_now = sqrt(*max_element(max_values.begin(), max_values.end()));
      sfile_.printField("sigma_mean_0_"+msg_i,max_now);
    }
  }
  // current uncertainty parameters
  for(unsigned i=0; i<sigma_.size(); ++i) {
    std::string msg;
    Tools::convert(i,msg);
    sfile_.printField("sigma_"+msg,sigma_[i]);
  }
  // auxiliary variables, only meaningful for the GENERIC noise model
  if(noise_type_==GENERIC) {
    for(unsigned i=0; i<ftilde_.size(); ++i) {
      std::string msg;
      Tools::convert(i,msg);
      sfile_.printField("ftilde_"+msg,ftilde_[i]);
    }
  }
  sfile_.printField("scale0_",scale_);
  sfile_.printField("offset0_",offset_);
  // this replica's accumulated weight, one field per selector
  for(unsigned i=0; i<average_weights_.size(); i++) {
    std::string msg_i;
    Tools::convert(i,msg_i);
    sfile_.printField("weight_"+msg_i,average_weights_[i][replica_]);
  }
  sfile_.printField();
  sfile_.flush();
}
    1395             : 
    1396             : }
    1397        4839 : }
    1398             : 

Generated by: LCOV version 1.13