Bayesian Analysis of Neutron Star Mass and Radius Observations
process.cpp
1 #include <iostream>
2 #include <string>
3 
4 // For gsl_sf_erf
5 #include <gsl/gsl_specfunc.h>
6 
7 #include <boost/numeric/ublas/vector.hpp>
8 #include <boost/numeric/ublas/matrix.hpp>
9 
10 #include <o2scl/table.h>
11 #include <o2scl/table3d.h>
12 #include <o2scl/format_float.h>
13 #include <o2scl/vec_stats.h>
14 #include <o2scl/contour.h>
15 #include <o2scl/hist.h>
16 #include <o2scl/hdf_io.h>
17 #include <o2scl/expval.h>
18 #include <o2scl/interp.h>
19 #include <o2scl/vector.h>
20 #include <o2scl/hist.h>
21 #include <o2scl/mcarlo_miser.h>
22 #include <o2scl/fit_linear.h>
23 
24 #ifdef BAMR_READLINE
25 #include <o2scl/cli_readline.h>
26 #else
27 #include <o2scl/cli.h>
28 #endif
29 
30 using namespace std;
31 using namespace o2scl;
32 using namespace o2scl_hdf;
33 
 34 typedef boost::numeric::ublas::vector<double> ubvector;
 35 typedef boost::numeric::ublas::matrix<double> ubmatrix;
 36 
37 namespace bamr {
38 
39  /** \brief Processing MCMC data from bamr
40  */
41  class process {
42 
43  protected:
44 
45  /// \name Command-line parameters
46  //@{
47  /// Verbosity (default 1)
48  int verbose;
 49  /// Ignore all lines before start
 50  int line_start;
51  /// Scale for x-axis
52  double xscale;
53  /// Scale for y-axis
54  double yscale;
55  /// If true, use a logarithmic x scale
56  bool logx;
57  /// If true, use a logarithmic y scale
58  bool logy;
59  /// If true, use a logarithmic z scale
60  bool logz;
61  /// If true, plot errors in 1d
62  bool errors;
 63  /// Histogram size
 64  int hist_size_int;
65  /// Number of blocks
66  int n_blocks;
67  //@}
68 
69  /// \name Axis limits from 'xlimits' and 'ylimits'
70  //@{
71  /// If true, x limits are set
72  bool xset;
73  /// Lower x value
74  double user_xlow;
75  /// Upper x value
76  double user_xhigh;
77  /// If true, y limits are set
78  bool yset;
79  /// Lower y value
80  double user_ylow;
81  /// Upper y value
82  double user_yhigh;
83  //@}
84 
85  /// \name Const confidence limits
86  //@{
87  /// The one-sigma limit of a normal distribution, 0.6827
88  const double one_sigma;
89  /// The two-sigma limit of a normal distribution, 0.9545
90  const double two_sigma;
91  /// The three-sigma limit of a normal distribution, 0.9973
92  const double three_sigma;
93  //@}
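 // (The three constants above are erf(n/sqrt(2)) for n=1,2,3, i.e. the
 // fraction of a normal distribution lying within n standard deviations
 // of the mean; they are initialized with gsl_sf_erf() in the constructor.)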
94 
95  /// \name Other class member data
96  //@{
97  /// Constraint to apply to the data
98  std::string constraint;
 99  /// Contour levels set in 'contours'
 100  std::vector<double> cont_levels;
 101  /// Formatter for floating point numbers
 102  format_float ff;
103  //@}
104 
105 
106  /// \name Commands
107  //@{
108  /** \brief Create a table of autocorrelation data from a specified
109  column
110  */
111  int auto_corr(std::vector<std::string> &sv, bool itive_com) {
112 
113  // Setup histogram size
114  size_t hist_size=(size_t)hist_size_int;
115  if (hist_size_int<=0) {
116  cout << "Histogram size <=0, using 100." << endl;
117  hist_size=100;
118  }
119 
120  // column name
121  string name=sv[1];
122 
123  // (output file is sv[2])
124 
125  // Parse file list
126  vector<string> files;
127  for(size_t i=3;i<sv.size();i++) files.push_back(sv[i]);
128  size_t nf=files.size();
129 
130  // Read data
131 
132  vector<double> values;
133 
134  for(size_t i=0;i<nf;i++) {
135  hdf_file hf;
136 
137  // Open file
138  cout << "Opening file: " << files[i] << endl;
139  hf.open(files[i]);
140 
141  // Get number of chains
142  size_t n_chains;
143  hf.get_szt("n_chains",n_chains);
144  cout << n_chains << " separate chains." << endl;
145 
146  size_t line_counter=0;
147 
148  // Read each chain
149  for(size_t j=0;j<n_chains;j++) {
150 
151  // Read table
152  std::string tab_name="markov_chain"+szttos(j);
153  table_units<> tab;
154  hdf_input(hf,tab,tab_name);
155  cout << "Read table " << tab_name << endl;
156 
157  // Parse table into values and weights
158  for(size_t k=0;k<tab.get_nlines();k++) {
159  if (((int)line_counter)>=line_start) {
160  values.push_back(tab.get(name,k));
161  }
162  line_counter++;
163  }
164  // Go to next chain
165  }
166  // Go to next file
167  }
168 
169  // Store the autocorrelation data
170  size_t kmax=values.size()/3;
171  vector<double> kvec(kmax-1), acvec(kmax-1), acerr(kmax-1), acfit(kmax-1);
172  for(size_t k=1;k<kmax;k++) {
173  kvec[k-1]=((double)k);
174  acvec[k-1]=vector_lagk_autocorr(values.size(),values,k);
175  acerr[k-1]=fabs(acvec[k-1])/10.0;
176  acfit[k-1]=0.0;
177  }
178 
179  // Put the autocorrelation data into a table
180  table<> tab;
181  tab.line_of_names("k ac err fit");
182  tab.inc_maxlines(kvec.size());
183  tab.set_nlines(kvec.size());
184  tab.swap_column_data("k",kvec);
185  tab.swap_column_data("ac",acvec);
186  tab.swap_column_data("err",acerr);
187  tab.swap_column_data("fit",acfit);
188 
189  // Create the output file
190  hdf_file hf;
191  hf.open_or_create(sv[2]);
192  hdf_output(hf,tab,"ac");
193  hf.close();
194 
195  return 0;
196  }
197 
198  /** \brief Set limits for the x-axis
199  */
200  int xlimits(std::vector<std::string> &sv, bool itive_com) {
201 
202  if (sv.size()<3) {
203  if (verbose>0) {
204  cout << "Setting 'xset' to false." << endl;
205  }
206  xset=false;
207  return 0;
208  }
209 
210  if (sv.size()==3) {
211  user_xlow=o2scl::function_to_double(sv[1]);
212  user_xhigh=o2scl::function_to_double(sv[2]);
213  xset=true;
214  if (verbose>0) {
215  cout << "X limits are " << user_xlow << " and "
216  << user_xhigh << " ." << endl;
217  }
218  return 0;
219  }
220 
221  string file=sv[1];
222  string low_name=sv[2];
223  string high_name=sv[3];
224 
225  hdf_file hf;
226  if (verbose>1) {
227  cout << "Reading file named '" << file << "'." << endl;
228  }
229  hf.open(file);
230  xset=true;
231  hf.getd(low_name,user_xlow);
232  hf.getd(high_name,user_xhigh);
233  hf.close();
234 
235  if (verbose>0) {
236  cout << "X limits are " << user_xlow << " and "
237  << user_xhigh << " ." << endl;
238  }
239 
240  return 0;
241  }
242 
243  /** \brief Set limits for the y-axis
244  */
245  int ylimits(std::vector<std::string> &sv, bool itive_com) {
246 
247  if (sv.size()<3) {
248  if (verbose>0) {
249  cout << "Setting 'yset' to false." << endl;
250  }
251  yset=false;
252  return 0;
253  }
254 
255  if (sv.size()==3) {
256  user_ylow=o2scl::function_to_double(sv[1]);
257  user_yhigh=o2scl::function_to_double(sv[2]);
258  yset=true;
259  if (verbose>0) {
260  cout << "Y limits are " << user_ylow << " and "
261  << user_yhigh << " ." << endl;
262  }
263  return 0;
264  }
265 
266  string file=sv[1];
267  string low_name=sv[2];
268  string high_name=sv[3];
269 
270  hdf_file hf;
271  if (verbose>1) {
272  cout << "Reading file named '" << file << "'." << endl;
273  }
274  hf.open(file);
275  yset=true;
276  hf.getd(low_name,user_ylow);
277  hf.getd(high_name,user_yhigh);
278  hf.close();
279 
280  if (verbose>0) {
281  cout << "Y limits are " << user_ylow << " and "
282  << user_yhigh << " ." << endl;
283  }
284 
285  return 0;
286  }
287 
288  /** \brief Create a histogram from a specified column
289 
290  \future This function isn't that efficient. It first reads all
291  of the data into one long vector and then reparses the long
292  vector into a set of \ref o2scl::expval_scalar objects. The
293  averages and errors for the \ref o2scl::expval_scalar objects
294  are stored into a table, and the table is written to the
295  output file. This could be improved.
296  */
297  int hist(std::vector<std::string> &sv, bool itive_com) {
298 
299  // Setup histogram size
300  size_t hist_size=(size_t)hist_size_int;
301  if (hist_size_int<=0) {
302  cout << "Histogram size <=0, using 100." << endl;
303  hist_size=100;
304  }
305 
306  // column name
307  string name=sv[1];
308 
309  // (output file is sv[2])
310 
311  // Form list of data files
312  vector<string> files;
313  for(size_t i=3;i<sv.size();i++) files.push_back(sv[i]);
314  size_t nf=files.size();
315 
316  // Storage for all of the values and weights
317  vector<double> values, weights;
318 
319  // ------------------------------------------------------------
320  // Read all of the data files in order
321 
322  for(size_t i=0;i<nf;i++) {
323  hdf_file hf;
324 
325  // Open file
326  if (verbose>0) cout << "Opening file: " << files[i] << endl;
327  hf.open(files[i]);
328 
329  // Get number of chains
330  size_t n_chains;
331  hf.get_szt_def("n_chains",1,n_chains);
332  if (verbose>0) {
333  if (n_chains>1) {
334  cout << n_chains << " separate chains." << endl;
335  } else {
336  cout << "1 chain." << endl;
337  }
338  }
339 
340  size_t line_counter=0;
341 
342  // Read each chain
343  for(size_t j=0;j<n_chains;j++) {
344 
345  // Read table
346  std::string tab_name="markov_chain"+szttos(j);
347  table_units<> tab;
348  hdf_input(hf,tab,tab_name);
349 
350  if (constraint.size()>0) {
351  size_t nlines_old=tab.get_nlines();
352  tab.delete_rows(constraint);
353  size_t nlines_new=tab.get_nlines();
354  if (verbose>0) {
355  cout << "Applied constraint \"" << constraint
356  << "\" and went from " << nlines_old << " to "
357  << nlines_new << " lines." << endl;
358  }
359  }
360 
361  // Parse table into values and weights
362  for(size_t k=0;k<tab.get_nlines();k++) {
363  if (((int)line_counter)>=line_start) {
364  values.push_back(tab.get(name,k));
365  weights.push_back(tab.get("mult",k));
366  }
367  line_counter++;
368  }
369 
370  if (verbose>0) {
371  cout << "Table " << tab_name << " lines: "
372  << tab.get_nlines();
373  if (line_start>0) {
374  cout << " skipping: " << line_start;
375  }
376  cout << endl;
377  }
378 
379  // Go to next chain
380  }
381  // Go to next file
382  }
383  if (verbose>0) {
384  cout << "Done with reading files.\n" << endl;
385  }
386 
387  // ------------------------------------------------------------
388  // Get limits for grid
389 
390  double min, max;
391  if (!xset) {
392  min=*min_element(values.begin(),values.end());
393  max=*max_element(values.begin(),values.end());
394  min*=xscale;
395  max*=xscale;
396  } else {
397  min=user_xlow;
398  max=user_xhigh;
399  }
400  if (verbose>0) {
401  cout << "xsize, min, max: " << values.size() << " "
402  << min << " " << max << endl;
403  }
404 
405  // ------------------------------------------------------------
406  // Double check limits in the case of a logarithmic grid
407 
408  if (logx) {
409  if (max<=0.0) {
410  O2SCL_ERR2("Both min and max negative with logarithmic ",
411  "grid in hist().",exc_efailed);
412  }
413  // Reset 'min' to be smallest positive value
414  if (min<=0.0) {
415  double min_new=max;
416  for(size_t i=0;i<values.size();i++) {
417  if (values[i]>0.0 && values[i]<min_new) min_new=values[i];
418  }
419  cout << "Resetting 'min' from " << min << " to "
420  << min_new << "." << endl;
421  min=min_new;
422  }
423  }
424 
425  // ------------------------------------------------------------
426  // Create expval_scalar objects
427 
428  if (n_blocks<=0) {
429  cout << "Variable 'n_blocks' <= 0. Setting to 20." << endl;
430  n_blocks=20;
431  }
432  vector<expval_scalar> sev(hist_size);
433  for(size_t i=0;i<hist_size;i++) {
434  sev[i].set_blocks(n_blocks,1);
435  }
436 
437  // ------------------------------------------------------------
438  // Setup grid and histogram. We make the grid slightly bigger
439  // than the min-max range to ensure finite precision errors
440  // don't give problems with elements outside the histogram.
441 
 442  o2scl::hist h;
 443  uniform_grid<double> grid;
444  if (logx) {
445  grid=uniform_grid_log_end<double>(min*(1.0-1.0e-6),
446  max*(1.0+1.0e-6),hist_size);
447  h.set_bin_edges(grid);
448  } else {
449  grid=uniform_grid_end<double>(min-fabs(min)*1.0e-6,
450  max+fabs(max)*1.0e-6,hist_size);
451  h.set_bin_edges(grid);
452  }
453 
454  // ------------------------------------------------------------
455  // Fill expval_scalar objects using histogram
456 
457  size_t block_size=values.size()/((size_t)n_blocks);
458  for(int i=0;i<n_blocks;i++) {
459  if (verbose>1) {
460  cout << "Block " << i << endl;
461  }
462  for(size_t j=0;j<block_size;j++) {
463  double val=values[i*block_size+j]*xscale;
464  if (val>min && val<max) {
465  h.update(val,weights[i*block_size+j]);
466  }
467  }
468  // Update expval_scalar object with histogram for this block
469  for(size_t j=0;j<hist_size;j++) {
470  sev[j].add(h[j]);
471  }
472  h.clear_wgts();
473  }
474 
475  // ------------------------------------------------------------
476  // Setup table
477 
478  table<> t;
479  if (errors) {
480  t.line_of_names("reps avgs errs plus minus");
481  } else {
482  t.line_of_names("reps avgs errs");
483  }
484  t.set_nlines(hist_size);
485 
486  // ------------------------------------------------------------
487  // Compute values for table
488 
489  double avg, std_dev, avg_err;
490  for(size_t i=0;i<hist_size;i++) {
491  t.set("reps",i,h.get_rep_i(i));
492  sev[i].current_avg(avg,std_dev,avg_err);
493  t.set("avgs",i,avg);
494  t.set("errs",i,avg_err);
495  }
496 
497  // Set endpoints to zero for vector_invert_enclosed_sum()
498  t.set("avgs",0,0.0);
499  t.set("avgs",hist_size-1,0.0);
500 
501  cout << "Here." << endl;
502 
503  // Compute total integral
504  double total=vector_integ_linear(hist_size,t["reps"],t["avgs"]);
505  if (verbose>0) cout << "Total integral: " << total << endl;
506 
507  cout << "Here " << total << endl;
508  std::vector<double> locs;
509  double lev;
510 
511  // Get one-sigma error ranges
512  vector_invert_enclosed_sum(one_sigma*total,hist_size,
513  t["reps"],t["avgs"],lev);
514 
515  cout << "here: " << locs.size() << endl;
516 
517  vector_find_level(lev,hist_size,t["reps"],t["avgs"],locs);
518  double xlo1=locs[0], xhi1=locs[0];
519  for(size_t i=0;i<locs.size();i++) {
520  if (locs[i]<xlo1) xlo1=locs[i];
521  if (locs[i]>xhi1) xhi1=locs[i];
522  }
523 
524  // Get two-sigma error ranges
525  vector_invert_enclosed_sum(two_sigma*total,hist_size,
526  t["reps"],t["avgs"],lev);
527  vector_find_level(lev,hist_size,t["reps"],t["avgs"],locs);
528  double xlo2=locs[0], xhi2=locs[0];
529  for(size_t i=0;i<locs.size();i++) {
530  if (locs[i]<xlo2) xlo2=locs[i];
531  if (locs[i]>xhi2) xhi2=locs[i];
532  }
533 
534  // Get peak position
535  double xpeak=vector_max_quad_loc<std::vector<double>,double>
536  (hist_size,t["reps"],t["avgs"]);
537 
538  if (verbose>0) {
539  cout << name << " -2,-1,0,+1,+2 sigma: " << endl;
540  if (false) {
541  cout.unsetf(ios::scientific);
542  cout.precision(4);
543  cout << xlo2 << " & " << xlo1 << " & "
544  << xpeak << " & " << xhi1 << " & " << xhi2 << " \\\\" << endl;
545  cout.precision(6);
546  cout.setf(ios::scientific);
547  }
548  cout << ff.convert(xlo2) << " & " << ff.convert(xlo1) << " & "
549  << ff.convert(xpeak) << " & " << ff.convert(xhi1) << " & "
550  << ff.convert(xhi2) << " \\\\" << endl;
551  }
552 
553  // Setup min and max y values (distinct from 'min' and
554  // 'max' for the grid above)
555  double xmin=t.min("reps");
556  double xmax=t.max("reps");
557  double ymin=t.min("avgs");
558  double ymax=t.max("avgs");
559 
560  // Create columns for errors
561  if (errors) {
562  for(size_t i=0;i<hist_size;i++) {
563  if (t.get("avgs",i)>=0.0) {
564  if (t.get("avgs",i)-t.get("errs",i)>=0.0) {
565  t.set("minus",i,t.get("avgs",i)-t.get("errs",i));
566  } else {
567  t.set("minus",i,0.0);
568  }
569  t.set("plus",i,t.get("avgs",i)+t.get("errs",i));
570  } else {
571  t.set("minus",i,t.get("avgs",i)-t.get("errs",i));
572  if (t.get("avgs",i)+t.get("errs",i)>0.0) {
573  t.set("plus",i,0.0);
574  } else {
575  t.set("plus",i,t.get("avgs",i)+t.get("errs",i));
576  }
577  }
578  }
579  double plus_max=t.max("plus");
580  double minus_min=t.min("minus");
581  if (plus_max>ymax) ymax=plus_max;
582  if (minus_min<ymin) ymin=minus_min;
583  }
584 
585  if (verbose>0) {
586  cout << "Writing to file " << sv[2] << endl;
587  }
588  hdf_file hf;
589  hf.open_or_create(sv[2]);
590  hdf_output(hf,t,"hist_table");
591  hdf_output(hf,grid,"xgrid");
592  hf.setd("xmin",xmin);
593  hf.setd("xmax",xmax);
594  if (logx) {
595  hf.seti("logx",1);
596  } else {
597  hf.seti("logx",0);
598  }
599  hf.setd("ymin",ymin);
600  hf.setd("ymax",ymax);
601  hf.setd("xlo2",xlo2);
602  hf.setd("xhi2",xhi2);
603  hf.setd("xlo1",xlo1);
604  hf.setd("xhi1",xhi1);
605  hf.setd("xpeak",xpeak);
606  hf.close();
607 
608  return 0;
609  }
610 
611  /** \brief Create a two-dimensional histogram from two
612  user-specified columns
613  */
614  int hist2(std::vector<std::string> &sv, bool itive_com) {
615 
616  // Setup histogram size
617  size_t hist_size=(size_t)hist_size_int;
618  if (hist_size_int<=0) {
619  cout << "Histogram size <=0, using 100." << endl;
620  hist_size=100;
621  }
622 
623  // Column names
624  string xname=sv[1];
625  string yname=sv[2];
626 
627  // (output file is sv[3])
628 
629  // List of data files
630  vector<string> files;
631  for(size_t i=4;i<sv.size();i++) files.push_back(sv[i]);
632  size_t nf=files.size();
633 
 634  // Output table
635  table_units<> tab;
636 
637  // Temporary column storage
638  vector<double> valuesx, valuesy, weights;
639 
640  // data
641  for(size_t i=0;i<nf;i++) {
642  hdf_file hf;
643  cout << "Opening file: " << files[i] << endl;
644  hf.open(files[i]);
645  size_t n_chains;
646  hf.get_szt_def("n_chains",1,n_chains);
647  cout << n_chains << " chains." << endl;
648 
649  size_t line_counter=0;
650 
651  for(size_t j=0;j<n_chains;j++) {
652 
653  std::string tab_name="markov_chain"+szttos(j);
654  hdf_input(hf,tab,tab_name);
655  cout << "Read table " << tab_name << endl;
656 
657  if (constraint.size()>0) {
658  size_t nlines_old=tab.get_nlines();
659  tab.delete_rows(constraint);
660  size_t nlines_new=tab.get_nlines();
661  if (verbose>0) {
662  cout << "Applied constraint \"" << constraint
663  << "\" and went from " << nlines_old << " to "
664  << nlines_new << " lines." << endl;
665  }
666  }
667 
668  for(size_t k=0;k<tab.get_nlines();k++) {
669  if (((int)line_counter)>=line_start) {
670  valuesx.push_back(tab.get(xname,k));
671  valuesy.push_back(tab.get(yname,k));
672  weights.push_back(tab.get("mult",k));
673  }
674  line_counter++;
675  }
676  }
677  }
678 
679  // Form x and y limits
680  double xmin, xmax;
681  if (!xset) {
682  xmin=*min_element(valuesx.begin(),valuesx.end());
683  xmax=*max_element(valuesx.begin(),valuesx.end());
684  xmin*=xscale;
685  xmax*=xscale;
686  cout << xname << " xsize, xmin, xmax: " << valuesx.size() << " "
687  << xmin << " " << xmax << endl;
688  } else {
689  xmin=user_xlow;
690  xmax=user_xhigh;
691  }
692 
693  double ymin, ymax;
694  if (!yset) {
695  ymin=*min_element(valuesy.begin(),valuesy.end());
696  ymax=*max_element(valuesy.begin(),valuesy.end());
697  ymin*=yscale;
698  ymax*=yscale;
699  cout << yname << " ysize, ymin, ymax: " << valuesy.size() << " "
700  << ymin << " " << ymax << endl;
701  } else {
702  ymin=user_ylow;
703  ymax=user_yhigh;
704  }
705 
706  // Create expval_scalar objects
707  if (n_blocks<=0) n_blocks=20;
708  vector<expval_scalar> sev(hist_size*hist_size);
709  for(size_t i=0;i<hist_size*hist_size;i++) {
710  sev[i].set_blocks(n_blocks,1);
711  }
712 
713  // Grid and histogram
714  uniform_grid_end<double> xgrid(xmin*(1.0-1.0e-6),
715  xmax*(1.0+1.0e-6),hist_size);
716  uniform_grid_end<double> ygrid(ymin*(1.0-1.0e-6),
717  ymax*(1.0+1.0e-6),hist_size);
718  hist_2d h;
719  h.set_bin_edges(xgrid,ygrid);
720 
721  // Fill expval_scalar objects using histogram
722  size_t block_size=valuesx.size()/20;
723  for(size_t i=0;((int)i)<n_blocks;i++) {
724  cout << "Block " << i << endl;
725  for(size_t j=0;j<block_size;j++) {
726  double vx=valuesx[i*block_size+j]*xscale;
727  double vy=valuesy[i*block_size+j]*yscale;
728  if (vx<xmax && vx>xmin && vy<ymax && vy>ymin) {
729  h.update(vx,vy,weights[i*block_size+j]);
730  }
731  }
732  for(size_t j=0;j<hist_size;j++) {
733  for(size_t k=0;k<hist_size;k++) {
734  sev[j*hist_size+k].add(h.get_wgt_i(j,k));
735  }
736  }
737  h.clear_wgts();
738  }
739 
740  // Create x and y grids
741  ubvector xreps(hist_size), yreps(hist_size);
742  for(size_t i=0;i<hist_size;i++) {
743  xreps[i]=h.get_x_rep_i(i);
744  yreps[i]=h.get_y_rep_i(i);
745  }
746 
747  // Create table
748  table3d t3d;
749  t3d.set_xy(xname,xreps.size(),xreps,yname,yreps.size(),yreps);
750  t3d.new_slice("avgs");
751 
752  // Collect averages into table
753  double std_dev, avg_err, avg, smax=0.0;
754  for(size_t i=0;i<hist_size;i++) {
755  for(size_t j=0;j<hist_size;j++) {
756  sev[i*hist_size+j].current_avg(avg,std_dev,avg_err);
757  if (avg>smax) smax=avg;
758  t3d.set(i,j,"avgs",avg);
759  }
760  }
761 
762  // Renormalize so that maximum value is 1.0 (important for
763  // contours below)
764  for(size_t i=0;i<hist_size;i++) {
765  for(size_t j=0;j<hist_size;j++) {
766  t3d.set(i,j,"avgs",t3d.get(i,j,"avgs")/smax);
767  }
768  }
769 
770  // ------------------------------------------
771  // Compute contour lines
772 
773  vector<contour_line> conts;
774  size_t nc=0;
775 
776  // Construct x and y values for interpolating the double integral
777  ubvector integx(101), integy(101);
778  for(size_t i=0;i<101;i++) {
779  integx[i]=((double)i)/100.0;
780  }
781  for(size_t k=0;k<101;k++) {
782  integy[k]=0.0;
783  for(size_t i=0;i<hist_size;i++) {
784  for(size_t j=0;j<hist_size;j++) {
785  if (t3d.get(i,j,"avgs")>integx[k]) integy[k]+=t3d.get(i,j,"avgs");
786  }
787  }
788  }
789 
790  // Get the total
791  double total=integy[0];
792  ubvector levels(cont_levels.size());
793 
794  // Compute the function values associated with the contour levels
795  for(size_t k=0;k<cont_levels.size();k++) {
796  levels[k]=0.0;
797  for(size_t i=0;i<100;i++) {
798  if (integy[i]>cont_levels[k]*total &&
799  integy[i+1]<cont_levels[k]*total) {
800  levels[k]=(integx[i]+integx[i+1])/2.0;
801  i=100;
802  }
803  }
804  if (levels[k]==0.0) {
805  cout << "Failed to find contour level for level "
806  << levels[k] << endl;
807  }
808  }
809 
810  // If those levels were found, plot the associated contours
811  contour co;
812 
813  // Set the contour object data
814  ubvector xg(hist_size), yg(hist_size);
815  for(size_t i=0;i<hist_size;i++) {
816  xg[i]=t3d.get_grid_x(i);
817  yg[i]=t3d.get_grid_y(i);
818  }
819  co.set_data(hist_size,hist_size,xg,yg,t3d.get_slice("avgs"));
820 
821  // Set the levels
822  co.set_levels(levels.size(),levels);
823 
824  // Compute the contours
825  co.calc_contours(conts);
826  nc=conts.size();
827  cout << "Number of contours: " << nc << endl;
828 
829  cout << "Writing to file " << sv[3] << endl;
830  hdf_file hf;
831  hf.open_or_create(sv[3]);
832  hdf_output(hf,t3d,"hist2_table");
833  hf.setd_vec_copy("levels",levels);
834  hf.set_szt("n_contours",nc);
835  if (nc>0) {
836  hdf_output(hf,conts,"contours");
837  }
838  hf.close();
839 
840  return 0;
841  }
842 
843  /** \brief Create a set of histograms from a set of columns in
844  the bamr MCMC output
845  */
846  int hist_set(std::vector<std::string> &sv, bool itive_com) {
847 
848  // Setup histogram size
849  size_t hist_size=(size_t)hist_size_int;
850  if (hist_size_int<=0) {
851  cout << "Histogram size <=0, using 100." << endl;
852  hist_size=100;
853  }
854 
855  string type=sv[1];
856  string low_name=sv[2];
857  string high_name=sv[3];
858  string set_prefix=sv[4];
859 
860  // (output file is sv[5])
861 
862  // file list
863  vector<string> files;
864 
865  // Form list of data files
866  for(size_t i=6;i<sv.size();i++) files.push_back(sv[i]);
867  size_t nf=files.size();
868 
869  // The value of 'grid_size' from the bamr output file
870  size_t grid_size=0;
871 
872  // Storage for all of the data
873  vector<double> weights;
874  vector<vector<double> > values_ser;
875 
876  // The grid determined by the boundaries specified in low_name
877  // and high_name by the user (unscaled)
878  double index_low, index_high;
879  uniform_grid<double> index_grid;
880 
881  // Count the data over each grid
882  ubvector count;
883 
884  // Series min and series max, just determined by the min and
885  // max values over all points in all chains
886  double ser_min=0.0, ser_max=0.0;
887 
888  // ------------------------------------------------------------
889  // Read the data
890 
891  for(size_t i=0;i<nf;i++) {
892  hdf_file hf;
893  if (verbose>0) {
894  cout << "Opening file: " << files[i] << endl;
895  }
896  hf.open(files[i]);
897 
898  // If we're reading the first file, obtain the grid information
899  if (i==0) {
900  hf.get_szt("grid_size",grid_size);
901  hf.getd(low_name,index_low);
902  hf.getd(high_name,index_high);
903  if (verbose>0) {
904  cout << "grid_size, index_low, index_high: "
905  << grid_size << " " << index_low << " " << index_high << endl;
906  }
907  if ((logx && type==((string)"x")) ||
908  (logy && type==((string)"y"))) {
 909  index_grid=uniform_grid_log_end<double>
 910  (index_low,index_high,grid_size-1);
911  } else {
912  index_grid=uniform_grid_end<double>
913  (index_low,index_high,grid_size-1);
914  }
915  values_ser.resize(grid_size);
916  count.resize(index_grid.get_npoints());
917  }
918 
919  // Obtain the number of chains in this file
920  size_t n_chains;
921  hf.get_szt("n_chains",n_chains);
922  if (verbose>0) {
923  if (n_chains==1) {
924  cout << n_chains << " separate chain." << endl;
925  } else {
926  cout << n_chains << " separate chains." << endl;
927  }
928  }
929 
930  // Count the total number of lines over all
931  // the individual tables in the file
932  size_t line_counter=0;
933 
934  // Process each chain in turn
935  for(size_t j=0;j<n_chains;j++) {
936 
937  // Read chain from file
938  table_units<> tab;
939  std::string tab_name="markov_chain"+szttos(j);
940  hdf_input(hf,tab,tab_name);
941  if (verbose>0) {
942  cout << "Read table " << tab_name << " lines: "
943  << tab.get_nlines() << endl;
944  }
945 
946  // Apply constraint to this table
947  if (constraint.size()>0) {
948  size_t nlines_old=tab.get_nlines();
949  tab.delete_rows(constraint);
950  size_t nlines_new=tab.get_nlines();
951  if (verbose>0) {
952  cout << "Applied constraint \"" << constraint
953  << "\" and went from " << nlines_old << " to "
954  << nlines_new << " lines." << endl;
955  }
956  }
957 
958  // Read each line in the current chain
959  for(size_t k=0;k<tab.get_nlines();k++) {
960 
961  if (((int)line_counter)>=line_start) {
962 
963  // For each column in the series
964  for(size_t ell=0;ell<grid_size;ell++) {
965 
966  // Column name and value
967  string col=set_prefix+"_"+szttos(ell);
968  double val=tab.get(col,k);
969 
970  // Add the value even if it's zero (blank)
971  values_ser[ell].push_back(val);
972 
973  // But if it's less than or equal to zero, don't count
974  // it for min, max, or count. This is important
975  // because we don't want to count past the M-R curve
976  // or the end of the EOS.
977  if (val>0.0) {
978  count[ell]++;
979  // The first time, initialize to the first non-zero point
980  if (ser_min==0.0) {
981  ser_min=val;
982  ser_max=val;
983  }
984  if (val<ser_min) ser_min=val;
985  if (val>ser_max) ser_max=val;
986  }
987  // Next column
988  }
989  weights.push_back(tab.get("mult",k));
990  }
991  line_counter++;
992 
993  // Next table line (k)
994  }
995 
996  // Next chain (j)
997  }
998 
999  // Next file (i)
1000  }
1001  if (verbose>0) {
1002  cout << "Done reading files." << endl;
1003  }
1004 
1005  // Rescale series limits, and set from user-specified values
1006  // if present
1007  if (type==((string)"x")) {
1008  ser_min*=yscale;
1009  ser_max*=yscale;
1010  } else {
1011  ser_min*=xscale;
1012  ser_max*=xscale;
1013  }
1014  if (type==((string)"x")) {
1015  if (yset) {
1016  ser_min=user_ylow;
1017  ser_max=user_yhigh;
1018  }
1019  } else {
1020  if (xset) {
1021  ser_min=user_xlow;
1022  ser_max=user_xhigh;
1023  }
1024  }
1025 
1026  if (verbose>0) {
1027  cout << "Total lines: " << weights.size() << " "
1028  << values_ser[0].size() << " ser_min: " << ser_min << " ser_max: "
1029  << ser_max << endl;
1030  }
1031 
1032  // Create expval_scalar objects
1033  if (n_blocks<=0) n_blocks=20;
1034  vector<expval_scalar> sev(hist_size*grid_size);
1035  for(size_t i=0;i<hist_size*grid_size;i++) {
1036  sev[i].set_blocks(n_blocks,1);
1037  }
1038 
1039  uniform_grid<double> ser_grid;
1040  if ((logy && type==((string)"x")) ||
1041  (logx && type==((string)"y"))) {
1042  ser_grid=uniform_grid_log_end<double>(ser_min,ser_max,hist_size);
1043  } else {
1044  ser_grid=uniform_grid_end<double>(ser_min,ser_max,hist_size);
1045  }
1046 
1047  // Histograms to compute contour lines. The histogram 'h' stores
1048  // the values for the current block, and 'hsum' stores the
1049  // values over all blocks.
1050  o2scl::hist h, hsum;
1051  h.set_bin_edges(ser_grid);
1052  hsum.set_bin_edges(ser_grid);
1053 
1054  // Vectors to store contour results. Each additional
1055  // contour level requires two vectors: a low and high
1056  // vector.
1057  ubmatrix cont_res(cont_levels.size()*2,grid_size);
1058 
1059  // Fill expval_scalar objects using histogram
1060  size_t block_size=weights.size()/20;
1061  for(size_t k=0;k<grid_size;k++) {
1062 
1063  string col=set_prefix+"_"+szttos(k);
1064  cout << "Column " << col << " index: ";
1065  if (type==((string)"x")) {
1066  cout << index_grid[k]*xscale << " count: ";
1067  } else {
1068  cout << index_grid[k]*yscale << " count: ";
1069  }
1070  cout << count[k] << " ";
1071 
1072  // Add up the entries for the histograms
1073  for(size_t i=0;((int)i)<n_blocks;i++) {
1074  for(size_t j=0;j<block_size;j++) {
1075  double val=values_ser[k][i*block_size+j];
1076  if (type==((string)"x")) val*=yscale;
1077  else val*=xscale;
1078  if (val>ser_min && val<ser_max) {
1079  h.update(val,weights[i*block_size+j]);
1080  hsum.update(val,weights[i*block_size+j]);
1081  }
1082  }
1083  for(size_t j=0;j<hist_size;j++) {
1084  sev[j*grid_size+k].add(h.get_wgt_i(j));
1085  }
1086  h.clear_wgts();
1087  }
1088 
1089  // Copy results from hsum histogram
1090  vector<double> cont_wgt, cont_grid;
1091  for(size_t i=0;i<hsum.size();i++) {
1092  cont_wgt.push_back(hsum.get_wgt_i(i));
1093  cont_grid.push_back(hsum.get_rep_i(i));
1094  }
1095 
1096  // Compute contour levels
1097  for(size_t j=0;j<cont_levels.size();j++) {
1098  if (count[k]>0.0) {
1099  std::vector<double> locs;
1100  cont_wgt[0]=0.0;
1101  cont_wgt[cont_wgt.size()-1]=0.0;
1102  if (o2scl::vector_sum_double(cont_wgt)>0.0) {
1103  vector_region_parint(cont_wgt.size(),cont_grid,cont_wgt,
1104  cont_levels[j],locs);
1105  double lmin=vector_min_value<vector<double>,double>(locs);
1106  double lmax=vector_max_value<vector<double>,double>(locs);
1107  cout << lmin << " " << lmax << " ";
1108  cont_res(2*j,k)=lmin;
1109  cont_res(2*j+1,k)=lmax;
1110  } else {
1111  // If there's not enough data, just report zero
1112  cont_res(2*j,k)=0.0;
1113  cont_res(2*j+1,k)=0.0;
1114  }
1115  } else {
1116  // If there's not enough data, just report zero
1117  cont_res(2*j,k)=0.0;
1118  cont_res(2*j+1,k)=0.0;
1119  }
1120  }
1121  hsum.clear_wgts();
1122  cout << endl;
1123 
1124  // End of loop over grid index
1125  }
1126 
1127  // Create both grids for the table3d object
1128  ubvector reps(hist_size);
1129  for(size_t i=0;i<hist_size;i++) {
1130  reps[i]=h.get_rep_i(i);
1131  }
1132  ubvector index_grid_vec;
1133  index_grid_vec.resize(index_grid.get_npoints());
1134  vector_grid(index_grid,index_grid_vec);
1135  if (type==((string)"x")) {
1136  for(size_t i=0;i<index_grid_vec.size();i++) {
1137  index_grid_vec[i]*=xscale;
1138  }
1139  } else {
1140  for(size_t i=0;i<index_grid_vec.size();i++) {
1141  index_grid_vec[i]*=yscale;
1142  }
1143  }
1144 
1145  // Create the table3d object
1146  table3d t3d;
1147  if (type==((string)"x")) {
1148  t3d.set_xy("x",index_grid_vec.size(),index_grid_vec,
1149  "y",reps.size(),reps);
1150  } else {
1151  t3d.set_xy("x",reps.size(),reps,
1152  "y",index_grid_vec.size(),index_grid_vec);
1153  }
1154  t3d.new_slice("avgs");
1155 
1156  // Collect averages into the table3d slice
1157  double std_dev, avg_err, avg, smax=0.0;
1158  for(size_t j=0;j<hist_size;j++) {
1159  for(size_t k=0;k<grid_size;k++) {
1160  sev[j*grid_size+k].current_avg(avg,std_dev,avg_err);
1161  if (avg>smax) smax=avg;
1162  if (type==((string)"x")) {
1163  t3d.set(k,j,"avgs",avg);
1164  } else {
1165  t3d.set(j,k,"avgs",avg);
1166  }
1167  }
1168  }
1169 
1170  // Renormalize by the maximum value
1171  for(size_t j=0;j<t3d.get_nx();j++) {
1172  for(size_t k=0;k<t3d.get_ny();k++) {
1173  t3d.set(j,k,"avgs",t3d.get(j,k,"avgs")/smax);
1174  }
1175  }
1176 
1177  // Perform file output
1178  cout << "Writing table to file " << sv[5] << endl;
1179  hdf_file hf;
1180  hf.open_or_create(sv[5]);
1181  hdf_output(hf,t3d,"hist_set");
1182  hdf_output(hf,index_grid,"index_grid");
1183  hf.setd_mat_copy("cont_res",cont_res);
1184 
1185  hf.close();
1186 
1187  return 0;
1188  }
1189 
1190  /** \brief Specify which contour levels to use
1191  */
1192  int contours(std::vector<std::string> &sv, bool itive_com) {
1193  cont_levels.clear();
1194  for(size_t i=1;i<sv.size();i++) {
1195  if (sv[i]==((string)"1s")) {
1196  cont_levels.push_back(one_sigma);
1197  } else if (sv[i]==((string)"2s")) {
1198  cont_levels.push_back(two_sigma);
1199  } else if (sv[i]==((string)"3s")) {
1200  cont_levels.push_back(three_sigma);
1201  } else {
1202  cont_levels.push_back(o2scl::function_to_double(sv[i]));
1203  }
1204  }
1205  return 0;
1206  }
1207 
1208  /** \brief Combine several <tt>bamr</tt> output files
1209  */
1210  int combine(std::vector<std::string> &sv, bool itive_com) {
1211 
1212  // Thinning factor
1213  size_t thin_factor=stoszt(sv[1]);
1214  if (thin_factor==0) thin_factor=1;
1215 
1216  // Output file
1217  string out_file=sv[2];
1218 
1219  // file list
1220  vector<string> files;
1221 
1222  // Form list of data files
1223  for(size_t i=3;i<sv.size();i++) files.push_back(sv[i]);
1224  size_t nf=files.size();
1225 
1226  table_units<> full;
1227 
1228  double e_low, e_high, m_low, m_high, nb_low, nb_high;
1229  ubvector low, high;
1230  size_t nparams, nsources, lgrid_size, file_row_start;
1231  vector<string> param_names, source_names;
1232 
1233  // data
1234  for(size_t i=0;i<nf;i++) {
1235  hdf_file hf;
1236 
1237  // Open file
1238  cout << "Opening file: " << files[i] << endl;
1239  hf.open(files[i]);
1240 
1241  // Get number of chains
1242  size_t n_chains;
1243  hf.get_szt("n_chains",n_chains);
1244  if (n_chains>1) {
1245  cout << n_chains << " separate chains." << endl;
1246  } else {
1247  cout << "1 chain." << endl;
1248  }
1249 
1250  size_t line_counter=0;
1251 
1252  file_row_start=full.get_nlines();
1253 
1254  // Read each chain
1255  for(size_t j=0;j<n_chains;j++) {
1256 
1257  // Read table
1258  std::string tab_name="markov_chain"+szttos(j);
1259  table_units<> tab;
1260  hdf_input(hf,tab,tab_name);
1261 
1262  // Set up columns in fill table over all files
1263  if (i==0 && j==0) {
1264  for(size_t ell=0;ell<tab.get_ncolumns();ell++) {
1265  full.new_column(tab.get_column_name(ell));
1266  }
1267  }
1268 
1269  // Get misc parameters
1270  if (i==0 && j==0) {
1271  hf.getd("e_low",e_low);
1272  hf.getd("e_high",e_high);
1273  hf.getd("nb_low",nb_low);
1274  hf.getd("nb_high",nb_high);
1275  hf.getd("m_low",m_low);
1276  hf.getd("m_high",m_high);
1277  hf.getd_vec_copy("low",low);
1278  hf.getd_vec_copy("high",high);
1279  hf.get_szt("grid_size",lgrid_size);
1280  hf.get_szt("nparams",nparams);
1281  hf.get_szt("nsources",nsources);
1282  hf.gets_vec("param_names",param_names);
1283  hf.gets_vec("source_names",source_names);
1284  }
1285 
1286  // Add data to table for this file
1287  for(size_t k=((size_t)line_start);k<tab.get_nlines();k+=thin_factor) {
1288  vector<double> line;
1289  for(size_t ell=0;ell<tab.get_ncolumns();ell++) {
1290  line.push_back(tab.get(tab.get_column_name(ell),k));
1291  }
1292  full.line_of_data(line.size(),line);
1293  }
1294 
1295  cout << "Added table " << tab_name << " lines: "
1296  << tab.get_nlines();
1297  if (line_start>0) {
1298  cout << " skipped: " << line_start;
1299  }
1300  cout << endl;
1301 
1302  // Go to next chain
1303  }
1304 
1305 
1306  // Load weight column for this file from full table
1307  vector<double> subweights;
1308  for(size_t k=file_row_start;k<full.get_nlines();k++) {
1309  subweights.push_back(full.get("weight",k));
1310  }
1311 
1312  // Compute autocorrelation
1313  size_t kmax=subweights.size()/3;
1314  ubvector kvec(kmax-1), acvec(kmax-1), acerr(kmax-1);
1315  for(size_t k=1;k<kmax;k++) {
1316  kvec[k-1]=((double)k);
1317  acvec[k-1]=vector_lagk_autocorr(subweights.size(),subweights,k);
1318  acerr[k-1]=fabs(acvec[k-1])/10.0;
1319  }
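 // (Note: this autocorrelation data is only computed locally here and is
 // not written to the combined output file created below.)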
1320 
1321  cout << "full.nlines=" << full.get_nlines() << endl;
1322 
1323  // Go to next file
1324  }
1325 
1326  cout << "Writing table to file " << out_file << endl;
1327  hdf_file hf;
1328  hf.open_or_create(out_file);
1329  hf.setd("e_low",e_low);
1330  hf.setd("e_high",e_high);
1331  hf.setd("nb_low",nb_low);
1332  hf.setd("nb_high",nb_high);
1333  hf.setd("m_low",m_low);
1334  hf.setd("m_high",m_high);
1335  hf.set_szt("grid_size",lgrid_size);
1336  hf.set_szt("n_chains",1);
1337  hf.setd_vec_copy("low",low);
1338  hf.setd_vec_copy("high",high);
1339  hf.set_szt("nparams",nparams);
1340  hf.set_szt("nsources",nsources);
1341  hf.sets_vec("param_names",param_names);
1342  hf.sets_vec("source_names",source_names);
1343  hdf_output(hf,full,"markov_chain0");
1344  hf.close();
1345 
1346  return 0;
1347  }
1348  //@}
1349 
1350  public:
1351 
1352  /// Create the process object
1353  process() : one_sigma(gsl_sf_erf(1.0/sqrt(2.0))),
1354  two_sigma(gsl_sf_erf(2.0/sqrt(2.0))),
1355  three_sigma(gsl_sf_erf(3.0/sqrt(2.0))) {
1356  verbose=1;
1357  xscale=1.0;
1358  yscale=1.0;
1359  xset=false;
1360  user_xlow=0.0;
1361  user_xhigh=0.0;
1362  yset=false;
1363  user_ylow=0.0;
1364  user_yhigh=0.0;
1365  errors=true;
1366  hist_size_int=100;
1367  logx=false;
1368  logy=false;
1369  logz=false;
1370  line_start=0;
1371  ff.latex_mode();
1372  ff.set_sig_figs(4);
1373  ff.set_pad_zeros(true);
1374  ff.set_exp_limits(-5,5);
1375  n_blocks=0;
1376  }
1377 
1378  /** \brief Main public interface
1379  */
1380  void run(int argc, char *argv[]) {
1381 
1382  // ---------------------------------------
1383  // Specify command-line option object
1384 
1385 #ifdef BAMR_READLINE
1386  cli_readline cl;
1387 #else
1388  cli cl;
1389 #endif
1390  cl.prompt="process> ";
1391  cl.gnu_intro=false;
1392 
1393  //sfit();
1394 
1395  // ---------------------------------------
1396  // Set options
1397 
1398  static const int nopt=8;
1399  comm_option_s options[nopt]={
 1400  {'x',"xlimits","Set histogram limits for the first variable",0,3,
1401  "<low-value high-value> or <file> <low-name> <high-name> or <>",
1402  ((string)"In the case of no arguments, set the limits ")+
1403  "to the default. In the case of two arguments, take the two "+
 1404  "specified numbers as the limits; in the case of three "+
1405  "arguments, assume that the first argument is the bamr output "+
1406  "file, and the second and third are the names of values "+
1407  "in the output file which should be set as limits.",
1408  new comm_option_mfptr<process>(this,&process::xlimits),
1409  cli::comm_option_both},
1410  {'y',"ylimits","Set histogram limits for the second variable",0,3,
1411  "<low-value high-value> or <file> <low-name> <high-name> or <>",
1412  ((std::string)"In the case of no arguments, set the limits ")+
1413  "to the default. In the case of two arguments, take the two "+
 1414  "specified numbers as the limits; in the case of three "+
1415  "arguments, assume that the first argument is the bamr output "+
1416  "file, and the second and third are the names of values "+
1417  "in the output file which should be set as limits.",
1418  new comm_option_mfptr<process>(this,&process::ylimits),
1419  cli::comm_option_both},
 1420  {'C',"combine","Combine several bamr output files.",2,-1,
 1421  "<thin factor> <out_file> <file1> [file2 ...]",
1422  ((string)"Long ")+"desc.",
1423  new comm_option_mfptr<process>(this,&process::combine),
1424  cli::comm_option_both},
1425  {0,"contours","Specify contour levels",-1,-1,
1426  "[level 1] [level 2] ...",
1427  ((string)"Specify a list of contour levels for the 'hist2' and ")+
1428  "'hist-set' commands. The levels can either be numerical values "+
1429  "or they can be 1s, 2s, or 3s, corresponding to 1-, 2-, and 3-"+
1430  "sigma confidence limits, respectively.",
1431  new comm_option_mfptr<process>(this,&process::contours),
1432  cli::comm_option_both},
1433  {0,"hist2","Create a histogram from two columns of MCMC data.",4,-1,
1434  "<x> <y> <out_file> <file1> [file2 file 3...]",
1435  ((string)"Using the 'markov_chain' objects in a bamr output ")+
1436  "file, construct a two-dimensional histogram from the "+
1437  "specified columns and store the histogram in 'out_file'.",
1438  new comm_option_mfptr<process>(this,&process::hist2),
1439  cli::comm_option_both},
1440  {0,"hist","Create a histogram from one column of MCMC data.",3,-1,
1441  "<column> <out_file> <file1> [file2 file 3...]",
1442  ((string)"Using the 'markov_chain' objects in a bamr output ")+
1443  "file, construct a histogram from the specified column, and "+
1444  "store the histogram in 'out_file'.",
1445  new comm_option_mfptr<process>(this,&process::hist),
1446  cli::comm_option_both},
1447  {0,"auto-corr","Create a table of autocorrelation data",3,-1,
1448  "<column> <out_file> <file1> [file2 file 3...]",
1449  ((string)"Using the 'markov_chain' objects in a bamr output ")+
1450  "file, create a table containing autocorrelation "+
1451  "information about the specified column, and "+
1452  "store the table in 'out_file'.",
1453  new comm_option_mfptr<process>(this,&process::auto_corr),
1454  cli::comm_option_both},
 1455  {0,"hist-set","Create an ensemble of 1-d histograms.",3,-1,
1456  "<direction> <low> <high> <set> <out_file> <file1> [file2...]",
1457  ((string)"Using the 'markov_chain' objects in a bamr output ")+
1458  "file, create an ensemble of 1-d histograms from a set "+
1459  "of columns in each chain. Typical uses are: "+
1460  "\'process -hist-set y m_low m_high R out.o2 x_0_out\', "+
1461  "\'process -hist-set x e_low e_high P out.o2 x_0_out\', and "+
1462  "\'process -hist-set x nb_low nb_high P out.o2 x_0_out\'. ",
1463  new comm_option_mfptr<process>(this,&process::hist_set),
1464  cli::comm_option_both}
1465  };
1466  cl.set_comm_option_vec(nopt,options);
1467 
1468  // ---------------------------------------
1469  // Set parameters
1470 
1471  cli::parameter_double p_xscale;
1472  p_xscale.d=&xscale;
1473  p_xscale.help="Scale parameter for first variable";
1474  cl.par_list.insert(make_pair("xscale",&p_xscale));
1475 
1476  cli::parameter_double p_yscale;
1477  p_yscale.d=&yscale;
1478  p_yscale.help="Scale parameter for second variable";
1479  cl.par_list.insert(make_pair("yscale",&p_yscale));
1480 
1481  cli::parameter_bool p_errors;
1482  p_errors.b=&errors;
 1483  p_errors.help="If true, include error information in 1d histograms (default true).";
1484  cl.par_list.insert(make_pair("errors",&p_errors));
1485 
1486  cli::parameter_bool p_logx;
1487  p_logx.b=&logx;
1488  p_logx.help="If true, use a logarithmic x-axis (default false).";
1489  cl.par_list.insert(make_pair("logx",&p_logx));
1490 
1491  cli::parameter_bool p_logy;
1492  p_logy.b=&logy;
1493  p_logy.help="If true, use a logarithmic y-axis (default false).";
1494  cl.par_list.insert(make_pair("logy",&p_logy));
1495 
1496  cli::parameter_bool p_logz;
1497  p_logz.b=&logz;
1498  p_logz.help="If true, use a logarithmic z-axis (default false).";
1499  cl.par_list.insert(make_pair("logz",&p_logz));
1500 
1501  cli::parameter_int p_hist_size;
1502  p_hist_size.i=&hist_size_int;
1503  p_hist_size.help="Histogram size (default 100).";
1504  cl.par_list.insert(make_pair("hist_size",&p_hist_size));
1505 
1506  cli::parameter_int p_n_blocks;
1507  p_n_blocks.i=&n_blocks;
1508  p_n_blocks.help="Number of blocks (default 20).";
1509  cl.par_list.insert(make_pair("n_blocks",&p_n_blocks));
1510 
1511  cli::parameter_int p_line_start;
1512  p_line_start.i=&line_start;
1513  p_line_start.help=((string)"Number of initial rows to skip over ")+
1514  "when reading MCMC tables.";
1515  cl.par_list.insert(make_pair("line_start",&p_line_start));
1516 
1517  cli::parameter_int p_verbose;
1518  p_verbose.i=&verbose;
1519  p_verbose.help="Verbosity (default 1).";
1520  cl.par_list.insert(make_pair("verbose",&p_verbose));
1521 
1522  cli::parameter_string p_constraint;
1523  p_constraint.str=&constraint;
1524  p_constraint.help="Constraint (default is an empty string).";
1525  cl.par_list.insert(make_pair("constraint",&p_constraint));
1526 
1527  // ---------------------------------------
1528  // Process command-line arguments and run
1529 
1530  cl.run_auto(argc,argv);
1531 
1532  return;
1533  }
1534 
1535  };
1536 
1537 }
1538 
1539 int main(int argc, char *argv[]) {
1540  cout.setf(ios::scientific);
1541  bamr::process pc;
1542  pc.run(argc,argv);
1543  return 0;
1544 }
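As a rough guide to the command-line interface defined above, a few example invocations are sketched below. The input and output file names are hypothetical placeholders; the 'weight' column is one of the columns of bamr's markov_chain tables, the hist-set line is taken from that command's help text, and placing a 'contours' command before 'hist2' on the same command line assumes the usual o2scl cli convention of executing commands in the order given.

  process -hist weight hist_out.o2 mcmc_0_out
  process -auto-corr weight ac_out.o2 mcmc_0_out mcmc_1_out
  process -contours 1s 2s -hist2 <x column> <y column> hist2_out.o2 mcmc_0_out
  process -hist-set y m_low m_high R out.o2 x_0_out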