LALInference 4.1.9.1-8a6b96f
cbcBayesCombinePTMCMCh5s.py
##python
# -*- coding: utf-8 -*-
#
# cbcBayesCombinePTMCMCh5s.py
#
# Copyright 2016
# Ben Farr <benjamin.farr@ligo.org>
#
# Combine multiple HDF5s from the same parallel-tempered run into a common hdf5
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.

from optparse import OptionParser

import h5py

from lalinference import git_version
__author__ = "Ben Farr <benjamin.farr@ligo.org>"
__version__ = "git id %s" % git_version.id
__date__ = git_version.date

USAGE = '''%prog [options] PTMCMC_datafile.hdf5 [PTMCMC_datafile2.hdf5 ...]
Combine chains from a parallel-tempered MCMC run spread across several HDF5 files.
'''
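
# Example invocation (filenames here are hypothetical; list every HDF5 file
# written by the same parallel-tempered run):
#
#   cbcBayesCombinePTMCMCh5s.py -o combined_chains.hdf5 \
#       ptmcmc_chain_0.hdf5 ptmcmc_chain_1.hdf5 ptmcmc_chain_2.hdf5
#
# If -o/--outfile is omitted, the chains are gathered into whichever input file
# already holds the T=1 (posterior_samples) chain.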

if __name__ == '__main__':
    parser = OptionParser(USAGE)
    parser.add_option(
        '-o', '--outfile', type='string', default=None,
        help='Output file for posterior samples. If None, file containing T=1 chain will be used', metavar='combined_chains.hdf5')
    opts, args = parser.parse_args()

    datafiles = args

    group_id = '/lalinference/lalinference_mcmc'

    outfile = opts.outfile

    # find the file containing the coldest chain (i.e. a posterior_samples group)
    for datafile in datafiles:
        rootfile = datafile
        possible_root = h5py.File(datafile, 'a')
        if group_id + '/posterior_samples' in possible_root:
            possible_root.close()
            break
        else:
            possible_root.close()
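
    # Layout assumed under group_id from here on (a sketch; exact chain group
    # names depend on the run):
    #   /lalinference/lalinference_mcmc/posterior_samples   <- coldest (T=1) chain
    #   /lalinference/lalinference_mcmc/...                  <- hotter chains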

    if outfile:
        if outfile != rootfile:
            try:
                outp = h5py.File(outfile, 'w-')

                # Copy contents of the root hdf5 to preserve metadata
                with h5py.File(rootfile, 'r') as inp:
                    for group in inp.keys():
                        inp.copy(group, outp['/'])

            except IOError:
                assert outfile in datafiles, \
                    "Trying to write to an existing file that isn't being explicitly combined"

                outp = h5py.File(outfile, 'a')

                # Copy root over now for consistent behavior
                with h5py.File(rootfile, 'r') as inp:
                    for chain in inp[group_id].keys():
                        chain_id = group_id + '/' + chain
                        inp.copy(chain_id, outp[group_id])

        else:
            # Requested output file is the root file itself: open it for appending
            outp = h5py.File(outfile, 'a')

        # make sure the target group exists
        try:
            outp.create_group(group_id)
        except ValueError:
            pass
    else:
        # No output file requested: append the remaining chains to the root file
        outfile = rootfile
        outp = h5py.File(outfile, 'a')

    for datafile in datafiles:
        if datafile == outfile:
            continue
        with h5py.File(datafile, 'r') as inp:
            for chain in inp[group_id].keys():
                chain_id = group_id + '/' + chain
                if chain_id not in outp:
                    inp.copy(chain_id, outp[group_id])
    outp.close()
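
    # Quick sanity check (illustrative, not part of this script): list what
    # ended up in the combined file, assuming the group layout used above.
    #
    #   with h5py.File(outfile, 'r') as f:
    #       print(list(f['/lalinference/lalinference_mcmc'].keys()))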