37from time
import strftime
40from numpy
import (exp, cos, sin, size, cov, unique, hsplit, log, squeeze, sort)
44from matplotlib
import pyplot
as plt
47from lalinference
import bayespputils
as bppu
49from lalinference
import git_version
50from igwn_ligolw
import table
51from igwn_ligolw
import ligolw
52from igwn_ligolw
import lsctables
55 os.environ[
'PATH'] = os.environ[
'PATH'] +
':/usr/texbin'
57 os.environ[
'PATH'] =
':/usr/texbin'
58print(os.environ[
'PATH'])
# Module metadata: authorship and version information.  The version and
# date strings are taken from the lalinference git_version module
# imported above, so they track the exact git revision this script
# shipped with.
__author__="Ben Aylott <benjamin.aylott@ligo.org>, Ben Farr <bfarr@u.northwestern.edu>, Will M. Farr <will.farr@ligo.org>, John Veitch <john.veitch@ligo.org>"
__version__="git id %s"%git_version.id
__date__= git_version.date
66 address=address.split(
',')
68 USER=os.environ[
'USER']
69 HOST=subprocess.check_output([
"hostname",
"-f"]).strip()
71 SUBJECT=
"LALInference result is ready at "+HOST+
"!"
73 fslocation=os.path.abspath(path)
74 webpath=
'posplots.html'
75 if 'public_html' in fslocation:
77 elif 'WWW' in fslocation:
84 (a,b)=fslocation.split(a)
85 webpath=os.path.join(b,webpath)
86 else: webpath=os.path.join(fslocation,
'posplots.html')
88 if 'atlas.aei.uni-hannover.de' in HOST:
89 url=
"https://atlas1.atlas.aei.uni-hannover.de/"
90 elif 'ligo.caltech.edu' in HOST:
91 url=
"https://ldas-jobs.ligo.caltech.edu/"
92 elif 'ligo-wa.caltech.edu' in HOST:
93 url=
"https://ldas-jobs.ligo-wa.caltech.edu/"
94 elif 'ligo-la.caltech.edu' in HOST:
95 url=
"https://ldas-jobs.ligo-la.caltech.edu/"
96 elif 'phys.uwm.edu' in HOST:
97 url=
"https://ldas-jobs.phys.uwm.edu/"
98 elif 'phy.syr.edu' in HOST:
99 url=
"https://sugar-jobs.phy.syr.edu/"
104 TEXT=
"Hi "+USER+
",\nYou have a new parameter estimation result on "+HOST+
".\nYou can view the result at "+url
105 message=
"From: %s\nTo: %s\nSubject: %s\n\n%s"%(FROM,
', '.join(address),SUBJECT,TEXT)
106 server=smtplib.SMTP(SERVER)
107 server.sendmail(FROM,address,message)
113 ligolw.LIGOLWContentHandler.__init__(self,document)
114 self.
tabname=lsctables.SimInspiralTable.tableName
118 if attrs.has_key(
'Name')
and table.Table.TableName(attrs[
'Name'])==self.
tabname:
121 ligolw.LIGOLWContentHandler.startElement(self,name,attrs)
124 ligolw.LIGOLWContentHandler.startElement(self,name,attrs)
126 if self.
intable: ligolw.LIGOLWContentHandler.endElement(self,name)
131 ligolw.LIGOLWContentHandler.__init__(self,document)
132 self.
tabname=lsctables.SimBurstTable.tableName
136 if attrs.has_key(
'Name')
and table.Table.TableName(attrs[
'Name'])==self.
tabname:
139 ligolw.LIGOLWContentHandler.startElement(self,name,attrs)
142 ligolw.LIGOLWContentHandler.startElement(self,name,attrs)
144 if self.
intable: ligolw.LIGOLWContentHandler.endElement(self,name)
149 Pickle/serialize 'obj' into
'fname'.
151 filed=open(fname,'w')
152 pickle.dump(obj,filed)
156 filed=open(fname,
'w')
157 for key,value
in dict.items():
158 filed.write(
"%s %s\n"%(
str(key),
str(value)) )
170 for arg
in parser.rargs:
172 if arg[:2] ==
"--" and len(arg) > 2:
175 if arg[:1] ==
"-" and len(arg) > 1
and not floatable(arg):
179 del parser.rargs[:len(args)]
181 if getattr(parser.values, opt.dest):
182 oldargs = getattr(parser.values, opt.dest)
185 setattr(parser.values, opt.dest, args)
188 out=bppu.htmlChunk(
'div',parent=parent)
190 row=out.insert_row(tab)
192 out.insert_td(row,
str(key))
193 row2=out.insert_row(tab)
194 for val
in d.values():
195 out.insert_td(row2,
str(val))
201 sec=bppu.htmlSection(h5grp.name,htmlElement=parent)
204 if(isinstance(h5grp[group],h5py.Group)):
209 outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,
210 confidence_levels,twoDplots,
212 injfile=None,eventnum=None,
213 trigfile=None,trignum=None,
216 dievidence=False,boxing=64,difactor=1.0,
220 bayesfactornoise=None,bayesfactorcoherent=None,
224 ns_flag=False,ns_Nlive=None,
226 ss_flag=False,ss_spin_flag=False,
228 li_flag=False,deltaLogL=None,fixedBurnins=None,nDownsample=None,oldMassConvention=False,
236 RconvergenceTests=False,
240 covarianceMatrices=None,
250 This is a demonstration script
for using the functionality/data structures
252 posterior samples generated by the parameter estimation codes
with 1D/2D plots
253 and stats
from the marginal posteriors
for each parameter/set of parameters.
258 if eventnum
is not None and injfile
is None:
259 print(
"You specified an event number but no injection file. Ignoring!")
261 if trignum
is not None and trigfile
is None:
262 print(
"You specified a trigger number but no trigger file. Ignoring!")
264 if trignum
is None and trigfile
is not None:
265 print(
"You specified a trigger file but no trigger number. Taking first entry (the case for GraceDB events).")
269 raise RuntimeError(
'You must specify an input data file')
272 raise RuntimeError(
"You must specify an output directory.")
274 if not os.path.isdir(outdir):
278 peparser=bppu.PEOutputParser(
'fm')
279 commonResultsObj=peparser.parse(data)
281 elif ns_flag
and not ss_flag:
282 peparser=bppu.PEOutputParser(
'ns')
283 commonResultsObj=peparser.parse(data,Nlive=ns_Nlive)
285 elif ss_flag
and not ns_flag:
286 peparser=bppu.PEOutputParser(
'mcmc_burnin')
287 commonResultsObj=peparser.parse(data,spin=ss_spin_flag,deltaLogL=deltaLogL)
290 peparser=bppu.PEOutputParser(
'inf_mcmc')
291 commonResultsObj=peparser.parse(data,outdir=outdir,deltaLogL=deltaLogL,fixedBurnins=fixedBurnins,nDownsample=nDownsample,oldMassConvention=oldMassConvention)
293 elif ss_flag
and ns_flag:
294 raise RuntimeError(
"Undefined input format. Choose only one of:")
296 elif '.xml' in data[0]:
297 peparser=bppu.PEOutputParser(
'xml')
298 commonResultsObj=peparser.parse(data[0])
299 thefile=open(data[0],
'r')
300 votfile=thefile.read()
301 elif '.hdf' in data[0]
or '.h5' in data[0]:
302 peparser = bppu.PEOutputParser(
'hdf5')
303 commonResultsObj = peparser.parse(data[0])
305 peparser=bppu.PEOutputParser(
'common')
306 commonResultsObj=peparser.parse(open(data[0],
'r'),info=[header,
None])
311 ps,samps = commonResultsObj
313 f_refIdx = ps.index(
'f_ref')
314 injFref = unique(samps[:,f_refIdx])
316 print(
"ERROR: Expected f_ref to be constant for all samples. Can't tell which value was injected!")
326 if injfile
and eventnum
is not None:
327 print(
'Looking for event %i in %s\n'%(eventnum,injfile))
329 from igwn_ligolw
import ligolw
330 from igwn_ligolw
import lsctables
331 from igwn_ligolw
import utils
332 xmldoc = utils.load_filename(injfile,contenthandler=LIGOLWContentHandlerExtractSimBurstTable)
335 simtable=lsctables.SimBurstTable.get_table(xmldoc)
337 simtable=lsctables.SimInspiralTable.get_table(xmldoc)
341 injection=simtable[eventnum]
348 if trigfile
is not None and trignum
is not None:
349 triggers = bppu.readCoincXML(trigfile, trignum)
353 if bayesfactornoise
is not None:
354 bfile=open(bayesfactornoise,
'r')
357 if(len(BSN.split())!=1):
360 if bayesfactorcoherent
is not None:
361 bfile=open(bayesfactorcoherent,
'r')
366 if snrfactor
is not None:
367 if not os.path.isfile(snrfactor):
368 print(
"No snr file provided or wrong path to snr file\n")
372 snrfile=open(snrfactor,
'r')
373 snrs=snrfile.readlines()
378 snrstring=snrstring +
" "+
str(snr[0:-1])+
" ,"
379 snrstring=snrstring[0:-1]
383 if got_inspiral_table==1:
384 pos = bppu.Posterior(commonResultsObj,SimInspiralTableEntry=injection,injFref=injFref,SnglInspiralList=triggers)
386 pos = bppu.BurstPosterior(commonResultsObj,SimBurstTableEntry=injection,injFref=injFref,SnglBurstList=triggers)
388 analyticLikelihood =
None
389 if covarianceMatrices
and meanVectors:
390 analyticLikelihood = bppu.AnalyticLikelihood(covarianceMatrices, meanVectors)
393 oneDMenu = analyticLikelihood.names
395 for i
in range(len(oneDMenu)):
396 for j
in range(i+1,len(oneDMenu)):
397 twoDGreedyMenu.append([oneDMenu[i],oneDMenu[j]])
398 twoDplots = twoDGreedyMenu
400 if eventnum
is None and injfile
is not None:
402 from igwn_ligolw
import lsctables
403 from igwn_ligolw
import utils
404 if got_inspiral_table==1:
405 injections = lsctables.SimInspiralTable.get_table(
406 utils.load_filename(injfile)
409 if(len(injections)<1):
411 print(
'Warning: Cannot find injection with end time %f' %(pos[
'time'].mean))
413 print(
"Warning: No 'time' column!")
417 injection = itertools.ifilter(
lambda a: abs(float(a.get_end()) - pos[
'time'].mean) < 0.1, injections).next()
418 pos.set_injection(injection)
420 print(
"Warning: No 'time' column!")
423 if (
'ra' in pos.names
or 'rightascension' in pos.names) \
424 and (
'declination' in pos.names
or 'dec' in pos.names) \
425 and 'time' in pos.names:
426 from lal
import LIGOTimeGPS, TimeDelayFromEarthCenter
428 detMap = {
'H1':
'LHO_4k',
'H2':
'LHO_2k',
'L1':
'LLO_4k',
429 'G1':
'GEO_600',
'V1':
'VIRGO',
'T1':
'TAMA_300'}
430 if 'ra' in pos.names:
432 else: ra_name=
'rightascension'
433 if 'dec' in pos.names:
435 else: dec_name=
'declination'
437 my_ifos=[
'H1',
'L1',
'V1']
441 inj_time=float(injection.get_end(ifo[0]))
442 location=lal.cached_detectors_by_prefix[ifo].location
443 ifo_times[ifo]=array(map(
lambda ra,dec,time: array([time[0]+TimeDelayFromEarthCenter(location,ra[0],dec[0],
LIGOTimeGPS(float(time[0])))]), pos[ra_name].samples,pos[dec_name].samples,pos[
'time'].samples))
444 loc_end_time=bppu.PosteriorOneDPDF(ifo.lower()+
'_end_time',ifo_times[ifo],injected_value=inj_time)
445 pos.append(loc_end_time)
448 if ifo1==ifo2:
continue
449 delay_time=ifo_times[ifo2]-ifo_times[ifo1]
451 inj_delay=float(injection.get_end(ifo2[0])-injection.get_end(ifo1[0]))
454 time_delay=bppu.PosteriorOneDPDF(ifo1.lower()+ifo2.lower()+
'_delay',delay_time,inj_delay)
455 pos.append(time_delay)
459 functions = {
'cos':cos,
'sin':sin,
'exp':exp,
'log':log}
460 for pos_name
in oneDMenu:
461 if pos_name
not in pos.names:
462 for func
in functions.keys():
463 old_pos_name = pos_name.replace(func,
'')
464 if pos_name.find(func)==0
and old_pos_name
in pos.names:
465 print(
"Taking %s of %s ..."% (func,old_pos_name))
466 pos.append_mapping(pos_name,functions[func],old_pos_name)
469 requested_params = set(pos.names).intersection(set(oneDMenu))
470 pos.delete_NaN_entries(requested_params)
473 if analyticLikelihood:
474 dievidence_names = [
'post',
'posterior',
'logl',
'prior',
'likelihood',
'cycle',
'chain']
475 [pos.pop(param)
for param
in pos.names
if param
not in analyticLikelihood.names
and param
not in dievidence_names]
479 print(
"Number of posterior samples: %i"%len(pos))
482 print(
str(pos.means))
485 print(
str(pos.medians))
488 max_pos,max_pos_co=pos.maxL
495 html=bppu.htmlPage(
'Posterior PDFs',css=bppu.__default_css_string,javascript=bppu.__default_javascript_string)
498 html_meta=html.add_section(
'Summary')
499 table=html_meta.tab()
501 row=html_meta.insert_row(table,label=
'thisrow')
502 td=html_meta.insert_td(row,
'',label=
'Samples')
503 SampsStats=html.add_section_to_element(
'Samples',td)
504 SampsStats.p(
'Produced from '+
str(len(pos))+
' posterior samples.')
505 if 'chain' in pos.names:
506 acceptedChains = unique(pos[
'chain'].samples)
507 acceptedChainText =
'%i of %i chains accepted: %i'%(len(acceptedChains),len(data),acceptedChains[0])
508 if len(acceptedChains) > 1:
509 for chain
in acceptedChains[1:]:
510 acceptedChainText +=
', %i'%(chain)
511 SampsStats.p(acceptedChainText)
512 if 'cycle' in pos.names:
513 SampsStats.p(
'Longest chain has '+
str(pos.longest_chain_cycles())+
' cycles.')
514 filenames=
'Samples read from %s'%(data[0])
516 for fname
in data[1:]:
517 filenames+=
', '+
str(fname)
518 SampsStats.p(filenames)
519 td=html_meta.insert_td(row,
'',label=
'SummaryLinks')
520 legend=html.add_section_to_element(
'Sections',td)
523 if '.h5' in data[0]
or '.hdf' in data[0]:
524 html_hdf=html.add_section(
'Metadata',legend=legend)
526 with h5py.File(data[0],
'r')
as h5grp:
531 if bayesfactornoise
is not None:
532 html_model=html.add_section(
'Model selection',legend=legend)
533 html_model.p(
'log Bayes factor ( coherent vs gaussian noise) = %s, Bayes factor=%f'%(BSN,exp(float(BSN))))
534 if bayesfactorcoherent
is not None:
535 html_model.p(
'log Bayes factor ( coherent vs incoherent OR noise ) = %s, Bayes factor=%f'%(BCI,exp(float(BCI))))
538 html_model=html.add_section(
'Direct Integration Evidence',legend=legend)
539 log_ev = log(difactor) + pos.di_evidence(boxing=boxing)
541 evfilename=os.path.join(outdir,
"evidence.dat")
542 evout=open(evfilename,
"w")
545 evout.write(
str(log_ev))
547 print(
"Computing direct integration evidence = %g (log(Evidence) = %g)"%(ev, log_ev))
548 html_model.p(
'Direct integration evidence is %g, or log(Evidence) = %g. (Boxing parameter = %d.)'%(ev,log_ev,boxing))
549 if 'logl' in pos.names:
550 log_ev=pos.harmonic_mean_evidence()
551 html_model.p(
'Compare to harmonic mean evidence of %g (log(Evidence) = %g).'%(exp(log_ev),log_ev))
555 html_model=html.add_section(
'Elliptical Evidence',legend=legend)
556 log_ev = pos.elliptical_subregion_evidence()
558 evfilename=os.path.join(outdir,
'ellevidence.dat')
559 evout=open(evfilename,
'w')
560 evout.write(
str(ev) +
' ' +
str(log_ev))
562 print(
'Computing elliptical region evidence = %g (log(ev) = %g)'%(ev, log_ev))
563 html_model.p(
'Elliptical region evidence is %g, or log(Evidence) = %g.'%(ev, log_ev))
565 if 'logl' in pos.names:
566 log_ev=pos.harmonic_mean_evidence()
567 html_model.p(
'Compare to harmonic mean evidence of %g (log(Evidence = %g))'%(exp(log_ev), log_ev))
569 print(
'Warning: Sample size too small to compute elliptical evidence!')
572 if snrfactor
is not None:
573 html_snr=html.add_section(
'Signal to noise ratio(s)',legend=legend)
574 html_snr.p(
'%s'%snrstring)
578 html_stats=html.add_collapse_section(
'Summary statistics',legend=legend,innertable_id=tabid)
579 html_stats.write(
str(pos))
580 statfilename=os.path.join(outdir,
"summary_statistics.dat")
581 statout=open(statfilename,
"w")
582 statout.write(
"\tmaxP\tmaxL\tstdev\tmean\tmedian\tstacc\tinjection\tvalue\n")
584 for statname,statoned_pos
in pos:
586 statmax_pos,max_i=pos._posMaxL()
587 statmaxL=statoned_pos.samples[max_i][0]
588 statmax_pos,max_j=pos._posMap()
589 statmaxP=statoned_pos.samples[max_j][0]
590 statmean=
str(statoned_pos.mean)
591 statstdev=
str(statoned_pos.stdev)
592 statmedian=
str(squeeze(statoned_pos.median))
593 statstacc=
str(statoned_pos.stacc)
594 statinjval=
str(statoned_pos.injval)
596 statarray=[
str(i)
for i
in [statname,statmaxP,statmaxL,statstdev,statmean,statmedian,statstacc,statinjval]]
597 statout.write(
"\t".join(statarray))
607 sky_injection_cl=
None
610 html_wf=html.add_collapse_section(
'Sky Localization and Waveform',innertable_id=tabid)
612 table=html_wf.tab(idtable=tabid)
613 row=html_wf.insert_row(table,label=
'SkyandWF')
614 skytd=html_wf.insert_td(row,
'',label=
'SkyMap',legend=legend)
615 html_sky=html.add_section_to_element(
'SkyMap',skytd)
617 if skyres
is not None and \
618 (
'ra' in pos.names
and 'dec' in pos.names):
620 if pos[
'dec'].injval
is not None and pos[
'ra'].injval
is not None:
621 inj_position=[pos[
'ra'].injval,pos[
'dec'].injval]
625 hpmap = pos.healpix_map(float(skyres), nest=
True)
626 bppu.plot_sky_map(hpmap, outdir, inj=inj_position, nest=
True)
628 if inj_position
is not None:
629 html_sky.p(
'Injection found at p = %g'%bppu.skymap_inj_pvalue(hpmap, inj_position, nest=
True))
631 html_sky.write(
'<a href="skymap.png" target="_blank"><img src="skymap.png"/></a>')
633 html_sky_write=
'<table border="1" id="statstable"><tr><th>Confidence region</th><th>size (sq. deg)</th></tr>'
635 areas = bppu.skymap_confidence_areas(hpmap, confidence_levels)
636 for cl, area
in zip(confidence_levels, areas):
637 html_sky_write+=
'<tr><td>%g</td><td>%g</td></tr>'%(cl, area)
638 html_sky_write+=(
'</table>')
640 html_sky.write(html_sky_write)
642 html_sky.write(
'<b>No skymap generated!</b>')
644 wfdir=os.path.join(outdir,
'Waveform')
645 if not os.path.isdir(wfdir):
648 wfpointer= bppu.plot_burst_waveform(pos=pos,simburst=injfile,event=eventnum,path=wfdir)
651 wftd=html_wf.insert_td(row,
'',label=
'Waveform',legend=legend)
652 wfsection=html.add_section_to_element(
'Waveforms',wftd)
653 if wfpointer
is not None:
654 wfsection.write(
'<a href="Waveform/WF_DetFrame.png" target="_blank"><img src="Waveform/WF_DetFrame.png"/></a>')
656 wfsection.write(
"<b>No Waveform generated!</b>")
658 wftd=html_wf.insert_td(row,
'',label=
'PSDs',legend=legend)
659 wfsection=html.add_section_to_element(
'PSDs',wftd)
661 if psd_files
is not None:
662 psd_files=list(psd_files.split(
','))
663 psddir=os.path.join(outdir,
'PSDs')
664 if not os.path.isdir(psddir):
667 psd_pointer=bppu.plot_psd(psd_files,outpath=psddir)
671 wfsection.write(
'<a href="PSDs/PSD.png" target="_blank"><img src="PSDs/PSD.png"/></a>')
673 wfsection.write(
"<b>No PSD file found!</b>")
684 tabid=
'onedmargtable'
685 html_ompdf=html.add_collapse_section(
'1D marginal posterior PDFs',legend=legend,innertable_id=tabid)
688 html_ompdf_write=
'<table id="%s"><tr><th>Histogram and Kernel Density Estimate</th><th>Samples used</th><th>Autocorrelation</th></tr>'%tabid
690 html_ompdf_write=
'<table id="%s"><tr><th>Histogram and Kernel Density Estimate</th><th>Samples used</th></tr>'%tabid
692 tabid=
'onedconftable'
693 html_ogci=html.add_collapse_section(
'1D confidence intervals (greedy binning)',legend=legend,innertable_id=tabid)
694 html_ogci_write=
'<table id="%s" border="1"><tr><th/>'%tabid
695 confidence_levels.sort()
696 for cl
in confidence_levels:
697 html_ogci_write+=
'<th>%f</th>'%cl
699 html_ogci_write+=
'<th>Injection Confidence Level</th>'
700 html_ogci_write+=
'<th>Injection Confidence Interval</th>'
701 html_ogci_write+=
'</tr>'
703 onepdfdir=os.path.join(outdir,
'1Dpdf')
704 if not os.path.isdir(onepdfdir):
705 os.makedirs(onepdfdir)
707 sampsdir=os.path.join(outdir,
'1Dsamps')
708 if not os.path.isdir(sampsdir):
709 os.makedirs(sampsdir)
711 if 'chain' in pos.names:
712 data,header=pos.samples()
713 par_index=pos.names.index(
'cycle')
714 chain_index=pos.names.index(
"chain")
715 chains=unique(pos[
"chain"].samples)
716 chainCycles = [sort(data[ data[:,chain_index] == chain, par_index ])
for chain
in chains]
717 chainNcycles = [cycles[-1]-cycles[0]
for cycles
in chainCycles]
718 chainNskips = [cycles[1] - cycles[0]
for cycles
in chainCycles]
719 elif 'cycle' in pos.names:
720 cycles = sort(pos[
'cycle'].samples)
721 Ncycles = cycles[-1]-cycles[0]
722 Nskip = cycles[1]-cycles[0]
724 for par_name
in oneDMenu:
725 par_name=par_name.lower()
727 pos[par_name.lower()]
732 par_bin=GreedyRes[par_name]
734 print(
"Bin size is not set for %s, skipping binning."%par_name)
738 binParams={par_name:par_bin}
740 toppoints,injectionconfidence,reses,injection_area,cl_intervals=bppu.greedy_bin_one_param(pos,binParams,confidence_levels)
745 BCItableline=
'<tr><td>%s</td>'%(par_name)
746 cls=list(reses.keys())
750 BCItableline+=
'<td>%f</td>'%reses[cl]
752 if injection
is not None:
753 if injectionconfidence
is not None and injection_area
is not None:
755 BCItableline+=
'<td>%f</td>'%injectionconfidence
756 BCItableline+=
'<td>%f</td>'%injection_area
759 BCItableline+=
'<td/>'
760 BCItableline+=
'<td/>'
762 BCItableline+=
'</tr>'
765 html_ogci_write+=BCItableline
768 print(
"Generating 1D plot for %s."%par_name)
772 if analyticLikelihood:
773 pdf = analyticLikelihood.pdf(par_name)
774 cdf = analyticLikelihood.cdf(par_name)
776 oneDPDFParams={par_name:50}
777 rbins,plotFig=bppu.plot_one_param_pdf(pos,oneDPDFParams,pdf,cdf,plotkde=
False)
779 figname=par_name+
'.png'
780 oneDplotPath=os.path.join(onepdfdir,figname)
781 plotFig.savefig(oneDplotPath)
782 if(savepdfs): plotFig.savefig(os.path.join(onepdfdir,par_name+
'.pdf'))
786 print(
"r of injected value of %s (bins) = %f"%(par_name, rbins))
789 myfig=plt.figure(figsize=(4,3.5),dpi=200)
790 pos_samps=pos[par_name].samples
791 if not (
"chain" in pos.names):
794 plt.plot(pos_samps,
'k.',linewidth=0.0, markeredgewidth=0,figure=myfig)
795 maxLen=len(pos_samps)
800 data,header=pos.samples()
801 par_index=pos.names.index(par_name)
802 chain_index=pos.names.index(
"chain")
803 chains=unique(pos[
"chain"].samples)
804 chainData=[data[ data[:,chain_index] == chain, par_index ]
for chain
in chains]
805 chainDataRanges=[range(len(cd))
for cd
in chainData]
806 maxLen=
max([len(cd)
for cd
in chainData])
807 for rng, data
in zip(chainDataRanges, chainData):
808 plt.plot(rng, data, marker=
',',linewidth=0.0, markeredgewidth=0,figure=myfig)
809 plt.title(
"Gelman-Rubin R = %g"%(pos.gelman_rubin(par_name)))
817 injpar=pos[par_name].injval
820 if min(pos_samps)<injpar
and max(pos_samps)>injpar:
821 plt.axhline(injpar, color=
'r', linestyle=
'-.')
822 myfig.savefig(os.path.join(sampsdir,figname.replace(
'.png',
'_samps.png')))
823 if(savepdfs): myfig.savefig(os.path.join(sampsdir,figname.replace(
'.png',
'_samps.pdf')))
827 acffig=plt.figure(figsize=(4,3.5),dpi=200)
828 if not (
"chain" in pos.names):
831 (Neff, acl, acf) = bppu.effectiveSampleSize(data, Nskip)
832 lines=plt.plot(acf,
'k,', marker=
',',linewidth=0.0, markeredgewidth=0, figure=acffig)
834 if nDownsample
is None:
835 plt.title(
'Autocorrelation Function')
836 elif 'cycle' in pos.names:
837 last_color = lines[-1].get_color()
838 plt.axvline(acl/Nskip, linestyle=
'-.', color=last_color)
839 plt.title(
'ACL = %i N = %i'%(acl,Neff))
840 except FloatingPointError:
848 for rng, data, Nskip, Ncycles
in zip(chainDataRanges, chainData, chainNskips, chainNcycles):
849 (Neff, acl, acf) = bppu.effectiveSampleSize(data, Nskip)
852 lines=plt.plot(acf,
'k,', marker=
',',linewidth=0.0, markeredgewidth=0, figure=acffig)
854 if nDownsample
is not None:
855 last_color = lines[-1].get_color()
856 plt.axvline(acl/Nskip, linestyle=
'-.', color=last_color)
857 if nDownsample
is None:
858 plt.title(
'Autocorrelation Function')
860 plt.title(
'ACL = %i N = %i'%(
max(acls),Nsamps))
861 except FloatingPointError:
866 acffig.savefig(os.path.join(sampsdir,figname.replace(
'.png',
'_acf.png')))
867 if(savepdfs): acffig.savefig(os.path.join(sampsdir,figname.replace(
'.png',
'_acf.pdf')))
872 acfhtml=
'<td width="30%"><img width="100%" src="1Dsamps/'+figname.replace(
'.png',
'_acf.png')+
'"/></td>'
874 acfhtml=
'<td>ACF generation failed!</td>'
875 html_ompdf_write+=
'<tr><td width="30%"><img width="100%" src="1Dpdf/'+figname+
'"/></td><td width="30%"><img width="100%" src="1Dsamps/'+figname.replace(
'.png',
'_samps.png')+
'"/></td>'+acfhtml+
'</tr>'
877 html_ompdf_write+=
'<tr><td width="30%"><img width="100%" src="1Dpdf/'+figname+
'"/></td><td width="30%"><img width="100%" src="1Dsamps/'+figname.replace(
'.png',
'_samps.png')+
'"/></td></tr>'
880 html_ompdf_write+=
'</table>'
882 html_ompdf.write(html_ompdf_write)
884 html_ogci_write+=
'</table>'
885 html_ogci.write(html_ogci_write)
896 margdir=os.path.join(outdir,
'2Dkde')
897 if not os.path.isdir(margdir):
900 twobinsdir=os.path.join(outdir,
'2Dbins')
901 if not os.path.isdir(twobinsdir):
902 os.makedirs(twobinsdir)
904 greedytwobinsdir=os.path.join(outdir,
'greedy2Dbins')
905 if not os.path.isdir(greedytwobinsdir):
906 os.makedirs(greedytwobinsdir)
911 html_tcig=html.add_collapse_section(
'2D confidence intervals (greedy binning)',legend=legend,innertable_id=tabid)
913 html_tcig_write=
'<table id="%s" border="1"><tr><th/>'%tabid
914 confidence_levels.sort()
915 for cl
in confidence_levels:
916 html_tcig_write+=
'<th>%f</th>'%cl
918 html_tcig_write+=
'<th>Injection Confidence Level</th>'
919 html_tcig_write+=
'<th>Injection Confidence Interval</th>'
920 html_tcig_write+=
'</tr>'
924 twodkdeplots_flag=twodkdeplots
925 if twodkdeplots_flag:
927 html_tcmp=html.add_collapse_section(
'2D Marginal PDFs',legend=legend,innertable_id=tabid)
929 html_tcmp_write=
'<table border="1" id="%s">'%tabid
930 tabid=
'2dgreedytable'
931 html_tgbh=html.add_collapse_section(
'2D Greedy Bin Histograms',legend=legend,innertable_id=tabid)
932 html_tgbh_write=
'<table border="1" id="%s">'%tabid
937 for par1_name,par2_name
in twoDGreedyMenu:
938 par1_name=par1_name.lower()
939 par2_name=par2_name.lower()
941 pos[par1_name.lower()]
946 pos[par2_name.lower()]
952 par1_bin=GreedyRes[par1_name]
954 print(
"Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name))
957 par2_bin=GreedyRes[par2_name]
959 print(
"Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name))
964 greedy2Params={par1_name:par1_bin,par2_name:par2_bin}
968 toppoints,injection_cl,reses,injection_area=\
969 bppu.greedy_bin_two_param(pos,greedy2Params,confidence_levels)
974 print(
"BCI %s-%s:"%(par1_name,par2_name))
978 BCItableline=
'<tr><td>%s-%s</td>'%(par1_name,par2_name)
979 cls=list(reses.keys())
983 BCItableline+=
'<td>%f</td>'%reses[cl]
985 if injection
is not None:
986 if injection_cl
is not None:
987 BCItableline+=
'<td>%f</td>'%injection_cl
988 BCItableline+=
'<td>'+
str(injection_area)+
'</td>'
991 BCItableline+=
'<td/>'
992 BCItableline+=
'<td/>'
994 BCItableline+=
'</tr>'
997 html_tcig_write+=BCItableline
1004 greedy2ContourPlot=bppu.plot_two_param_kde_greedy_levels({
'Result':pos},greedy2Params,[0.67,0.9,0.95],{
'Result':
'k'})
1005 greedy2contourpath=os.path.join(greedytwobinsdir,
'%s-%s_greedy2contour.png'%(par1_name,par2_name))
1006 greedy2ContourPlot.savefig(greedy2contourpath)
1007 if(savepdfs): greedy2ContourPlot.savefig(greedy2contourpath.replace(
'.png',
'.pdf'))
1008 plt.close(greedy2ContourPlot)
1010 greedy2HistFig=bppu.plot_two_param_greedy_bins_hist(pos,greedy2Params,confidence_levels)
1011 greedy2histpath=os.path.join(greedytwobinsdir,
'%s-%s_greedy2.png'%(par1_name,par2_name))
1012 greedy2HistFig.savefig(greedy2histpath)
1013 if(savepdfs): greedy2HistFig.savefig(greedy2histpath.replace(
'.png',
'.pdf'))
1014 plt.close(greedy2HistFig)
1017 greedyFile = open(os.path.join(twobinsdir,
'%s_%s_greedy_stats.txt'%(par1_name,par2_name)),
'w')
1021 greedyFile.write(
"%lf %lf\n"%(cl,reses[cl]))
1024 if [par1_name,par2_name]
in twoDplots
or [par2_name,par1_name]
in twoDplots :
1025 print(
'Generating %s-%s greedy hist plot'%(par1_name,par2_name))
1027 par1_pos=pos[par1_name].samples
1028 par2_pos=pos[par2_name].samples
1030 if (
size(unique(par1_pos))<2
or size(unique(par2_pos))<2):
1032 head,figname=os.path.split(greedy2histpath)
1033 head,figname_c=os.path.split(greedy2contourpath)
1035 html_tgbh_write+=
'<tr>'
1036 html_tgbh_write+=
'<td width="30%"><img width="100%" src="greedy2Dbins/'+figname+
'"/>[<a href="greedy2Dbins/'+figname_c+
'">contour</a>]</td>'
1039 html_tgbh_write+=
'</tr>'
1044 if twodkdeplots_flag
is True:
1045 if [par1_name,par2_name]
in twoDplots
or [par2_name,par1_name]
in twoDplots :
1046 print(
'Generating %s-%s plot'%(par1_name,par2_name))
1048 par1_pos=pos[par1_name].samples
1049 par2_pos=pos[par2_name].samples
1051 if (
size(unique(par1_pos))<2
or size(unique(par2_pos))<2):
1054 plot2DkdeParams={par1_name:50,par2_name:50}
1055 myfig=bppu.plot_two_param_kde(pos,plot2DkdeParams)
1057 figname=par1_name+
'-'+par2_name+
'_2Dkernel.png'
1058 twoDKdePath=os.path.join(margdir,figname)
1061 html_tcmp_write+=
'<tr>'
1062 html_tcmp_write+=
'<td width="30%"><img width="100%" src="2Dkde/'+figname+
'"/></td>'
1065 html_tcmp_write+=
'</tr>'
1068 myfig.savefig(twoDKdePath)
1069 if(savepdfs): myfig.savefig(twoDKdePath.replace(
'.png',
'.pdf'))
1073 html_tcig_write+=
'</table>'
1074 html_tcig.write(html_tcig_write)
1076 if twodkdeplots_flag
is True:
1079 html_tcmp_write+=
'<td/>'
1083 html_tcmp_write+=
'</tr>'
1084 html_tcmp_write+=
'</table>'
1085 html_tcmp.write(html_tcmp_write)
1087 html_tcmp.a(
"2Dkde/",
'All 2D marginal PDFs (kde)')
1090 while row_count_gb!=0:
1091 html_tgbh_write+=
'<td/>'
1095 html_tgbh_write+=
'</tr>'
1096 html_tgbh_write+=
'</table>'
1097 html_tgbh.write(html_tgbh_write)
1099 html_tgbh.a(
"greedy2Dbins/",
'All 2D Greedy Bin Histograms')
1101 if RconvergenceTests
is True:
1102 convergenceResults=bppu.convergenceTests(pos,gelman=
False)
1104 if convergenceResults
is not None:
1106 html_conv_test=html.add_collapse_section(
'Convergence tests',legend=legend,innertable_id=tabid)
1108 for test,test_data
in convergenceResults.items():
1112 html_conv_test.h3(test)
1114 html_conv_table_rows={}
1115 html_conv_table_header=
''
1116 for chain,chain_data
in test_data.items():
1117 html_conv_table_header+=
'<th>%s</th>'%chain
1120 for data
in chain_data:
1123 html_conv_table_rows[data[0]]+=
'<td>'+data[1]+
'</td>'
1125 html_conv_table_rows[data[0]]=
'<td>'+data[1]+
'</td>'
1127 html_conv_table=
'<table id="%s"><tr><th>Chain</th>'%tabid+html_conv_table_header+
'</tr>'
1128 for row_name,row
in html_conv_table_rows.items():
1129 html_conv_table+=
'<tr><td>%s</td>%s</tr>'%(row_name,row)
1130 html_conv_table+=
'</table>'
1131 html_conv_test.write(html_conv_table)
1132 if data_found
is False:
1133 html_conv_test.p(
'No convergence diagnostics generated!')
1136 html_stats_cov=html.add_collapse_section(
'Covariance matrix',legend=legend,innertable_id=tabid)
1137 pos_samples,table_header_string=pos.samples()
1139 cov_matrix=cov(pos_samples,rowvar=0,bias=1)
1142 table_header_list=table_header_string.split()
1144 cov_table_string=
'<table border="1" id="%s"><tr><th/>'%tabid
1145 for header
in table_header_list:
1146 cov_table_string+=
'<th>%s</th>'%header
1147 cov_table_string+=
'</tr>'
1149 cov_column_list=hsplit(cov_matrix,cov_matrix.shape[1])
1151 for cov_column,cov_column_name
in zip(cov_column_list,table_header_list):
1152 cov_table_string+=
'<tr><th>%s</th>'%cov_column_name
1153 for cov_column_element
in cov_column:
1154 cov_table_string+=
'<td>%.3e</td>'%(cov_column_element[0])
1155 cov_table_string+=
'</tr>'
1156 cov_table_string+=
'</table>'
1157 html_stats_cov.write(cov_table_string)
1159 html_footer=html.add_section(
'')
1160 html_footer.p(
'Produced using cbcBayesPostProc.py at '+strftime(
"%Y-%m-%d %H:%M:%S")+
' .')
1163 for arg
in sys.argv:
1166 html_footer.p(
'Command line: %s'%cc_args)
1167 html_footer.p(git_version.verbose_msg)
1170 resultspage=open(os.path.join(outdir,
'posplots.html'),
'w')
1171 resultspage.write(
str(html))
1174 posfilename=os.path.join(outdir,
'posterior_samples.dat')
1175 pos.write_to_file(posfilename)
1180USAGE=
'''%prog [options] datafile.dat [datafile2.dat ...]
1181Generate a web page displaying results of parameter estimation based on the contents
1182of one or more datafiles containing samples from one of the bayesian algorithms (MCMC, nested sampling).
1183Options specify which extra statistics to compute and allow specification of additional information.
if __name__=='__main__':
    # Command-line entry point: parse options, assemble the default lists of
    # parameters to post-process, then hand everything to
    # cbcBayesBurstPostProc() (defined earlier in this module).
    from optparse import OptionParser
    parser=OptionParser(USAGE)
    parser.add_option("-o","--outpath", dest="outpath",help="make page and plots in DIR", metavar="DIR")
    # -d/--data and --fixedBurnin use the module-level multipleFileCB callback
    # so the option may be repeated / accept several files.
    parser.add_option("-d","--data",dest="data",action="callback",callback=multipleFileCB,help="datafile")
    parser.add_option("-i","--inj",dest="injfile",help="SimBurst injection file",metavar="INJ.XML",default=None)
    parser.add_option("-t","--trig",dest="trigfile",help="Coinc XML file",metavar="COINC.XML",default=None)
    parser.add_option("--skyres",dest="skyres",help="Sky resolution to use to calculate sky box size",default=None)
    parser.add_option("--eventnum",dest="eventnum",action="store",default=None,help="event number in SimInspiral file of this signal",type="int",metavar="NUM")
    parser.add_option("--trignum",dest="trignum",action="store",default=None,help="trigger number in CoincTable",type="int",metavar="NUM")
    parser.add_option("--bsn",action="store",default=None,help="Optional file containing the bayes factor signal against noise",type="string")
    parser.add_option("--bci",action="store",default=None,help="Optional file containing the bayes factor coherent signal model against incoherent signal model.",type="string")
    parser.add_option("--snr",action="store",default=None,help="Optional file containing the SNRs of the signal in each IFO",type="string")
    parser.add_option("--dievidence",action="store_true",default=False,help="Calculate the direct integration evidence for the posterior samples")
    parser.add_option("--boxing",action="store",default=64,help="Boxing parameter for the direct integration evidence calculation",type="int",dest="boxing")
    parser.add_option("--evidenceFactor",action="store",default=1.0,help="Overall factor (normalization) to apply to evidence",type="float",dest="difactor",metavar="FACTOR")
    parser.add_option('--ellipticEvidence', action='store_true', default=False,help='Estimate the evidence by fitting ellipse to highest-posterior points.', dest='ellevidence')
    parser.add_option("--no2D",action="store_true",default=False,help="Skip 2-D plotting.")
    parser.add_option("--header",action="store",default=None,help="Optional file containing the header line for posterior samples",type="string")
    parser.add_option("--ns",action="store_true",default=False,help="(inspnest) Parse input as if it was output from parallel nested sampling runs.")
    parser.add_option("--Nlive",action="store",default=None,help="(inspnest) Number of live points used in each parallel nested sampling run.",type="int")
    parser.add_option("--xflag",action="store_true",default=False,help="(inspnest) Convert x to iota.")
    parser.add_option("--ss",action="store_true",default=False,help="(SPINspiral) Parse input as if it was output from SPINspiral.")
    parser.add_option("--spin",action="store_true",default=False,help="(SPINspiral) Specify spin run (15 parameters). ")
    parser.add_option("--lalinfmcmc",action="store_true",default=False,help="(LALInferenceMCMC) Parse input from LALInferenceMCMC.")
    parser.add_option("--downsample",action="store",default=None,help="(LALInferenceMCMC) approximate number of samples to record in the posterior",type="int")
    parser.add_option("--deltaLogL",action="store",default=None,help="(LALInferenceMCMC) Difference in logL to use for convergence test.",type="float")
    parser.add_option("--fixedBurnin",dest="fixedBurnin",action="callback",callback=multipleFileCB,help="(LALInferenceMCMC) Fixed number of iteration for burnin.")
    parser.add_option("--oldMassConvention",action="store_true",default=False,help="(LALInferenceMCMC) if activated, m2 > m1; otherwise m1 > m2 in PTMCMC.output.*.00")
    parser.add_option("--fm",action="store_true",default=False,help="(followupMCMC) Parse input as if it was output from followupMCMC.")
    parser.add_option("--no-acf", action="store_true", default=False, dest="noacf")
    parser.add_option("--twodkdeplots", action="store_true", default=False, dest="twodkdeplots")
    parser.add_option("--RconvergenceTests", action="store_true", default=False, dest="RconvergenceTests")
    # NOTE(review): store_false on a dest named "nopdfs" means opts.nopdfs is
    # True unless --nopdfs is given; it is forwarded below as savepdfs.
    parser.add_option("--nopdfs",action="store_false",default=True,dest="nopdfs")
    parser.add_option("-c","--covarianceMatrix",dest="covarianceMatrices",action="append",default=None,help="CSV file containing covariance (must give accompanying mean vector CSV. Can add more than one matrix.")
    parser.add_option("-m","--meanVectors",dest="meanVectors",action="append",default=None,help="Comma separated list of locations of the multivariate gaussian described by the correlation matrix. First line must be list of params in the order used for the covariance matrix. Provide one list per covariance matrix.")
    parser.add_option("--email",action="store",default=None,type="string",metavar="user@ligo.org",help="Send an e-mail to the given address with a link to the finished page.")
    parser.add_option("--stats_only",action="store_true",default=False,dest="stats_only")
    parser.add_option("--archive",action="store",default=None,type="string",metavar="results.tar.gz",help="Create the given tarball with all results")
    # FIX: typo in user-facing help text ("separater" -> "separated").
    parser.add_option("--psdfiles",action="store",default=None,type="string",metavar="H1,L1,V1",help="comma separated list of ASCII files with PSDs, one per IFO")
    (opts,args)=parser.parse_args()

    # Positional arguments and -d/--data both contribute input datafiles.
    datafiles=[]
    if args:
        datafiles=datafiles+args
    if opts.data:
        datafiles=datafiles+opts.data

    # FIX: always bind fixedBurnins so the main call below cannot raise a
    # NameError when --fixedBurnin is not supplied.
    fixedBurnins=None
    if opts.fixedBurnin:
        fixedBurnins=[int(fixedBurnin) for fixedBurnin in opts.fixedBurnin]

    # Pipelines sometimes pass the literal string 'None'; normalise it so the
    # archive step below is skipped in that case.
    if opts.archive=='None':
        opts.archive=None

    # Default one-dimensional parameter menu for burst runs.
    polParams=['psi','polarisation','polarization']
    skyParams=['ra','rightascension','declination','dec']
    timeParams=['time']
    ellParams=['alpha','polar_eccentricity','polar_angle']
    burstParams=['frequency','loghrss','quality','hrss','duration']
    phaseParams=['phase','phi_orb']
    statsParams=['logl']
    calibParams=['calpha_l1','calpha_h1','calamp_l1','calamp_h1']
    oneDMenu=polParams+skyParams+timeParams+statsParams+burstParams+ellParams+phaseParams+calibParams
    # Inter-IFO arrival-time delays for every detector pair.
    ifos_menu=['h1','l1','v1']
    from itertools import combinations
    for ifo1,ifo2 in combinations(ifos_menu,2):
        oneDMenu.append(ifo1+ifo2+'_delay')

    # Two-dimensional greedy-binning menu: every pair of burst parameters
    # plus a hand-picked set of physically interesting combinations.
    twoDGreedyMenu=[]
    for b1,b2 in combinations(burstParams,2):
        twoDGreedyMenu.append([b1,b2])
    twoDGreedyMenu.append(['phi_orb','psi'])
    twoDGreedyMenu.append(['alpha','psi'])
    twoDGreedyMenu.append(['phi_orb','alpha'])
    twoDGreedyMenu.append(['loghrss','psi'])
    twoDGreedyMenu.append(['alpha','loghrss'])
    # Calibration amplitude parameters (calibParams[2:]) against loghrss.
    for i in calibParams[2:]:
        twoDGreedyMenu.append([i,'loghrss'])
    twoDGreedyMenu.append(['calpha_h1','calpha_l1'])
    twoDGreedyMenu.append(['calamp_h1','calamp_l1'])

    # Bin sizes used by the greedy-binning credible-interval estimates.
    greedyBinSizes={'time':1e-4,'ra':0.05,'dec':0.05,'polarisation':0.04,'rightascension':0.05,'declination':0.05,'loghrss':0.01,'frequency':0.5,'quality':0.05,'phase':0.1,'phi_orb':0.1,'psi':0.04,'polarization':0.04,'alpha':0.01,'duration':0.0001,'calamp_l1':0.01,'calamp_h1':0.01,'calpha_h1':0.01,'calpha_l1':0.01,'polar_eccentricity':0.01}
    for loglname in ['logl','deltalogl','deltaloglh1','deltaloglv1','deltalogll1','logll1','loglh1','loglv1']:
        greedyBinSizes[loglname]=0.1
    confidenceLevels=[0.67,0.9,0.95,0.99]

    twoDplots=twoDGreedyMenu
    # NOTE(review): --no2D and --xflag are accepted but not consumed in this
    # block; confirm whether twoDplots should be emptied when --no2D is given.

    cbcBayesBurstPostProc(
        opts.outpath,datafiles,oneDMenu,twoDGreedyMenu,
        greedyBinSizes,confidenceLevels,twoDplots,
        injfile=opts.injfile,eventnum=opts.eventnum,
        trigfile=opts.trigfile,trignum=opts.trignum,
        skyres=opts.skyres,
        dievidence=opts.dievidence,boxing=opts.boxing,difactor=opts.difactor,
        ellevidence=opts.ellevidence,
        bayesfactornoise=opts.bsn,bayesfactorcoherent=opts.bci,
        snrfactor=opts.snr,
        ns_flag=opts.ns,ns_Nlive=opts.Nlive,
        ss_flag=opts.ss,ss_spin_flag=opts.spin,
        li_flag=opts.lalinfmcmc,deltaLogL=opts.deltaLogL,fixedBurnins=fixedBurnins,nDownsample=opts.downsample,oldMassConvention=opts.oldMassConvention,
        fm_flag=opts.fm,
        noacf=opts.noacf,
        twodkdeplots=opts.twodkdeplots,
        RconvergenceTests=opts.RconvergenceTests,
        savepdfs=opts.nopdfs,
        covarianceMatrices=opts.covarianceMatrices,
        meanVectors=opts.meanVectors,
        header=opts.header,
        psd_files=opts.psdfiles,
        statsonly=opts.stats_only)

    if opts.archive is not None:
        # Best effort: a failed tarball must not discard results already on disk.
        try:
            subprocess.call(["tar","cvzf",opts.archive,opts.outpath])
        except Exception as e:
            print('Unable to create archive %s: %s'%(opts.archive,e))

    if opts.email:
        # Notification is optional; report failures without aborting.
        try:
            email_notify(opts.email,opts.outpath)
        except Exception as e:
            print('Unable to send notification email')
            print('The error was: %s'%(e,))
def __init__(self, document)
def startElement(self, name, attrs)
def endElement(self, name)
def __init__(self, document)
def endElement(self, name)
def startElement(self, name, attrs)
def oneD_dict_to_file(dict, fname)
def cbcBayesBurstPostProc(outdir, data, oneDMenu, twoDGreedyMenu, GreedyRes, confidence_levels, twoDplots, injfile=None, eventnum=None, trigfile=None, trignum=None, skyres=None, dievidence=False, boxing=64, difactor=1.0, ellevidence=False, bayesfactornoise=None, bayesfactorcoherent=None, snrfactor=None, ns_flag=False, ns_Nlive=None, ss_flag=False, ss_spin_flag=False, li_flag=False, deltaLogL=None, fixedBurnins=None, nDownsample=None, oldMassConvention=False, fm_flag=False, noacf=False, twodkdeplots=False, RconvergenceTests=False, savepdfs=True, covarianceMatrices=None, meanVectors=None, header=None, psd_files=None, statsonly=False)
This is a demonstration script for using the functionality/data structures contained in lalinference....
def dict2html(d, parent=None)
def extract_hdf5_metadata(h5grp, parent=None)
def email_notify(address, path)
def pickle_to_file(obj, fname)
Pickle/serialize 'obj' into 'fname'.