import os
import sys
import ast
import itertools
import math
import random
import re
import socket
import stat
import uuid

import numpy as np

import lal
from lal import pipeline
import igwn_segments as segments
from igwn_ligolw import lsctables
from igwn_ligolw import utils as ligolw_utils
from math import floor, ceil, log, pow
from itertools import permutations
from functools import reduce
try:
    from configparser import NoOptionError, NoSectionError
except ImportError:  # python < 3
    from ConfigParser import NoOptionError, NoSectionError
33 """Return list of segments whose data quality is good enough for PE. The data
34 quality is examined
with statevector
in frame files. If frame files do
not
35 exist,
return empty list.
41 state_vector_channel: string
43 List of bits. This function extracts the data taken when all of the
44 bits
in this list are
"active" assuming such data
is good enough
for
46 gpsstart, gpsend: float
50 from glue.lal
import Cache
51 from gwdatafind
import find_urls
53 import gwpy.timeseries
56 print(
'Unable to import necessary modules. Querying science segments not possible. Please try installing gwdatafind and gwpy')
59 datacache = Cache.from_urls(find_urls(ifo[0], frametype, gpsstart, gpsend))
61 return gwpy.segments.SegmentList([])
62 state = gwpy.timeseries.StateVector.read(
63 datacache, state_vector_channel, start=gpsstart, end=gpsend,
66 if not np.issubdtype(state.dtype, np.unsignedinteger):
75 flags = state.to_dqflags()
77 segments = flags[bits[0]].active
79 segments -= ~flags[bit].active
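# Example (illustrative sketch; the frametype, channel name and bit labels
# below are hypothetical and must match your site's conventions):
#
#   segs = findSegmentsToAnalyze(
#       'H1', 'H1_HOFT_C00', 'H1:GDS-CALIB_STATE_VECTOR',
#       ['Bit 0', 'Bit 1', 'Bit 2'], 1126259400, 1126259700)
#   for seg in segs:
#       print(seg[0], seg[1])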
def guess_url(fslocation):
    """
    Try to work out the web address of a given path
    """
    USER = os.environ['USER']
    HOST = socket.getfqdn()
    # Find the web-accessible part of the path, if any
    if 'public_html' in fslocation:
        k = 'public_html/'
    elif 'WWW' in fslocation:
        k = 'WWW/'
    elif 'www_html' in fslocation:
        k = 'www_html/'
    else:
        k = None
    if k is not None:
        (a, b) = fslocation.split(k)
        webpath = os.path.join('~%s' % (USER), b)
    else:
        # Arbitrary directory: try to map it onto one of the known web-root conventions
        (c, d) = fslocation.split(USER, 1)
        for k in ['public_html', 'WWW', 'www_html']:
            trypath = c + os.environ['USER'] + '/' + k + d
            if os.path.realpath(trypath) == os.path.normpath(fslocation):
                # Found a web directory which maps onto the filesystem location
                webpath = os.path.join('~%s' % (USER), d)
                break
    # Guess the server address from the hostname
    if 'atlas' in HOST:
        url = "https://atlas1.atlas.aei.uni-hannover.de/"
    elif 'ligo-wa' in HOST:
        url = "https://ldas-jobs.ligo-wa.caltech.edu/"
    elif 'ligo-la' in HOST:
        url = "https://ldas-jobs.ligo-la.caltech.edu/"
    elif 'cit' in HOST or 'caltech' in HOST:
        url = "https://ldas-jobs.ligo.caltech.edu/"
    elif 'uwm' in HOST or 'nemo' in HOST:
        url = "https://ldas-jobs.phys.uwm.edu/"
    elif 'phy.syr.edu' in HOST:
        url = "https://sugar-jobs.phy.syr.edu/"
    elif 'arcca.cf.ac.uk' in HOST:
        url = "https://geo2.arcca.cf.ac.uk/"
    elif 'vulcan' in HOST:
        url = "https://galahad.aei.mpg.de/"
    else:
        url = "http://%s/" % (HOST)
    url = url + webpath
    return url
class Event():
    """
    Represents a unique event to run on
    """
    new_id = itertools.count()

    def __init__(self, trig_time=None, SimInspiral=None, SimBurst=None, SnglInspiral=None, CoincInspiral=None, event_id=None, timeslide_dict=None, GID=None, ifos=None, duration=None, srate=None, trigSNR=None, fhigh=None, horizon_distance=None):
        self.trig_time = trig_time
        self.injection = SimInspiral
        self.burstinjection = SimBurst
        self.sngltrigger = SnglInspiral
        self.coinctrigger = CoincInspiral
        self.GID = GID
        if timeslide_dict is None:
            self.timeslides = {}
        else:
            self.timeslides = timeslide_dict
        if ifos is None:
            self.ifos = []
        else:
            self.ifos = ifos
        self.duration = duration
        self.srate = srate
        self.trigSNR = trigSNR
        self.fhigh = fhigh
        self.horizon_distance = horizon_distance
        if event_id is not None:
            self.event_id = event_id
        else:
            self.event_id = next(Event.new_id)
        if self.GID is not None:
            # Use the digits of the GraceDB id as a numerical event id
            self.event_id = int(''.join(i for i in self.GID if i.isdigit()))
        self.engine_opts = {}

    def set_engine_option(self, opt, val):
        """
        Can set event-specific options for the engine nodes
        using this option, e.g. ev.set_engine_option('time-min','1083759273')
        """
        self.engine_opts[opt] = val

dummyCacheNames = ['LALLIGO', 'LALVirgo', 'LALAdLIGO', 'LALAdVirgo']
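# Example (illustrative): create an event at a given GPS time and attach an
# event-specific engine option, as the pipeline does for ad-hoc times:
#
#   ev = Event(trig_time=1126259462.39, ifos=['H1', 'L1'])
#   ev.set_engine_option('time-min', '1126259461')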
def create_events_from_coinc_and_psd(
        coinc_xml_obj, psd_dict=None, gid=None, threshold_snr=None, flow=20.0,
        roq=False, use_gracedbpsd=False
):
    """This function calculates seglen, fhigh, srate and horizon distance from
    coinc.xml and psd.xml.gz from GraceDB and creates a list of Events as input of
    pipeline. This function is based on Chris Pankow's script.

    Parameters
    ----
    coinc_xml_obj: ligolw.ligolw.Document
        file object of coinc.xml
    psd_dict: dictionary of REAL8FrequencySeries
        PSDs of all the ifos
    threshold_snr: float
        snr threshold for detection
    flow: float
        lower frequency cutoff for overlap calculation
    roq: bool
        Whether the run uses ROQ or not
    use_gracedbpsd: bool
        Whether the gracedb PSD is used or not in PE
    """
    output = []
    from lalsimulation import SimInspiralChirpTimeBound, IMRPhenomDGetPeakFreq
    try:
        from gstlal import reference_psd
    except ImportError:
        reference_psd = None
    try:
        from gwpy.frequencyseries import FrequencySeries
        from gwpy.astro import inspiral_range
    except ImportError:
        inspiral_range = None
    coinc_events = lsctables.CoincInspiralTable.get_table(coinc_xml_obj)
    sngl_event_idx = dict((row.event_id, row) for row in lsctables.SnglInspiralTable.get_table(coinc_xml_obj))
    ifos = sorted(coinc_events[0].instruments)
    trigSNR = coinc_events[0].snr
    # Parse the PSD, if any, to find the maximum usable sampling rate
    srate_psdfile = 16384
    fhigh = None
    if psd_dict is not None and use_gracedbpsd:
        psd = list(psd_dict.values())[0]
        srate_psdfile = pow(
            2.0, ceil(log(psd.f0 + psd.deltaF * (psd.data.length - 1), 2))
        ) * 2
    coinc_map = lsctables.CoincMapTable.get_table(coinc_xml_obj)
    for coinc in coinc_events:
        these_sngls = [sngl_event_idx[c.event_id] for c in coinc_map if c.coinc_event_id == coinc.coinc_event_id]
        dur = []
        srate = []
        horizon_distance = []
        for e in these_sngls:
            # Determine the segment length and sampling rate from the chirp time
            # bound and the peak frequency of the template
            chirplen = SimInspiralChirpTimeBound(flow, e.mass1 * lal.MSUN_SI, e.mass2 * lal.MSUN_SI, 0.0, 0.0)
            fstop = IMRPhenomDGetPeakFreq(e.mass1, e.mass2, 0.0, 0.0)
            dur.append(pow(2.0, ceil(log(max(8.0, chirplen + 2.0), 2))))
            srate.append(pow(2.0, ceil(log(fstop, 2))) * 2)
            # Determine the horizon distance
            if threshold_snr is not None:
                if e.eff_distance is not None and not math.isnan(e.eff_distance):
                    if e.snr > threshold_snr:
                        horizon_distance.append(e.eff_distance * e.snr / threshold_snr)
                    else:
                        horizon_distance.append(2 * e.eff_distance)
                else:
                    if psd_dict is not None:
                        # Calculate the horizon distance from the PSD
                        psd = psd_dict[e.ifo]
                        fstop = IMRPhenomDGetPeakFreq(e.mass1, e.mass2, 0.0, 0.0)
                        if reference_psd is not None:
                            HorizonDistanceObj = reference_psd.HorizonDistance(
                                f_min=flow, f_max=fstop, delta_f=1.0 / 32.0,
                                m1=e.mass1, m2=e.mass2
                            )
                            horizon_distance.append(
                                HorizonDistanceObj(psd, snr=threshold_snr)[0]
                            )
                        elif inspiral_range is not None:
                            gwpy_psd = FrequencySeries(
                                psd.data.data, f0=psd.f0, df=psd.deltaF
                            )
                            horizon_distance.append(
                                inspiral_range(
                                    gwpy_psd, threshold_snr, e.mass1, e.mass2,
                                    horizon=True
                                ).value
                            )
        if max(srate) < srate_psdfile:
            srate = max(srate)
        else:
            srate = srate_psdfile
            if psd_dict is not None and use_gracedbpsd:
                # Leave headroom below Nyquist where the uploaded PSD drops off
                fhigh = srate_psdfile / 2.0 * 0.95
        duration = max(dur)
        horizon_distance = max(horizon_distance) if len(horizon_distance) > 0 else None
        ev = Event(CoincInspiral=coinc, GID=gid, ifos=ifos, duration=duration, srate=srate,
                   trigSNR=trigSNR, fhigh=fhigh, horizon_distance=horizon_distance)
        output.append(ev)
    print("Found %d coinc events in table." % len(coinc_events))
    return output
def open_pipedown_database(database_filename, tmp_space):
    """
    Open the connection to the pipedown database
    """
    if not os.access(database_filename, os.R_OK):
        raise Exception('Unable to open input file: %s' % (database_filename))
    from igwn_ligolw import dbtables
    import sqlite3
    working_filename = dbtables.get_connection_filename(database_filename, tmp_path=tmp_space)
    connection = sqlite3.connect(working_filename)
    if tmp_space:
        dbtables.set_temp_store_directory(connection, tmp_space)
    return (connection, working_filename)
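# Example (illustrative; the database path is hypothetical):
#
#   connection, working_file = open_pipedown_database('pipedown.sqlite', None)
#   events = get_zerolag_pipedown(connection, gpsstart=1126250000,
#                                 gpsend=1126260000, max_cfar=1e-4)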
def get_zerolag_lloid(database_connection, dumpfile=None, gpsstart=None, gpsend=None, max_cfar=-1, min_cfar=-1):
    """
    Returns a list of Event objects
    from the pipedown database. Can dump some stats to dumpfile if given,
    and filter by gpsstart and gpsend to reduce the number, or specify
    max_cfar to select by combined FAR
    """
    output = {}
    extra = {}
    if gpsstart is not None: gpsstart = float(gpsstart)
    if gpsend is not None: gpsend = float(gpsend)
    # Query for zero-lag coincidences (time_slide_id:1)
    get_coincs = "SELECT sngl_inspiral.end_time+sngl_inspiral.end_time_ns*1e-9,sngl_inspiral.ifo,coinc_event.coinc_event_id,sngl_inspiral.snr,sngl_inspiral.chisq,coinc_inspiral.combined_far \
        FROM sngl_inspiral join coinc_event_map on (coinc_event_map.table_name=='sngl_inspiral' and coinc_event_map.event_id ==\
        sngl_inspiral.event_id) join coinc_event on (coinc_event.coinc_event_id==coinc_event_map.coinc_event_id) \
        join coinc_inspiral on (coinc_event.coinc_event_id==coinc_inspiral.coinc_event_id) \
        WHERE coinc_event.time_slide_id=='time_slide:time_slide_id:1'\
        "
    if gpsstart is not None:
        get_coincs = get_coincs + ' and coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1.0e-9 > %f' % (gpsstart)
    if gpsend is not None:
        get_coincs = get_coincs + ' and coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1.0e-9 < %f' % (gpsend)
    if max_cfar != -1:
        get_coincs = get_coincs + ' and coinc_inspiral.combined_far < %f' % (max_cfar)
    if min_cfar != -1:
        get_coincs = get_coincs + ' and coinc_inspiral.combined_far > %f' % (min_cfar)
    db_out = database_connection.cursor().execute(get_coincs)
    for (sngl_time, ifo, coinc_id, snr, chisq, cfar) in db_out:
        coinc_id = int(coinc_id.split(":")[-1])
        if coinc_id not in output.keys():
            output[coinc_id] = Event(trig_time=sngl_time, timeslide_dict={}, event_id=int(coinc_id))
            extra[coinc_id] = {}
        output[coinc_id].timeslides[ifo] = 0
        output[coinc_id].ifos.append(ifo)
        extra[coinc_id][ifo] = {'snr': snr, 'chisq': chisq, 'cfar': cfar}
    if dumpfile is not None:
        fh = open(dumpfile, 'w')
        for co in output.keys():
            for ifo in output[co].ifos:
                fh.write('%s %s %s %s %s %s %s\n' % (str(co), ifo, str(output[co].trig_time), str(output[co].timeslides[ifo]), str(extra[co][ifo]['snr']), str(extra[co][ifo]['chisq']), str(extra[co][ifo]['cfar'])))
        fh.close()
    return output.values()
def get_zerolag_pipedown(database_connection, dumpfile=None, gpsstart=None, gpsend=None, max_cfar=-1, min_cfar=-1):
    """
    Returns a list of Event objects
    from the pipedown database. Can dump some stats to dumpfile if given,
    and filter by gpsstart and gpsend to reduce the number, or specify
    max_cfar to select by combined FAR
    """
    output = {}
    extra = {}
    if gpsstart is not None: gpsstart = float(gpsstart)
    if gpsend is not None: gpsend = float(gpsend)
    # Query for zero-lag coincidences (time_slide_id:10049)
    get_coincs = "SELECT sngl_inspiral.end_time+sngl_inspiral.end_time_ns*1e-9,sngl_inspiral.ifo,coinc_event.coinc_event_id,sngl_inspiral.snr,sngl_inspiral.chisq,coinc_inspiral.combined_far \
        FROM sngl_inspiral join coinc_event_map on (coinc_event_map.table_name=='sngl_inspiral' and coinc_event_map.event_id ==\
        sngl_inspiral.event_id) join coinc_event on (coinc_event.coinc_event_id==coinc_event_map.coinc_event_id) \
        join coinc_inspiral on (coinc_event.coinc_event_id==coinc_inspiral.coinc_event_id) \
        WHERE coinc_event.time_slide_id=='time_slide:time_slide_id:10049'\
        "
    if gpsstart is not None:
        get_coincs = get_coincs + ' and coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1.0e-9 > %f' % (gpsstart)
    if gpsend is not None:
        get_coincs = get_coincs + ' and coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1.0e-9 < %f' % (gpsend)
    if max_cfar != -1:
        get_coincs = get_coincs + ' and coinc_inspiral.combined_far < %f' % (max_cfar)
    if min_cfar != -1:
        get_coincs = get_coincs + ' and coinc_inspiral.combined_far > %f' % (min_cfar)
    db_out = database_connection.cursor().execute(get_coincs)
    for (sngl_time, ifo, coinc_id, snr, chisq, cfar) in db_out:
        coinc_id = int(coinc_id.split(":")[-1])
        if coinc_id not in output.keys():
            output[coinc_id] = Event(trig_time=sngl_time, timeslide_dict={}, event_id=int(coinc_id))
            extra[coinc_id] = {}
        output[coinc_id].timeslides[ifo] = 0
        output[coinc_id].ifos.append(ifo)
        extra[coinc_id][ifo] = {'snr': snr, 'chisq': chisq, 'cfar': cfar}
    if dumpfile is not None:
        fh = open(dumpfile, 'w')
        for co in output.keys():
            for ifo in output[co].ifos:
                fh.write('%s %s %s %s %s %s %s\n' % (str(co), ifo, str(output[co].trig_time), str(output[co].timeslides[ifo]), str(extra[co][ifo]['snr']), str(extra[co][ifo]['chisq']), str(extra[co][ifo]['cfar'])))
        fh.close()
    return output.values()
def get_timeslides_pipedown(database_connection, dumpfile=None, gpsstart=None, gpsend=None, max_cfar=-1):
    """
    Returns a list of Event objects
    with times and timeslide offsets
    """
    output = {}
    extra = {}
    if gpsstart is not None: gpsstart = float(gpsstart)
    if gpsend is not None: gpsend = float(gpsend)
    db_segments = []
    sql_seg_query = "SELECT search_summary.out_start_time, search_summary.out_end_time from search_summary join process on process.process_id==search_summary.process_id where process.program=='thinca'"
    db_out = database_connection.cursor().execute(sql_seg_query)
    for d in db_out:
        if d not in db_segments:
            db_segments.append(d)
    seglist = segments.segmentlist([segments.segment(d[0], d[1]) for d in db_segments])
    # Query for all time-slid coincidences (everything but the zero-lag time_slide_id)
    joinstr = ' and '
    get_coincs = "SELECT sngl_inspiral.end_time+sngl_inspiral.end_time_ns*1e-9,time_slide.offset,sngl_inspiral.ifo,coinc_event.coinc_event_id,sngl_inspiral.snr,sngl_inspiral.chisq,coinc_inspiral.combined_far \
        FROM sngl_inspiral join coinc_event_map on (coinc_event_map.table_name == 'sngl_inspiral' and coinc_event_map.event_id \
        == sngl_inspiral.event_id) join coinc_event on (coinc_event.coinc_event_id==coinc_event_map.coinc_event_id) join time_slide\
        on (time_slide.time_slide_id == coinc_event.time_slide_id and time_slide.instrument==sngl_inspiral.ifo)\
        join coinc_inspiral on (coinc_inspiral.coinc_event_id==coinc_event.coinc_event_id) where coinc_event.time_slide_id!='time_slide:time_slide_id:10049'"
    if gpsstart is not None:
        get_coincs = get_coincs + joinstr + ' coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1e-9 > %f' % (gpsstart)
    if gpsend is not None:
        get_coincs = get_coincs + joinstr + ' coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1e-9 < %f' % (gpsend)
    if max_cfar != -1:
        get_coincs = get_coincs + joinstr + ' coinc_inspiral.combined_far < %f' % (max_cfar)
    db_out = database_connection.cursor().execute(get_coincs)
    # The timeslide functionality requires the obsolete pylal package
    from pylal import SnglInspiralUtils
    for (sngl_time, slide, ifo, coinc_id, snr, chisq, cfar) in db_out:
        coinc_id = int(coinc_id.split(":")[-1])
        seg = list(filter(lambda seg: sngl_time in seg, seglist))[0]
        slid_time = SnglInspiralUtils.slideTimeOnRing(sngl_time, slide, seg)
        if coinc_id not in output.keys():
            output[coinc_id] = Event(trig_time=slid_time, timeslide_dict={}, event_id=int(coinc_id))
            extra[coinc_id] = {}
        output[coinc_id].timeslides[ifo] = slid_time - sngl_time
        output[coinc_id].ifos.append(ifo)
        extra[coinc_id][ifo] = {'snr': snr, 'chisq': chisq, 'cfar': cfar}
    if dumpfile is not None:
        fh = open(dumpfile, 'w')
        for co in output.keys():
            for ifo in output[co].ifos:
                fh.write('%s %s %s %s %s %s %s\n' % (str(co), ifo, str(output[co].trig_time), str(output[co].timeslides[ifo]), str(extra[co][ifo]['snr']), str(extra[co][ifo]['chisq']), str(extra[co][ifo]['cfar'])))
        fh.close()
    return output.values()
def mkdirs(path):
    """
    Helper function. Make the given directory, creating intermediate
    dirs if necessary, and don't complain about it already existing.
    """
    if os.access(path, os.W_OK) and os.path.isdir(path):
        return
    else:
        os.makedirs(path)
def chooseEngineNode(name):
    if name == 'lalinferencenest':
        return LALInferenceNestNode
    if name == 'lalinferenceburst':
        return LALInferenceBurstNode
    if name == 'lalinferencemcmc':
        return LALInferenceMCMCNode
    if name == 'lalinferencedatadump':
        return LALInferenceDataDumpNode
    if name == 'bayeswavepsd':
        return BayesWavePSDNode
    if name == 'bayeswavepost':
        return BayesWavePostNode
    return EngineNode
def get_engine_name(cp):
    name = cp.get('analysis', 'engine')
    if name == 'random':
        engine_list = ['lalinferencenest', 'lalinferencemcmc']
        if cp.has_option('input', 'gid'):
            gid = cp.get('input', 'gid')
            # Pick the engine deterministically from the digits of the GraceDB id
            engine_number = int(''.join(i for i in gid if i.isdigit())) % 2
        else:
            engine_number = random.randint(0, 1)
        return engine_list[engine_number]
    else:
        return name
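# Example (illustrative):
#
#   cp = configparser.ConfigParser()
#   cp.read_dict({'analysis': {'engine': 'random'}, 'input': {'gid': 'G184098'}})
#   get_engine_name(cp)   # -> 'lalinferencenest' (184098 % 2 == 0)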
def scan_timefile(timefile):
    p = re.compile(r'[\d.]+')
    times = []
    timefilehandle = open(timefile, 'r')
    for time in timefilehandle:
        if not p.match(time):
            continue
        if float(time) in times:
            print('Skipping duplicate time %s' % (time))
            continue
        print('Read time %s' % (time))
        times.append(float(time))
    timefilehandle.close()
    return times
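# Example (illustrative): a time file is plain ASCII with one GPS time per
# line; duplicates are skipped, and lines that do not start with digits
# (e.g. comments) do not match the [\d.]+ pattern and are ignored:
#
#   1126259462.39
#   1126259463.00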
def get_xml_psds(psdxml, ifos, outpath, end_time=None):
    """
    Get a psd.xml.gz file and:
    1) Reads it
    2) Checks the psd file contains all the IFOs we want to analyze
    3) Writes down the PSDs into an ascii file for each IFO in psd.xml.gz. The name of the file contains the trigtime (if given) and the IFO name.

    Input:
        psdxml: psd.xml.gz file
        ifos: list of ifos used for the analysis
        outpath: path where the ascii PSD will be written to
        (end_time): trigtime for this event. Will be used as part of the PSD file name
    """
    try:
        from lal import series as lalseries
    except ImportError:
        print("ERROR, cannot import lal.series in bppu/get_xml_psds()\n")
        raise
    out = {}
    if not os.path.isdir(outpath):
        os.makedirs(outpath)
    if end_time is not None:
        time = repr(float(end_time))
    else:
        time = ''
    # Check whether we already have all the ascii files
    # (e.g. because we are running parallel runs of the same event)
    for ifo in ifos:
        path_to_ascii_psd = os.path.join(outpath, ifo + '_psd_' + time + '.txt')
        if os.path.isfile(path_to_ascii_psd):
            out[ifo] = path_to_ascii_psd
    if len(out) == len(ifos):
        return out
    # We need to convert the PSD for one or more IFOs. Open the file
    if not os.path.isfile(psdxml):
        print("ERROR: impossible to open the psd file %s. Exiting...\n" % psdxml)
        sys.exit(1)
    xmlpsd = lalseries.read_psd_xmldoc(ligolw_utils.load_filename(psdxml))
    # Check the psd file contains all the IFOs we want to analyze
    for ifo in ifos:
        if ifo not in xmlpsd:
            print("ERROR. The PSD for the ifo %s does not seem to be contained in %s\n" % (ifo, psdxml))
            sys.exit(1)
    # Loop over the IFOs in the psd xml file
    for instrument in xmlpsd.keys():
        # Name of the ascii file we are going to write the PSD into
        path_to_ascii_psd = os.path.join(outpath, instrument + '_psd_' + time + '.txt')
        # Skip the IFO if we already have its ascii file
        if os.path.isfile(path_to_ascii_psd):
            continue
        # Write the PSD as a two-column (frequency, PSD) ascii file
        ifodata = xmlpsd[instrument]
        combine = np.c_[ifodata.f0 + np.arange(ifodata.data.length) * ifodata.deltaF, ifodata.data.data]
        np.savetxt(path_to_ascii_psd, combine)
        # Record the path to the ascii file (keyed by this instrument,
        # fixing a bug where the key of the previous loop was reused)
        out[instrument] = path_to_ascii_psd
    return out
def get_trigger_chirpmass(coinc_xml_obj):
    coinc_events = lsctables.CoincInspiralTable.get_table(coinc_xml_obj)
    sngl_event_idx = dict((row.event_id, row) for row in lsctables.SnglInspiralTable.get_table(coinc_xml_obj))
    coinc_map = lsctables.CoincMapTable.get_table(coinc_xml_obj)
    mass1 = []
    mass2 = []
    for coinc in coinc_events:
        these_sngls = [sngl_event_idx[c.event_id] for c in coinc_map if c.coinc_event_id == coinc.coinc_event_id]
        for e in these_sngls:
            mass1.append(e.mass1)
            mass2.append(e.mass2)
    # Check that the template masses are identical in each IFO
    assert len(set(mass1)) == 1
    assert len(set(mass2)) == 1
    mchirp = (mass1[0] * mass2[0])**(3. / 5.) / ((mass1[0] + mass2[0])**(1. / 5.))
    return mchirp
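# Worked example: for an equal-mass m1 = m2 = 1.4 Msun trigger,
# mchirp = (1.4*1.4)**(3/5) / (2.8)**(1/5) ~= 1.22 Msun.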
def get_roq_mchirp_priors(path, roq_paths, roq_params, key, coinc_xml_obj=None, sim_inspiral=None):
    # coinc_xml_obj and sim_inspiral cannot be given at the same time
    mc_priors = {}
    if coinc_xml_obj is not None and sim_inspiral is not None:
        print("Error in get_roq_mchirp_priors, cannot use both coinc.xml and sim_inspiral\n")
        sys.exit(1)

    for roq in roq_paths:
        params = os.path.join(path, roq, 'params.dat')
        roq_params[roq] = np.genfromtxt(params, names=True)
        mc_priors[roq] = [float(roq_params[roq]['chirpmassmin']), float(roq_params[roq]['chirpmassmax'])]
    ordered_roq_paths = [item[0] for item in sorted(roq_params.items(), key=key)][::-1]
    # Adjust the bin edges so that adjacent chirp-mass bins do not overlap
    i = 0
    for roq in ordered_roq_paths:
        if i > 0:
            # Set the lower bound to the upper bound of the previous bin
            mc_priors[roq][0] = mc_priors[ordered_roq_paths[i - 1]][1]
        if i < len(roq_paths) - 1:
            mc_priors[roq][1] -= (mc_priors[roq][1] - mc_priors[ordered_roq_paths[i + 1]][0]) / 2.
        i += 1
    if coinc_xml_obj is not None:
        trigger_mchirp = get_trigger_chirpmass(coinc_xml_obj)
    elif sim_inspiral is not None:
        trigger_mchirp = sim_inspiral.mchirp
    else:
        trigger_mchirp = None

    return mc_priors, trigger_mchirp
def get_roq_component_mass_priors(path, roq_paths, roq_params, key, coinc_xml_obj=None, sim_inspiral=None):
    # coinc_xml_obj and sim_inspiral cannot be given at the same time
    m1_priors = {}
    m2_priors = {}
    if coinc_xml_obj is not None and sim_inspiral is not None:
        print("Error in get_roq_component_mass_priors, cannot use both coinc.xml and sim_inspiral\n")
        sys.exit(1)

    for roq in roq_paths:
        params = os.path.join(path, roq, 'params.dat')
        roq_params[roq] = np.genfromtxt(params, names=True)
        m1_priors[roq] = [float(roq_params[roq]['mass1min']), float(roq_params[roq]['mass1max'])]
        m2_priors[roq] = [float(roq_params[roq]['mass2min']), float(roq_params[roq]['mass2max'])]
    if coinc_xml_obj is not None:
        trigger_mchirp = get_trigger_chirpmass(coinc_xml_obj)
    elif sim_inspiral is not None:
        trigger_mchirp = sim_inspiral.mchirp
    else:
        trigger_mchirp = None

    return m1_priors, m2_priors, trigger_mchirp
def get_roq_mass_freq_scale_factor(mc_priors, trigger_mchirp, force_flow=None):
    mc_min = min([prange[0] for prange in mc_priors.values()])
    mc_max = max([prange[1] for prange in mc_priors.values()])
    scale_factor = 1.
    if force_flow is None and trigger_mchirp is not None:
        if trigger_mchirp >= mc_max:
            scale_factor = 2.**(floor(trigger_mchirp / mc_max))
        if trigger_mchirp <= mc_min:
            scale_factor = (2. / 3.2)**(ceil(trigger_mchirp / mc_min))
    elif force_flow is not None:
        scale_factor = 20. / force_flow
    return scale_factor


def mchirp(m1, m2):
    return (m1 * m2)**(3.0 / 5.0) / (m1 + m2)**(1.0 / 5.0)
def Query_ROQ_Bounds_Type(path, roq_paths):
    # Assume that the parametrization of the ROQ bounds is independent of
    # seglen; just look at the first one
    roq = roq_paths[0]
    params = os.path.join(path, roq, 'params.dat')
    roq_params0 = np.genfromtxt(params, names=True)
    roq_names_set = set(roq_params0.dtype.names)
    component_mass_bounds_set = set(['mass1min', 'mass1max', 'mass2min', 'mass2max'])
    chirp_mass_q_bounds_set = set(['chirpmassmin', 'chirpmassmax', 'qmin', 'qmax'])
    if roq_names_set.issuperset(component_mass_bounds_set):
        roq_bounds = 'component_mass'
    elif roq_names_set.issuperset(chirp_mass_q_bounds_set):
        roq_bounds = 'chirp_mass_q'
    else:
        print('Invalid bounds for ROQ. Either (m1,m2) or (mc,q) bounds are supported.')
        sys.exit(1)
    return roq_bounds
def extract_approx(cp):
    approximant = cp.get("engine", "approx")
    if "pseudo" in approximant:
        approximant = approximant.split("pseudo")[0]
    return approximant
class LALInferencePipelineDAG(pipeline.CondorDAG):
    def __init__(self, cp):
        self.config = cp
        self.engine = get_engine_name(cp)
        self.EngineNode = chooseEngineNode(self.engine)
        if cp.has_option('paths', 'basedir'):
            self.basepath = cp.get('paths', 'basedir')
        else:
            self.basepath = os.getcwd()
            print('No basepath specified, using current directory: %s' % (self.basepath))
        mkdirs(self.basepath)
        print("Generating LALInference DAG in {0}".format(self.basepath))
        self.posteriorpath = os.path.join(self.basepath, 'posterior_samples')
        mkdirs(self.posteriorpath)
        daglogdir = cp.get('paths', 'daglogdir')
        mkdirs(daglogdir)
        self.daglogfile = os.path.join(daglogdir, 'lalinference_pipeline-' + str(uuid.uuid1()) + '.log')
        super(LALInferencePipelineDAG, self).__init__(self.daglogfile)
        if cp.has_option('paths', 'cachedir'):
            self.cachepath = cp.get('paths', 'cachedir')
        else:
            self.cachepath = os.path.join(self.basepath, 'caches')
        mkdirs(self.cachepath)
        if cp.has_option('paths', 'logdir'):
            self.logpath = cp.get('paths', 'logdir')
        else:
            self.logpath = os.path.join(self.basepath, 'log')
        mkdirs(self.logpath)
        if cp.has_option('analysis', 'ifos'):
            self.ifos = ast.literal_eval(cp.get('analysis', 'ifos'))
        else:
            self.ifos = ['H1', 'L1', 'V1']
        self.segments = {}
        if cp.has_option('datafind', 'veto-categories'):
            self.veto_categories = cp.get('datafind', 'veto-categories')
        else:
            self.veto_categories = []
        for ifo in self.ifos:
            self.segments[ifo] = []
        self.frtypes = ast.literal_eval(cp.get('datafind', 'types'))
        self.channels = ast.literal_eval(cp.get('data', 'channels'))
        if cp.has_option("paths", "webdir"):
            self.webdir = cp.get('paths', 'webdir')
        if cp.has_option("paths", "existing_webdir"):
            self.webdir = cp.get('paths', 'existing_webdir')
        if cp.has_option('analysis', 'dataseed'):
            self.dataseed = cp.getint('analysis', 'dataseed')
        else:
            self.dataseed = None
        if cp.has_option('analysis', 'randomseed'):
            self.randomseed = cp.getint('analysis', 'randomseed')
        else:
            self.randomseed = random.randint(1, 2**31)
        # Set up the datafind job
        self.prenodes = {}
        self.datafind_job = pipeline.LSCDataFindJob(self.cachepath, self.logpath, self.config)
        if cp.has_option('analysis', 'osg') and cp.getboolean('analysis', 'osg'):
            self.datafind_job.add_opt('server', 'datafind.ligo.org:443')
        if cp.has_option('condor', 'accounting_group'):
            self.datafind_job.add_condor_cmd('accounting_group', cp.get('condor', 'accounting_group'))
        if cp.has_option('condor', 'accounting_group_user'):
            self.datafind_job.add_condor_cmd('accounting_group_user', cp.get('condor', 'accounting_group_user'))
        self.datafind_job.set_sub_file(os.path.abspath(os.path.join(self.basepath, 'datafind.sub')))
        self.preengine_job = EngineJob(self.config, os.path.join(self.basepath, 'prelalinference.sub'), self.logpath, engine='lalinferencedatadump', ispreengine=True)
        # Optional pre-processing jobs
        if self.config.getboolean('analysis', 'roq'):
            self.computeroqweights_job = ROMJob(self.config, os.path.join(self.basepath, 'computeroqweights.sub'), self.logpath)
        if self.config.has_option('condor', 'bayesline'):
            self.bayesline_job = BayesLineJob(self.config, os.path.join(self.basepath, 'bayesline.sub'), self.logpath)
        self.bayeswavepsd_job = {}
        self.bayeswavepost_job = {}
        if self.config.has_option('condor', 'bayeswave'):
            for ifo in self.ifos:
                self.bayeswavepsd_job[ifo] = BayesWavePSDJob(self.config, os.path.join(self.basepath, 'bayeswavepsd_%s.sub' % (ifo)), self.logpath)
                self.bayeswavepost_job[ifo] = BayesWavePostJob(self.config, os.path.join(self.basepath, 'bayeswavepost_%s.sub' % (ifo)), self.logpath)
        # Need an engine job for each IFO combination
        self.engine_jobs = {}
        ifocombos = []
        for N in range(1, len(self.ifos) + 1):
            for a in permutations(self.ifos, N):
                ifocombos.append(a)
        for ifos in ifocombos:
            self.engine_jobs[ifos] = EngineJob(self.config, os.path.join(self.basepath, 'engine_%s.sub' % (reduce(lambda x, y: x + y, ifos))), self.logpath, engine=self.engine, ifos=ifos)
        # Merge, combine, coherence-test and results-page jobs
        if self.engine == 'lalinferencemcmc':
            self.combine_job = CombineMCMCJob(self.config, os.path.join(self.basepath, 'combine_files.sub'), self.logpath)
            self.merge_job = MergeJob(self.config, os.path.join(self.basepath, 'merge_runs.sub'), self.logpath, engine='mcmc')
        else:
            self.merge_job = MergeJob(self.config, os.path.join(self.basepath, 'merge_runs.sub'), self.logpath, engine='nest')
        if self.config.getboolean('analysis', 'coherence-test'):
            self.coherence_test_job = CoherenceTestJob(self.config, os.path.join(self.basepath, 'coherence_test.sub'), self.logpath)
        if "summarypages" in self.config.get('condor', 'resultspage'):
            self.results_page_job = PESummaryResultsPageJob(self.config, os.path.join(self.basepath, 'resultspage.sub'), self.logpath)
        else:
            self.results_page_job = ResultsPageJob(self.config, os.path.join(self.basepath, 'resultspage.sub'), self.logpath)
        if self.config.has_option('condor', 'ligo-skymap-from-samples') and self.config.has_option('condor', 'ligo-skymap-plot'):
            self.mapjob = SkyMapJob(cp, os.path.join(self.basepath, 'skymap.sub'), self.logpath)
            self.plotmapjob = PlotSkyMapJob(cp, os.path.join(self.basepath, 'plotskymap.sub'), self.logpath)
        # Spin evolution is only supported by the MCMC engine
        if self.config.has_section('spin_evol'):
            if self.engine == 'lalinferencemcmc':
                self.evolve_spins_job = EvolveSamplesJob(self.config, os.path.join(self.basepath, 'evolve_spins.sub'), self.logpath)
        # Process the input events
        self.events = self.setup_from_inputs()
        if len(self.events) == 0:
            print('No input events found, please check your config if you expect some events')
        self.times = [e.trig_time for e in self.events]
        # If the GPS times are not given, guess them from the events
        if not (self.config.has_option('input', 'gps-start-time') and self.config.has_option('input', 'gps-end-time')) and len(self.times) > 0:
            (mintime, maxtime) = self.get_required_data(self.times)
            if not self.config.has_option('input', 'gps-start-time'):
                self.config.set('input', 'gps-start-time', str(int(floor(mintime))))
            if not self.config.has_option('input', 'gps-end-time'):
                self.config.set('input', 'gps-end-time', str(int(ceil(maxtime))))
        # Save the final configuration into the run directory (and the web directory, if any)
        conffilename = os.path.join(self.basepath, 'config.ini')
        with open(conffilename, 'w') as conffile:
            self.config.write(conffile)
        if self.config.has_option('paths', 'webdir'):
            mkdirs(self.config.get('paths', 'webdir'))
            with open(os.path.join(self.config.get('paths', 'webdir'), 'config.ini'), 'w') as conffile:
                self.config.write(conffile)
        # Add the full analysis for each event
        for event in self.events:
            self.add_full_analysis(event)
        # Optional GraceDB uploads
        if self.config.has_option('analysis', 'upload-to-gracedb'):
            if self.config.getboolean('analysis', 'upload-to-gracedb'):
                self.add_gracedb_FITSskymap_upload(self.events[0], engine=self.engine)
            if self.config.has_option('condor', 'gdbinfo') and self.config.has_option('analysis', 'ugid') and self.config.getboolean('analysis', 'upload-to-gracedb'):
                if self.config.has_option('gracedbinfo', 'server'):
                    gdb_srv = self.config.get('gracedbinfo', 'server')
                else:
                    gdb_srv = None
                self.add_gracedb_info_node(None, self.config.get('analysis', 'ugid'), analysis='LIB', issky=True, server=gdb_srv)
    # Helper used when generating a test DAG: writes a placeholder executable
    # whose jobs only create dummy output files (method name illustrative)
    def write_placeholder_script(self, path):
        script = """#!/usr/bin/env bash
        echo "Making placeholder output files"
        """
        with open(path, 'w') as scr:
            scr.write(script)
        os.chmod(path, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR | stat.S_IXOTH | stat.S_IROTH)
    def add_full_analysis(self, event):
        if self.engine == 'lalinferencenest' or self.engine == 'lalinferenceburst':
            result = self.add_full_analysis_lalinferencenest(event)
        elif self.engine == 'lalinferencemcmc':
            result = self.add_full_analysis_lalinferencemcmc(event)
        else:
            raise Exception('Unknown engine {0}'.format(self.engine))
        return result
    def get_required_data(self, times):
        """
        Calculate the data that will be needed to process all events
        """
        padding = self.config.getint('input', 'padding')
        if self.config.has_option('engine', 'seglen') or self.config.has_option('lalinference', 'seglen'):
            if self.config.has_option('engine', 'seglen'):
                seglen = int(np.ceil(self.config.getfloat('engine', 'seglen')))
            if self.config.has_option('lalinference', 'seglen'):
                seglen = self.config.getint('lalinference', 'seglen')
            try:
                use_gracedbpsd = (not self.config.getboolean('input', 'ignore-gracedb-psd'))
            except (NoOptionError, NoSectionError):
                use_gracedbpsd = True
            if (use_gracedbpsd and os.path.isfile(os.path.join(self.basepath, 'psd.xml.gz'))) or self.config.has_option('condor', 'bayesline') or self.config.has_option('condor', 'bayeswave'):
                # A PSD is provided, so no extra data is needed for on-source PSD estimation
                psdlength = 0
                padding = 0
                self.config.set('input', 'padding', str(padding))
                if self.config.has_option('condor', 'bayeswave'):
                    # BayesWave needs a power-of-two segment length
                    if (np.log2(seglen) % 1):
                        seglen = np.power(2., np.ceil(np.log2(seglen)))
            else:
                psdlength = 32 * seglen
        else:
            seglen = max(e.duration for e in self.events)
            try:
                use_gracedbpsd = (not self.config.getboolean('input', 'ignore-gracedb-psd'))
            except (NoOptionError, NoSectionError):
                use_gracedbpsd = True
            if (use_gracedbpsd and os.path.isfile(os.path.join(self.basepath, 'psd.xml.gz'))) or self.config.has_option('condor', 'bayesline') or self.config.has_option('condor', 'bayeswave'):
                psdlength = 0
                padding = 0
                self.config.set('input', 'padding', str(padding))
                if self.config.has_option('condor', 'bayeswave'):
                    # BayesWave needs a power-of-two segment length
                    if (np.log2(seglen) % 1):
                        seglen = np.power(2., np.ceil(np.log2(seglen)))
            else:
                psdlength = 32 * seglen
        # Shift the data interval to (trig_time - seglen - padding - psdlength + 2,
        # trig_time + padding + 2) so that the PSD is estimated from data before the trigger
        return (min(times) - padding - seglen - psdlength + 2, max(times) + padding + 2)
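    # Worked example (illustrative): with seglen = 8 s, padding = 16 s and
    # psdlength = 32*seglen = 256 s, a single trigger at GPS time t requires
    # the data interval (t - 278, t + 18), i.e. the PSD data sits before the trigger.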
    def setup_from_times(self, times):
        """
        Generate a DAG from a list of times
        """
        for time in self.times:
            self.add_full_analysis(Event(trig_time=time))
    def select_events(self):
        """
        Read events from the config parser. Understands both ranges and comma-separated events, or combinations
        eg. events=[0,1,5:10,21] adds to the analysis the events: 0,1,5,6,7,8,9,10 and 21
        """
        events = []
        raw_events = self.config.get('input', 'events').replace('[', '').replace(']', '').split(',')
        for raw_event in raw_events:
            if ':' in raw_event:
                limits = raw_event.split(':')
                if len(limits) != 2:
                    print("Error: in event config option; ':' must separate two numbers.")
                    sys.exit(1)
                low = limits[0]
                high = limits[1]
                if int(low) > int(high):
                    events.extend(range(int(high), int(low) + 1))
                else:
                    events.extend(range(int(low), int(high) + 1))
            else:
                events.append(int(raw_event))
        return events
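    # Example (illustrative): with events=[0,1,5:10,21] in the [input] section,
    # select_events() returns [0, 1, 5, 6, 7, 8, 9, 10, 21]; a reversed range
    # such as 10:5 selects the same events as 5:10.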
    def setup_from_inputs(self):
        """
        Scan the list of inputs, i.e.
        gps-time-file, injection-file, sngl-inspiral-file, coinc-inspiral-file, pipedown-database
        in the [input] section of the ini file.
        And process the events found therein
        """
        events = []
        gpsstart = None
        gpsend = None
        if self.config.has_option('input', 'gps-start-time'):
            gpsstart = self.config.getfloat('input', 'gps-start-time')
        if self.config.has_option('input', 'gps-end-time'):
            gpsend = self.config.getfloat('input', 'gps-end-time')
        inputnames = ['gps-time-file', 'burst-injection-file', 'injection-file', 'coinc-xml', 'pipedown-db', 'gid', 'gstlal-db']
        ReadInputFromList = sum([1 if self.config.has_option('input', name) else 0 for name in inputnames])
        # If no input events are given, just return an empty list
        if ReadInputFromList != 1 and (gpsstart is None or gpsend is None):
            return []
        if self.config.has_option('input', 'events'):
            selected_events = self.config.get('input', 'events')
            print('Selected events %s' % (str(selected_events)))
            if selected_events == 'all':
                selected_events = None
            else:
                selected_events = self.select_events()
        else:
            selected_events = None
        # Check whether this is an analytic-likelihood test run
        if (self.config.has_option('engine', 'correlatedGaussianLikelihood') or
                self.config.has_option('engine', 'bimodalGaussianLikelihood') or
                self.config.has_option('engine', 'rosenbrockLikelihood')):
            analytic_test = True
        else:
            analytic_test = False
        # No input file given; analyse the continuous stretch between gpsstart and gpsend
        if self.config.has_option('input', 'analyse-all-time') and self.config.getboolean('input', 'analyse-all-time') == True:
            print('Setting up for analysis of continuous time stretch %f - %f' % (gpsstart, gpsend))
            if self.config.has_option('engine', 'seglen'):
                seglen = self.config.getfloat('engine', 'seglen')
            else:
                print('ERROR: seglen must be specified in [engine] section when running without input file')
                sys.exit(1)
            if (self.config.has_option('input', 'segment-overlap')):
                overlap = self.config.getfloat('input', 'segment-overlap')
            else:
                overlap = 32.
            if (overlap > seglen):
                print('ERROR: segment-overlap is greater than seglen')
                sys.exit(1)
            # Now divide gpsstart - gpsend into jobs of seglen - overlap length
            t = gpsstart
            events = []
            while (t < gpsend):
                ev = Event(trig_time=t + seglen - 2)
                ev.set_engine_option('segment-start', str(t - overlap))
                if not analytic_test:
                    ev.set_engine_option('time-min', str(t))
                tMax = t + seglen - overlap
                if tMax >= gpsend:
                    tMax = gpsend
                if not analytic_test:
                    ev.set_engine_option('time-max', str(tMax))
                events.append(ev)
                t = tMax
            return events
        # ASCII list of GPS times
        if self.config.has_option('input', 'gps-time-file'):
            times = scan_timefile(self.config.get('input', 'gps-time-file'))
            if self.config.has_option('input', 'timeslides-ascii'):
                # The timeslides-ascii file contains one row per trigtime and one column per IFO
                print("Reading timeslides from ascii file. Columns order is understood as follows:")
                for this_ifo, ifo in enumerate(self.ifos):
                    print("Column %d" % this_ifo + "= %s " % (ifo))
                dest = self.config.get('input', 'timeslides-ascii')
                if not os.path.isfile(dest):
                    print("ERROR the ascii file %s containing the timeslides does not exist\n" % dest)
                    sys.exit(1)
                else:
                    from numpy import loadtxt
                    data = loadtxt(dest).reshape(-1, len(self.ifos))
                    if len(self.ifos) != len(data[0, :]):
                        print("ERROR: ascii timeslide file must contain a column for each IFO used in the analysis!\n")
                        sys.exit(1)
                    if len(times) != len(data[:, 0]):
                        print('ERROR: ascii timeslide must contain a row for each trigtime. Exiting...\n')
                        sys.exit(1)
                    timeslides = {}
                    for this_time, time in enumerate(times):
                        timeslides[this_time] = {}
                        for this_ifo, ifo in enumerate(self.ifos):
                            timeslides[this_time][ifo] = data[this_time, this_ifo]
                events = [Event(trig_time=time, timeslide_dict=timeslides[i_time]) for i_time, time in enumerate(times)]
            else:
                events = [Event(trig_time=time) for time in times]
        # SimInspiral table
        if self.config.has_option('input', 'injection-file'):
            injTable = lsctables.SimInspiralTable.get_table(
                ligolw_utils.load_filename(self.config.get('input', 'injection-file')))
            events = [Event(SimInspiral=inj) for inj in injTable]
        # SimBurst table
        if self.config.has_option('input', 'burst-injection-file'):
            injfile = self.config.get('input', 'burst-injection-file')
            injTable = lsctables.SimBurstTable.get_table(ligolw_utils.load_filename(injfile))
            events = [Event(SimBurst=inj) for inj in injTable]
        # GraceDB event or local coinc.xml
        gid = None
        if self.config.has_option('input', 'gid') or self.config.has_option('input', 'coinc-xml'):
            flow = 20.0
            if self.config.has_option('lalinference', 'flow'):
                flow = min(ast.literal_eval(self.config.get('lalinference', 'flow')).values())
            threshold_snr = None
            if not self.config.has_option('engine', 'distance-max') and self.config.has_option('input', 'threshold-snr'):
                threshold_snr = self.config.getfloat('input', 'threshold-snr')
            # Get the coinc and psd objects, either from GraceDB or from local files
            from lal import series as lalseries
            psd_file_obj = None
            if self.config.has_option('input', 'gid'):
                from ligo.gracedb.rest import GraceDb, HTTPError
                gid = self.config.get('input', 'gid')
                if self.config.has_option('analysis', 'service-url'):
                    service_url = self.config.get('analysis', 'service-url')
                    client = GraceDb(service_url=service_url)
                else:
                    client = GraceDb()
                print("Download %s coinc.xml" % gid)
                coinc_file_obj = client.files(gid, "coinc.xml")
                print("Download %s psd.xml.gz" % gid)
                try:
                    psd_file_obj = client.files(gid, "psd.xml.gz")
                except HTTPError:
                    print("Failed to download %s psd.xml.gz. lalinference will estimate the psd itself." % gid)
            else:
                coinc_file_obj = open(self.config.get('input', 'coinc-xml'), "rb")
                try:
                    psd_file_obj = open(self.config.get('input', 'psd-xml-gz'), "rb")
                except (NoOptionError, NoSectionError, IOError):
                    print("lalinference will estimate the psd itself.")
            # Write the downloaded objects to files in the run directory
            coinc_xml_obj = ligolw_utils.load_fileobj(coinc_file_obj)[0]
            ligolw_utils.write_filename(
                coinc_xml_obj, os.path.join(self.basepath, "coinc.xml")
            )
            psd_dict = None
            if psd_file_obj is not None:
                path_to_psd = os.path.join(self.basepath, "psd.xml.gz")
                psd_xml_obj = ligolw_utils.load_fileobj(psd_file_obj)[0]
                psd_dict = lalseries.read_psd_xmldoc(psd_xml_obj)
                ligolw_utils.write_filename(psd_xml_obj, path_to_psd)
                ifos = sorted(
                    lsctables.CoincInspiralTable.get_table(
                        coinc_xml_obj
                    )[0].instruments
                )
                get_xml_psds(
                    os.path.realpath(path_to_psd), ifos,
                    os.path.realpath(os.path.join(self.basepath, "PSDs")),
                    end_time=None
                )
            try:
                use_gracedbpsd = (not self.config.getboolean('input', 'ignore-gracedb-psd'))
            except (NoOptionError, NoSectionError):
                use_gracedbpsd = True
            events = create_events_from_coinc_and_psd(
                coinc_xml_obj, psd_dict=psd_dict, gid=gid,
                threshold_snr=threshold_snr, flow=flow,
                roq=self.config.getboolean('analysis', 'roq'),
                use_gracedbpsd=use_gracedbpsd
            )
        # gstlal or pipedown database
        if self.config.has_option('input', 'gstlal-db'):
            queryfunc = get_zerolag_lloid
            dbname = self.config.get('input', 'gstlal-db')
        elif self.config.has_option('input', 'pipedown-db'):
            queryfunc = get_zerolag_pipedown
            dbname = self.config.get('input', 'pipedown-db')
        else:
            dbname = None
        if dbname is not None:
            db_connection, working_filename = open_pipedown_database(dbname, None)
            if self.config.has_option('input', 'time-slide-dump'):
                timeslidedump = self.config.get('input', 'time-slide-dump')
            else:
                timeslidedump = None
            if self.config.has_option('input', 'min-cfar'):
                mincfar = self.config.getfloat('input', 'min-cfar')
            else:
                mincfar = -1
            if self.config.has_option('input', 'max-cfar'):
                maxcfar = self.config.getfloat('input', 'max-cfar')
            else:
                maxcfar = -1
            if self.config.get('input', 'timeslides').lower() == 'true':
                events = get_timeslides_pipedown(db_connection, gpsstart=gpsstart, gpsend=gpsend, dumpfile=timeslidedump, max_cfar=maxcfar)
            else:
                events = queryfunc(db_connection, gpsstart=gpsstart, gpsend=gpsend, dumpfile=timeslidedump, max_cfar=maxcfar, min_cfar=mincfar)
        if (selected_events is not None):
            used_events = []
            for i in selected_events:
                e = events[i]
                e.event_id = i
                used_events.append(e)
            events = used_events
        if gpsstart is not None:
            events = list(filter(lambda e: not e.trig_time < gpsstart, events))
        if gpsend is not None:
            events = list(filter(lambda e: not e.trig_time > gpsend, events))
        return events
    def spin_evol_checks(self):
        # Decide whether spin evolution can be run for this configuration
        if self.config.has_section('spin_evol'):
            from lalsimulation import SimInspiralGetApproximantFromString, SimInspiralGetSpinSupportFromApproximant
            tidal_run_tests = self.config.has_option('engine', 'tidal') or self.config.has_option('engine', 'tidalT')
            nonprecessing_run_tests = self.config.has_option('engine', 'disable-spin') or self.config.has_option('engine', 'aligned-spin')
            approx_num = SimInspiralGetApproximantFromString(extract_approx(self.config))
            # A spin support value of 3 corresponds to a fully precessing waveform
            precessing_wf_test = (SimInspiralGetSpinSupportFromApproximant(approx_num) == 3)
            if tidal_run_tests:
                print("\n****** Note: Spin evolution will not be performed because tidal parameters are turned on ******\n")
                spin_evol_flag = 0
            elif precessing_wf_test and not nonprecessing_run_tests:
                spin_evol_flag = 1
            else:
                print("\n****** Note: Spin evolution will not be performed because this is not a precessing run ******\n")
                spin_evol_flag = 0
        else:
            spin_evol_flag = 0
        return spin_evol_flag
    def add_full_analysis_lalinferencenest(self, event):
        """
        Generate an end-to-end analysis of a given event (Event class)
        for the LALInferenceNest code. Uses parallel runs if specified.
        """
        evstring = str(event.event_id)
        if event.trig_time is not None:
            evstring = str(event.trig_time) + '-' + str(event.event_id)
        if self.config.has_option('analysis', 'nparallel'):
            Npar = self.config.getint('analysis', 'nparallel')
        else:
            Npar = 4
        # Set up the parallel engine nodes
        enginenodes = []
        bwpsdnodes = {}
        bwpostnodes = {}
        for i in range(Npar):
            n, bwpsdnodes, bwpostnodes = self.add_engine_node(event, bwpsd=bwpsdnodes, bwpost=bwpostnodes)
            if n is not None:
                if i > 0:
                    n.add_var_arg('--dont-dump-extras')
                enginenodes.append(n)
        if len(enginenodes) == 0:
            return False
        myifos = enginenodes[0].get_ifos()
        # Merge the parallel runs into a single posterior file
        pagedir = os.path.join(self.webdir, evstring, myifos)
        mkdirs(pagedir)
        mergenode = MergeNode(self.merge_job, parents=enginenodes, engine='nest')
        mergenode.set_pos_output_file(os.path.join(self.posteriorpath, 'posterior_%s_%s.hdf5' % (myifos, evstring)))
        self.add_node(mergenode)
        # Evolve the spins of the posterior samples, if requested
        if self.spin_evol_checks():
            evolve_spins_node = EvolveSamplesNode(self.evolve_spins_job, posfile=mergenode.get_pos_file(), parent=mergenode)
            self.add_node(evolve_spins_node)
            respage_parent = evolve_spins_node
        else:
            respage_parent = mergenode
        enginenodes[0].finalize()
        enginenodes[0].set_psd_files()
        enginenodes[0].set_snr_file()
        if self.config.getboolean('analysis', 'coherence-test') and len(enginenodes[0].ifos) > 1:
            if "summarypages" not in self.config.get('condor', 'resultspage'):
                respagenode = self.add_results_page_node(outdir=pagedir, parent=respage_parent, gzip_output=None, ifos=enginenodes[0].ifos)
                respagenode.set_psd_files(enginenodes[0].get_psd_files())
                respagenode.set_snr_file(enginenodes[0].get_snr_file())
                if os.path.exists(self.basepath + '/coinc.xml'):
                    try:
                        gid = self.config.get('input', 'gid')
                    except (NoOptionError, NoSectionError):
                        gid = None
                    respagenode.set_coinc_file(os.path.join(self.basepath, 'coinc.xml'), gid)
                # Run each IFO separately for the coherence test
                par_mergenodes = []
                for ifo in enginenodes[0].ifos:
                    co_merge_job = MergeJob(self.config, os.path.join(self.basepath, 'merge_runs_%s.sub' % (ifo)), self.logpath, engine='nest')
                    cotest_nodes = []
                    for i in range(Npar):
                        cot_node, bwpsdnodes, bwpostnodes = self.add_engine_node(event, bwpsd=bwpsdnodes, bwpost=bwpostnodes, ifos=[ifo], co_test=True)
                        if cot_node is not None:
                            if i > 0:
                                cot_node.add_var_arg('--dont-dump-extras')
                            cotest_nodes.append(cot_node)
                    if len(cotest_nodes) == 0:
                        return False
                    for co in cotest_nodes:
                        co.set_psdstart(enginenodes[0].GPSstart)
                        co.set_psdlength(enginenodes[0].psdlength)
                        if co != cotest_nodes[0]:
                            co.add_var_arg('--dont-dump-extras')
                    pmergenode = MergeNode(co_merge_job, parents=cotest_nodes, engine='nest')
                    pmergenode.set_pos_output_file(os.path.join(self.posteriorpath, 'posterior_%s_%s.hdf5' % (ifo, evstring)))
                    self.add_node(pmergenode)
                    par_mergenodes.append(pmergenode)
                    presultsdir = os.path.join(pagedir, ifo)
                    mkdirs(presultsdir)
                    subresnode = self.add_results_page_node(outdir=presultsdir, parent=pmergenode, gzip_output=None, ifos=ifo)
                    subresnode.set_psd_files(cotest_nodes[0].get_psd_files())
                    subresnode.set_snr_file(cotest_nodes[0].get_snr_file())
                    if os.path.exists(self.basepath + '/coinc.xml'):
                        try:
                            gid = self.config.get('input', 'gid')
                        except (NoOptionError, NoSectionError):
                            gid = None
                        subresnode.set_coinc_file(os.path.join(self.basepath, 'coinc.xml'), gid)
                    if self.config.has_option('input', 'injection-file') and event.event_id is not None:
                        subresnode.set_injection(self.config.get('input', 'injection-file'), event.event_id)
                    elif self.config.has_option('input', 'burst-injection-file') and event.event_id is not None:
                        subresnode.set_injection(self.config.get('input', 'burst-injection-file'), event.event_id)
                coherence_node = CoherenceTestNode(self.coherence_test_job, outfile=os.path.join(self.basepath, 'coherence_test', 'coherence_test_%s_%s.dat' % (myifos, evstring)))
                coherence_node.add_coherent_parent(mergenode)
                for parmergenode in par_mergenodes:
                    coherence_node.add_incoherent_parent(parmergenode)
                self.add_node(coherence_node)
                respagenode.add_parent(coherence_node)
                respagenode.set_bayes_coherent_incoherent(coherence_node.outfile)
            elif "summarypages" in self.config.get('condor', 'resultspage'):
                # Same coherence test, but producing a PESummary results page
                par_mergenodes = []
                for ifo in enginenodes[0].ifos:
                    co_merge_job = MergeJob(self.config, os.path.join(self.basepath, 'merge_runs_%s.sub' % (ifo)), self.logpath, engine='nest')
                    cotest_nodes = []
                    for i in range(Npar):
                        cot_node, bwpsdnodes, bwpostnodes = self.add_engine_node(event, bwpsd=bwpsdnodes, bwpost=bwpostnodes, ifos=[ifo], co_test=True)
                        if cot_node is not None:
                            if i > 0:
                                cot_node.add_var_arg('--dont-dump-extras')
                            cotest_nodes.append(cot_node)
                    if len(cotest_nodes) == 0:
                        return False
                    for co in cotest_nodes:
                        co.set_psdstart(enginenodes[0].GPSstart)
                        co.set_psdlength(enginenodes[0].psdlength)
                        if co != cotest_nodes[0]:
                            co.add_var_arg('--dont-dump-extras')
                    pmergenode = MergeNode(co_merge_job, parents=cotest_nodes, engine='nest')
                    pmergenode.set_pos_output_file(os.path.join(self.posteriorpath, 'posterior_%s_%s.hdf5' % (ifo, evstring)))
                    self.add_node(pmergenode)
                    par_mergenodes.append(pmergenode)
                    presultsdir = os.path.join(pagedir, ifo)
                    mkdirs(presultsdir)
                respagenode = self.add_results_page_node_pesummary(outdir=pagedir, parent=respage_parent, gzip_output=None, ifos=enginenodes[0].ifos,
                                                                   evstring=evstring, coherence=True)
                respagenode.set_psd_files(enginenodes[0].ifos, enginenodes[0].get_psd_files())
                try:
                    cachefiles = self.config.get('resultspage', 'plot-strain-data')
                    cachefiles_option = True
                except (NoOptionError, NoSectionError):
                    cachefiles_option = False
                if cachefiles_option:
                    respagenode.set_cache_files(enginenodes[0].channels, enginenodes[0].cachefiles)
                try:
                    labels = self.config.get('resultspage', 'label')
                except (NoOptionError, NoSectionError):
                    labels = None
                respagenode.set_labels(labels)
                try:
                    gid = self.config.get('input', 'gid')
                except (NoOptionError, NoSectionError):
                    gid = None
                respagenode.set_gid(gid)
                if os.path.exists(self.basepath + '/coinc.xml'):
                    respagenode.set_coinc_file(os.path.join(self.basepath, 'coinc.xml'), gid)
        else:
            if "summarypages" in self.config.get('condor', 'resultspage'):
                respagenode = self.add_results_page_node_pesummary(outdir=pagedir, parent=respage_parent, gzip_output=None, ifos=enginenodes[0].ifos,
                                                                   evstring=evstring, coherence=False)
                respagenode.set_psd_files(enginenodes[0].ifos, enginenodes[0].get_psd_files())
                try:
                    cachefiles = self.config.get('resultspage', 'plot-strain-data')
                    cachefiles_option = True
                except (NoOptionError, NoSectionError):
                    cachefiles_option = False
                if cachefiles_option:
                    respagenode.set_cache_files(enginenodes[0].channels, enginenodes[0].cachefiles)
                try:
                    labels = self.config.get('resultspage', 'label')
                except (NoOptionError, NoSectionError):
                    labels = None
                respagenode.set_labels(labels)
                try:
                    gid = self.config.get('input', 'gid')
                except (NoOptionError, NoSectionError):
                    gid = None
                respagenode.set_gid(gid)
                if os.path.exists(self.basepath + '/coinc.xml'):
                    respagenode.set_coinc_file(os.path.join(self.basepath, 'coinc.xml'), gid)
            else:
                respagenode = self.add_results_page_node(outdir=pagedir, parent=respage_parent, gzip_output=None, ifos=enginenodes[0].ifos)
                respagenode.set_psd_files(enginenodes[0].get_psd_files())
                respagenode.set_snr_file(enginenodes[0].get_snr_file())
                if os.path.exists(self.basepath + '/coinc.xml'):
                    try:
                        gid = self.config.get('input', 'gid')
                    except (NoOptionError, NoSectionError):
                        gid = None
                    respagenode.set_coinc_file(os.path.join(self.basepath, 'coinc.xml'), gid)
                if self.config.has_option('input', 'injection-file') and event.event_id is not None:
                    respagenode.set_injection(self.config.get('input', 'injection-file'), event.event_id)
                elif self.config.has_option('input', 'burst-injection-file') and event.event_id is not None:
                    respagenode.set_injection(self.config.get('input', 'burst-injection-file'), event.event_id)
        if self.config.has_option('analysis', 'upload-to-gracedb'):
            if self.config.has_option('gracedbinfo', 'server'):
                gdb_srv = self.config.get('gracedbinfo', 'server')
            else:
                gdb_srv = None
            if self.config.getboolean('analysis', 'upload-to-gracedb') and event.GID is not None:
                self.add_gracedb_start_node(event.GID, 'LALInference', [sciseg.get_df_node() for sciseg in enginenodes[0].scisegs.values()], server=gdb_srv)
            elif self.config.has_option('analysis', 'ugid'):
                # LIB uploads to gracedb with a non-standard GID
                ugid = self.config.get('analysis', 'ugid')
                event.GID = ugid
                self.add_gracedb_start_node(ugid, 'LIB', [sciseg.get_df_node() for sciseg in enginenodes[0].scisegs.values()], server=gdb_srv)
        if self.config.has_option('resultspage', 'email'):
            emailto = self.config.get('resultspage', 'email')
        else:
            emailto = None
        if self.config.has_option('gracedbinfo', 'server'):
            gdb_srv = self.config.get('gracedbinfo', 'server')
        else:
            gdb_srv = None
        # Optional sky map generation and plotting
        if self.config.has_option('condor', 'ligo-skymap-plot') and self.config.has_option('condor', 'ligo-skymap-from-samples'):
            if self.engine == 'lalinferenceburst':
                prefix = 'LIB'
            else:
                prefix = 'LALInference'
            mapnode = SkyMapNode(self.mapjob, posfile=mergenode.get_pos_file(), parent=mergenode,
                                 prefix=prefix, outdir=pagedir, ifos=self.ifos)
            plotmapnode = PlotSkyMapNode(self.plotmapjob, parent=mapnode, inputfits=mapnode.outfits, output=os.path.join(pagedir, 'skymap.png'))
            self.add_node(mapnode)
            self.add_node(plotmapnode)
        return True
    def add_full_analysis_lalinferencemcmc(self, event):
        """
        Generate an end-to-end analysis of a given event
        for LALInferenceMCMC.
        """
        evstring = str(event.event_id)
        if event.trig_time is not None:
            evstring = str(event.trig_time) + '-' + str(event.event_id)
        if self.config.has_option('analysis', 'nparallel'):
            Npar = self.config.getint('analysis', 'nparallel')
        else:
            Npar = 2
        enginenodes = []
        bwpsdnodes = {}
        bwpostnodes = {}
        for i in range(Npar):
            n, bwpsdnodes, bwpostnodes = self.add_engine_node(event, bwpsd=bwpsdnodes, bwpost=bwpostnodes)
            if n is not None:
                if i > 0:
                    n.add_var_arg('--dont-dump-extras')
                enginenodes.append(n)
        if len(enginenodes) == 0:
            return False
        myifos = enginenodes[0].get_ifos()
        enginenodes[0].set_psd_files()
        enginenodes[0].set_snr_file()
        pagedir = os.path.join(self.webdir, evstring, myifos)
        mkdirs(pagedir)
        # Combine the MCMC chains of each parallel run, then merge the runs
        combinenodes = []
        for i in range(Npar):
            combinenodes.append(CombineMCMCNode(self.combine_job, parents=[enginenodes[i]]))
            input_file = combinenodes[i].get_parent_posfile(enginenodes[i])
            input_file_split_index = input_file.find('lalinferencemcmc-')
            combinenodes[i].set_pos_output_file(input_file[:input_file_split_index] + 'combine_' + input_file[input_file_split_index:])
            combinenodes[i].add_file_arg(input_file)
            number_of_mpi_jobs = self.config.getint('mpi', 'mpi_task_count')
            for j in range(1, number_of_mpi_jobs):
                combinenodes[i].add_file_arg(input_file + ".%02d" % j)
            self.add_node(combinenodes[i])
        mergenode = MergeNode(self.merge_job, parents=combinenodes, engine='mcmc')
        mergenode.set_pos_output_file(os.path.join(self.posteriorpath, 'posterior_%s_%s.hdf5' % (myifos, evstring)))
        if self.config.has_option('resultspage', 'deltaLogP'):
            mergenode.add_var_arg('--deltaLogP ' + str(self.config.getfloat('resultspage', 'deltaLogP')))
        if self.config.has_option('resultspage', 'downsample'):
            mergenode.add_var_arg('--downsample ' + str(self.config.getint('resultspage', 'downsample')))
        if self.config.has_option('resultspage', 'fixedBurnin'):
            mergenode.add_var_arg('--fixedBurnin ' + str(self.config.getint('resultspage', 'fixedBurnin')))
        self.add_node(mergenode)
        # Evolve the spins of the posterior samples, if requested
        if self.spin_evol_checks():
            evolve_spins_node = EvolveSamplesNode(self.evolve_spins_job, posfile=mergenode.get_pos_file(), parent=mergenode)
            self.add_node(evolve_spins_node)
            respage_parent = evolve_spins_node
        else:
            respage_parent = mergenode
        if "summarypages" in self.config.get('condor', 'resultspage'):
            respagenode = self.add_results_page_node_pesummary(outdir=pagedir, parent=respage_parent, gzip_output=None, ifos=enginenodes[0].ifos, evstring=evstring, coherence=self.config.getboolean('analysis', 'coherence-test'))
            respagenode.set_psd_files(enginenodes[0].ifos, enginenodes[0].get_psd_files())
            try:
                cachefiles = self.config.get('resultspage', 'plot-strain-data')
                cachefiles_option = True
            except (NoOptionError, NoSectionError):
                cachefiles_option = False
            if cachefiles_option:
                respagenode.set_cache_files(enginenodes[0].channels, enginenodes[0].cachefiles)
            try:
                labels = self.config.get('resultspage', 'label')
            except (NoOptionError, NoSectionError):
                labels = None
            respagenode.set_labels(labels)
            if os.path.exists(self.basepath + '/coinc.xml'):
                try:
                    gid = self.config.get('input', 'gid')
                except (NoOptionError, NoSectionError):
                    gid = None
                respagenode.set_coinc_file(os.path.join(self.basepath, 'coinc.xml'), gid)
        else:
            respagenode = self.add_results_page_node(outdir=pagedir, parent=respage_parent, gzip_output=None, ifos=enginenodes[0].ifos)
            respagenode.set_psd_files(enginenodes[0].get_psd_files())
            respagenode.set_snr_file(enginenodes[0].get_snr_file())
            if os.path.exists(self.basepath + '/coinc.xml'):
                try:
                    gid = self.config.get('input', 'gid')
                except (NoOptionError, NoSectionError):
                    gid = None
                respagenode.set_coinc_file(os.path.join(self.basepath, 'coinc.xml'), gid)
            if self.config.has_option('input', 'injection-file') and event.event_id is not None:
                respagenode.set_injection(self.config.get('input', 'injection-file'), event.event_id)
            if self.config.has_option('input', 'burst-injection-file') and event.event_id is not None:
                respagenode.set_injection(self.config.get('input', 'burst-injection-file'), event.event_id)
        if event.GID is not None:
            if self.config.has_option('analysis', 'upload-to-gracedb'):
                if self.config.getboolean('analysis', 'upload-to-gracedb'):
                    if self.config.has_option('gracedbinfo', 'server'):
                        gdb_srv = self.config.get('gracedbinfo', 'server')
                    else:
                        gdb_srv = None
                    self.add_gracedb_start_node(event.GID, 'LALInference', [sciseg.get_df_node() for sciseg in enginenodes[0].scisegs.values()], server=gdb_srv)
        if self.config.has_option('condor', 'ligo-skymap-plot') and self.config.has_option('condor', 'ligo-skymap-from-samples'):
            mapnode = SkyMapNode(self.mapjob, posfile=mergenode.get_pos_file(), parent=mergenode,
                                 prefix='LALInference', outdir=pagedir, ifos=self.ifos)
            plotmapnode = PlotSkyMapNode(self.plotmapjob, parent=mapnode, inputfits=mapnode.outfits, output=os.path.join(pagedir, 'skymap.png'))
            self.add_node(mapnode)
            self.add_node(plotmapnode)
        return True
    def add_science_segments(self):
        # Query the statevector for science segments and add them to the pool of segments
        start = self.config.getfloat('input', 'gps-start-time')
        end = self.config.getfloat('input', 'gps-end-time')
        if self.config.has_option('input', 'ignore-state-vector'):
            if self.config.getboolean('input', 'ignore-state-vector'):
                # Treat the whole stretch as analysable for each IFO
                for i, ifo in enumerate(self.ifos):
                    sciseg = pipeline.ScienceSegment((i, start, end, end - start))
                    df_node = self.get_datafind_node(ifo, self.frtypes[ifo], int(sciseg.start()), int(sciseg.end()))
                    sciseg.set_df_node(df_node)
                    self.segments[ifo].append(sciseg)
                return
        for ifo in self.ifos:
            print('Read state vector for {0} data between {1} and {2}'.format(ifo, start, end))
            segs = findSegmentsToAnalyze(
                ifo, self.frtypes[ifo], ast.literal_eval(self.config.get('statevector', 'state-vector-channel'))[ifo],
                ast.literal_eval(self.config.get('statevector', 'bits')), start, end
            )
            for seg in segs:
                sciseg = pipeline.ScienceSegment((segs.index(seg), seg[0], seg[1], seg[1] - seg[0]))
                df_node = self.get_datafind_node(ifo, self.frtypes[ifo], int(sciseg.start()), int(sciseg.end()))
                sciseg.set_df_node(df_node)
                self.segments[ifo].append(sciseg)
    def get_datafind_node(self, ifo, frtype, gpsstart, gpsend):
        node = pipeline.LSCDataFindNode(self.datafind_job)
        node.set_observatory(ifo[0])
        node.set_type(frtype)
        node.set_start(gpsstart)
        node.set_end(gpsend)
        return node
1596 def add_engine_node(self,event,bwpsd={},bwpost={},ifos=None,co_test=False,extra_options=None):
1598 Add an engine node to the dag. Will find the appropriate cache files automatically.
1599 Will determine the data to be read and the output file.
1600 Will use all IFOs known to the DAG, unless otherwise specified
as a list of strings
1602 if ifos
is None and self.
config.has_option(
'analysis',
'ifos'):
1603 ifos = ast.literal_eval(self.
config.get(
'analysis',
'ifos'))
1604 if ifos
is None and len(event.ifos)>0:
1608 end_time=event.trig_time
1609 if self.
config.has_option(
'lalinference',
'seglen'):
1610 seglen=self.
config.getfloat(
'lalinference',
'seglen')
1611 elif self.
config.has_option(
'engine',
'seglen'):
1612 seglen=self.
config.getfloat(
'engine',
'seglen')
1614 seglen=event.duration
1615 segstart=end_time+2-seglen
1616 segend=segstart+seglen
1619 use_gracedbpsd = (
not self.
config.getboolean(
'input',
'ignore-gracedb-psd'))
1620 except (NoOptionError, NoSectionError):
1621 use_gracedbpsd =
True
1622 use_lalinference_psd =
not ((use_gracedbpsd
and os.path.isfile(os.path.join(self.
basepath,
'psd.xml.gz')))
1623 or self.
config.has_option(
'condor',
'bayesline')
1624 or self.
config.has_option(
'condor',
'bayeswave'))
1626 if use_lalinference_psd
and self.
config.has_option(
'input',
'minimum_realizations_number'):
1627 psdstart = segstart - self.
config.getint(
'input',
'padding') - \
1628 self.
config.getint(
'input',
'minimum_realizations_number') * seglen
1634 if psdstart >= seg.start()
and segend <= seg.end():
1638 print(
'No data found for time %f - %f, skipping'%(segstart,segend))
1641 computeroqweightsnode={}
1643 bayeswavepsdnode=bwpsd
1644 bayeswavepostnode=bwpost
1647 roqeventpath=os.path.join(self.
preengine_job.roqpath,
str(event.event_id)+
'/')
1648 if self.
config.has_option(
'condor',
'bayesline')
or self.
config.has_option(
'condor',
'bayeswave')
or self.
config.getboolean(
'analysis',
'roq'):
1650 node.set_trig_time(end_time)
1651 prenode.set_trig_time(end_time)
1657 original_srate=event.srate
1658 if self.
config.has_option(
'lalinference',
'srate'):
1659 original_srate=self.
config.getfloat(
'lalinference',
'srate')
1660 elif self.
config.has_option(
'engine',
'srate'):
1661 original_srate=self.
config.getfloat(
'engine',
'srate')
1662 if (np.log2(original_srate)%1):
1663 print(
'The srate given,'+
str(original_srate)+
' Hz, is not a power of 2.')
1664 print(
'For data handling purposes, the srate used will be '+
str(np.power(2., np.ceil(np.log2(original_srate))))+
' Hz')
1665 print(
'The inner products will still however be integrated up to '+
str(original_srate/2.)+
' Hz')
1666 srate = np.power(2., np.ceil(np.log2(original_srate)))
1668 srate = original_srate
1670 node.set_srate(
int(np.ceil(srate)))
1671 prenode.set_srate(
int(np.ceil(srate)))
1672 if original_srate != 0:
1674 node.fhighs[ifo]=
str(original_srate/2.-1./seglen)
1675 prenode.fhighs[ifo]=
str(original_srate/2.-1./seglen)
1676 node.set_srate(srate)
1677 prenode.set_srate(srate)
1679 node.set_trigSNR(event.trigSNR)
1680 if event.horizon_distance:
1681 node.set_horizon_distance(event.horizon_distance)
1683 node.set_dataseed(self.
dataseed+event.event_id)
1684 prenode.set_dataseed(self.
dataseed+event.event_id)
1687 if ifo
in event.timeslides:
1688 slide=event.timeslides[ifo]
1692 if segstart >= seg.start()
and segend <= seg.end():
1693 if not self.
config.has_option(
'lalinference',
'fake-cache'):
1694 if self.
config.has_option(
'condor',
'bayesline')
or self.
config.getboolean(
'analysis',
'roq'):
1695 prenode.add_ifo_data(ifo,seg,self.
channels[ifo],timeslide=slide)
1696 gotdata+=node.add_ifo_data(ifo,seg,self.
channels[ifo],timeslide=slide)
1698 fakecachefiles=ast.literal_eval(self.
config.get(
'lalinference',
'fake-cache'))
1699 if self.
config.has_option(
'condor',
'bayesline')
or self.
config.getboolean(
'analysis',
'roq'):
1700 prenode.add_fake_ifo_data(ifo,seg,fakecachefiles[ifo],self.
channels[ifo],timeslide=slide)
1701 gotdata+=node.add_fake_ifo_data(ifo,seg,fakecachefiles[ifo],self.
channels[ifo],timeslide=slide)
1702 if self.
config.has_option(
'lalinference',
'psd-xmlfile'):
1703 psdpath=os.path.realpath(self.
config.get(
'lalinference',
'psd-xmlfile'))
1707 node.ifos=node.cachefiles.keys()
1708 prenode.ifos=prenode.cachefiles.keys()
1714 use_gracedbpsd = (
not self.
config.getboolean(
'input',
'ignore-gracedb-psd'))
1715 except (NoOptionError, NoSectionError):
1716 use_gracedbpsd =
True
1718 if os.path.isfile(os.path.join(self.
basepath,
'psd.xml.gz')):
1719 psdpath=os.path.join(self.
basepath,
'psd.xml.gz')
1723 prenode.flows[ifo]=
str(20.0)
1724 if self.
config.has_option(
'lalinference',
'flow'):
1725 node.flows=ast.literal_eval(self.
config.get(
'lalinference',
'flow'))
1726 prenode.flows=ast.literal_eval(self.
config.get(
'lalinference',
'flow'))
1729 node.fhighs[ifo]=
str(event.fhigh)
1730 prenode.fhighs[ifo]=
str(event.fhigh)
1731 if self.
config.has_option(
'lalinference',
'fhigh'):
1732 node.fhighs=ast.literal_eval(self.
config.get(
'lalinference',
'fhigh'))
1733 prenode.fhighs=ast.literal_eval(self.
config.get(
'lalinference',
'fhigh'))
1734 prenode.set_max_psdlength(self.
config.getint(
'input',
'max-psd-length'))
1735 prenode.set_padding(self.
config.getint(
'input',
'padding'))
1737 prenode.add_file_opt(
'outfile',roqeventpath+
'data-dump',file_is_output_file=
True)
1738 prenode.add_var_arg(
'--data-dump')
1739 if self.
config.has_option(
'lalinference',
'seglen'):
1740 p_seglen=self.
config.getfloat(
'lalinference',
'seglen')
1741 elif self.
config.has_option(
'engine',
'seglen'):
1742 p_seglen=self.
config.getfloat(
'engine',
'seglen')
1744 p_seglen=event.duration
1745 prenode.set_seglen(p_seglen)
1746 if self.
config.has_option(
'condor',
'bayeswave'):
1747 prenode.set_psdlength(p_seglen)
1748 if self.
config.has_option(
'lalinference',
'seglen'):
1749 bw_seglen = self.
config.getfloat(
'lalinference',
'seglen')
1750 elif self.
config.has_option(
'engine',
'seglen'):
1751 bw_seglen = self.
config.getfloat(
'engine',
'seglen')
1753 bw_seglen = event.duration
1754 if (np.log2(bw_seglen)%1):
1755 print(
'BayesWave only supports seglengths which are powers of 2, you have specified a seglength of '+
str(bw_seglen)+
' seconds.')
1756 print(
'Instead, a seglenth of '+
str(np.power(2., np.ceil(np.log2(bw_seglen))))+
's will be used for the BayesWave PSD estimation.')
1757 print(
'The main LALInference job will stil use seglength '+
str(bw_seglen)+
' seconds.')
1758 bw_seglen = np.power(2., np.ceil(np.log2(bw_seglen)))
1759 if self.
config.has_option(
'condor',
'bayeswave'):
1760 if self.
config.has_option(
'bayeswave',
'bw_srate'):
1761 bw_srate = self.
config.getfloat(
'bayeswave',
'bw_srate')
1762 print(
'BayesWave will generate PSDs using a srate of '+
str(bw_srate)+
' Hz.')
1763 print(
'The main LALInference job will stil use srate '+
str(srate)+
' Hz.')
1764 elif (np.log2(srate)%1):
1765 print(
'BayesWave only supports srates which are powers of 2, you have specified a srate of '+
str(srate)+
' Hertz.')
1766 print(
'Instead, a srate of '+
str(np.power(2., np.ceil(np.log2(srate))))+
'Hz will be used for the BayesWave PSD estimation.')
1767 print(
'The main LALInference job will stil use srate '+
str(srate)+
' Hertz.')
1768 bw_srate = np.power(2., np.ceil(np.log2(srate)))
        for ifokey, seg in node.scisegs.items():
            dfnode = seg.get_df_node()
            if self.config.has_option('condor', 'bayeswave') and not co_test:
                bwPSDpath = os.path.join(roqeventpath, 'BayesWave_PSD_' + ifo + '/')
                if not os.path.isdir(bwPSDpath):
                    mkdirs(bwPSDpath)
                if ifo not in bayeswavepsdnode:
                    bayeswavepsdnode[ifo] = self.add_bayeswavepsd_node(ifo)
                    bayeswavepsdnode[ifo].add_var_arg('--bayesLine')
                    bayeswavepsdnode[ifo].add_var_arg('--cleanOnly')
                    bayeswavepsdnode[ifo].add_var_arg('--checkpoint')
                    bayeswavepsdnode[ifo].set_output_dir(bwPSDpath)
                    bayeswavepsdnode[ifo].set_trig_time(end_time)
                    bayeswavepsdnode[ifo].set_seglen(bw_seglen)
                    bayeswavepsdnode[ifo].set_psdlength(bw_seglen)
                    bayeswavepsdnode[ifo].set_srate(bw_srate)
                    slide = 0
                    if ifo in event.timeslides:
                        slide = event.timeslides[ifo]
                    if segstart >= seg.start() and segend <= seg.end():
                        if not self.config.has_option('lalinference', 'fake-cache'):
                            bayeswavepsdnode[ifo].add_ifo_data(ifo, seg, self.channels[ifo], timeslide=slide)
                        else:
                            fakecachefiles = ast.literal_eval(self.config.get('lalinference', 'fake-cache'))
                            bayeswavepsdnode[ifo].add_fake_ifo_data(ifo, seg, fakecachefiles[ifo],
                                                                    self.channels[ifo], timeslide=slide)
                    if self.config.has_option('lalinference', 'flow'):
                        flow_ifo = ast.literal_eval(self.config.get('lalinference', 'flow'))[ifo]
                        bayeswavepsdnode[ifo].flows[ifo] = np.power(2, np.floor(np.log2(flow_ifo)))
                        print('BayesWave requires f_low to be a power of 2, so f_low for ' + ifo
                              + ' has been changed from ' + str(flow_ifo) + ' to '
                              + str(np.power(2, np.floor(np.log2(flow_ifo))))
                              + ' Hz (for the BayesWave job only; in the main LALInference jobs f_low will still be '
                              + str(flow_ifo) + ' Hz)')
                    bayeswavepsdnode[ifo].set_seed(self.randomseed)
                    bayeswavepsdnode[ifo].set_chainseed(self.randomseed + event.event_id)
                    if self.dataseed:
                        bayeswavepsdnode[ifo].set_dataseed(self.dataseed + event.event_id)
                    else:
                        bayeswavepsdnode[ifo].set_dataseed(self.randomseed + event.event_id)
                if ifo not in bayeswavepostnode:
                    bayeswavepostnode[ifo] = self.add_bayeswavepost_node(ifo, parent=bayeswavepsdnode[ifo])
                    bayeswavepostnode[ifo].add_var_arg('--0noise')
                    bayeswavepostnode[ifo].add_var_arg('--lite')
                    bayeswavepostnode[ifo].add_var_arg('--bayesLine')
                    bayeswavepostnode[ifo].add_var_arg('--cleanOnly')
                    bayeswavepostnode[ifo].set_output_dir(bwPSDpath)
                    bayeswavepostnode[ifo].set_trig_time(end_time)
                    bayeswavepostnode[ifo].set_seglen(bw_seglen)
                    bayeswavepostnode[ifo].set_psdlength(bw_seglen)
                    bayeswavepostnode[ifo].set_srate(bw_srate)
                    bayeswavepost_fakecache = 'interp:' + bwPSDpath + ifo + '_fairdraw_asd.dat'
                    slide = 0
                    if ifo in event.timeslides:
                        slide = event.timeslides[ifo]
                    if segstart >= seg.start() and segend <= seg.end():
                        bayeswavepostnode[ifo].add_fake_ifo_data(ifo, seg, bayeswavepost_fakecache,
                                                                 self.channels[ifo], timeslide=slide)
                    if self.config.has_option('lalinference', 'flow'):
                        bayeswavepostnode[ifo].flows[ifo] = np.power(
                            2, np.floor(np.log2(ast.literal_eval(self.config.get('lalinference', 'flow'))[ifo])))
                    bayeswavepostnode[ifo].set_seed(self.randomseed)
                    bayeswavepostnode[ifo].set_chainseed(self.randomseed + event.event_id)
                    if self.dataseed:
                        bayeswavepostnode[ifo].set_dataseed(self.dataseed + event.event_id)
                    else:
                        bayeswavepostnode[ifo].set_dataseed(self.randomseed + event.event_id)
                if self.config.has_option('condor', 'bayesline') or self.config.getboolean('analysis', 'roq'):
                    if gotdata and event.event_id not in self.prenodes.keys():
                        if prenode not in self.get_nodes():
                            self.add_node(prenode)
                            if self.config.getboolean('analysis', 'roq'):
                                computeroqweightsnode[ifo] = self.add_rom_weights_node(ifo, prenode)
                            if self.config.has_option('input', 'injection-file'):
                                freqDataFile = os.path.join(roqeventpath,
                                                            'data-dump' + ifo + '-freqDataWithInjection.dat')
                            else:
                                freqDataFile = os.path.join(roqeventpath, 'data-dump' + ifo + '-freqData.dat')
                            prenode.add_output_file(freqDataFile)
                            prenode.add_output_file(os.path.join(roqeventpath, 'data-dump' + ifo + '-PSD.dat'))
                            if self.config.has_option('condor', 'bayesline'):
                                bayeslinenode[ifo] = self.add_bayesline_node(ifo, prenode)
                                bayeslinenode[ifo].add_var_arg('-i ' + freqDataFile)
                                bayeslinenode[ifo].add_input_file(freqDataFile)
                                bayeslinenode[ifo].add_var_arg(
                                    '-o ' + os.path.join(roqeventpath, 'BayesLine_PSD_' + ifo + '.dat'))
                                bayeslinenode[ifo].add_output_file(
                                    os.path.join(roqeventpath, 'BayesLine_PSD_' + ifo + '.dat'))
                            if self.config.getboolean('analysis', 'roq'):
                                computeroqweightsnode[ifo].add_var_arg('--fHigh ' + str(prenode.fhighs[ifo]))
                                computeroqweightsnode[ifo].add_file_opt('data', freqDataFile)
                                computeroqweightsnode[ifo].add_file_opt(
                                    'psd', os.path.join(roqeventpath, 'data-dump' + ifo + '-PSD.dat'))
                                computeroqweightsnode[ifo].add_file_opt('out', roqeventpath,
                                                                        file_is_output_file=True)
                                computeroqweightsnode[ifo].add_output_file(
                                    os.path.join(roqeventpath, 'weights_quadratic_' + ifo + '.dat'))
                        if self.config.has_option('condor', 'bayesline'):
                            self.prenodes[event.event_id] = (prenode, bayeslinenode)
                        if self.config.getboolean('analysis', 'roq'):
                            self.prenodes[event.event_id] = (prenode, computeroqweightsnode)
                if self.config.has_option('condor', 'bayesline') or self.config.getboolean('analysis', 'roq'):
                    node.add_parent(self.prenodes[event.event_id][1][ifokey])
            if dfnode is not None and dfnode not in self.get_nodes():
                if not self.config.has_option('lalinference', 'fake-cache'):
                    self.add_node(dfnode)
        if not gotdata:
            print('no data found for time %f' % (end_time))
            return None, bayeswavepsdnode, bayeswavepostnode
        if extra_options is not None:
            for opt in extra_options.keys():
                node.add_var_arg('--' + opt + ' ' + extra_options[opt])
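        # For illustration only (hypothetical value): an extra_options dict of
        # the form
        #     {'time-min': '1083759273'}
        # expands, via the loop above, into the engine command-line argument
        #     --time-min 1083759273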
        if self.config.has_option('input', 'injection-file'):
            node.set_injection(self.config.get('input', 'injection-file'), event.event_id)
            prenode.set_injection(self.config.get('input', 'injection-file'), event.event_id)
            if self.config.has_option('condor', 'bayeswave') and bayeswavepsdnode and bayeswavepostnode:
                bayeswavepsdnode[ifo].set_injection(self.config.get('input', 'injection-file'), event.event_id)
                bayeswavepostnode[ifo].set_injection(self.config.get('input', 'injection-file'), event.event_id)
        if self.config.has_option('input', 'burst-injection-file'):
            node.set_injection(self.config.get('input', 'burst-injection-file'), event.event_id)
            prenode.set_injection(self.config.get('input', 'burst-injection-file'), event.event_id)
        if self.config.has_option('lalinference', 'seglen'):
            node.set_seglen(self.config.getfloat('lalinference', 'seglen'))
        elif self.config.has_option('engine', 'seglen'):
            node.set_seglen(self.config.getfloat('engine', 'seglen'))
        else:
            node.set_seglen(event.duration)
        if self.config.has_option('condor', 'bayeswave') and bayeswavepsdnode and bayeswavepostnode:
            node.set_psdlength(bw_seglen)
        if self.config.has_option('input', 'psd-length'):
            node.set_psdlength(self.config.getint('input', 'psd-length'))
            prenode.set_psdlength(self.config.getint('input', 'psd-length'))
            if self.config.has_option('condor', 'bayeswave') and bayeswavepsdnode and bayeswavepostnode:
                bayeswavepsdnode[ifo].set_psdlength(self.config.getint('input', 'psd-length'))
                bayeswavepostnode[ifo].set_psdlength(self.config.getint('input', 'psd-length'))
        if self.config.has_option('input', 'psd-start-time'):
            node.set_psdstart(self.config.getfloat('input', 'psd-start-time'))
            prenode.set_psdstart(self.config.getfloat('input', 'psd-start-time'))
            if self.config.has_option('condor', 'bayeswave') and bayeswavepsdnode and bayeswavepostnode:
                bayeswavepsdnode[ifo].set_psdstart(self.config.getfloat('input', 'psd-start-time'))
                bayeswavepostnode[ifo].set_psdstart(self.config.getfloat('input', 'psd-start-time'))
        node.set_max_psdlength(self.config.getint('input', 'max-psd-length'))
        prenode.set_max_psdlength(self.config.getint('input', 'max-psd-length'))
        node.set_padding(self.config.getint('input', 'padding'))
        prenode.set_padding(self.config.getint('input', 'padding'))
        if self.config.has_option('condor', 'bayeswave') and bayeswavepsdnode and bayeswavepostnode:
            bayeswavepsdnode[ifo].set_max_psdlength(self.config.getint('input', 'max-psd-length'))
            bayeswavepsdnode[ifo].set_padding(self.config.getint('input', 'padding'))
            bayeswavepostnode[ifo].set_max_psdlength(self.config.getint('input', 'max-psd-length'))
            bayeswavepostnode[ifo].set_padding(self.config.getint('input', 'padding'))
        out_dir = os.path.join(self.basepath, 'engine')
        mkdirs(out_dir)
        node.set_output_file(os.path.join(out_dir, node.engine + '-' + str(event.event_id)
                                          + '-' + node.get_ifos()
                                          + '-' + str(node.get_trig_time())
                                          + '-' + str(node.id)))
        if self.config.getboolean('analysis', 'roq'):
            node.add_file_opt(ifo + '-roqweightsLinear',
                              os.path.join(roqeventpath, 'weights_linear_' + ifo + '.dat'))
            node.add_file_opt(ifo + '-roqweightsQuadratic',
                              os.path.join(roqeventpath, 'weights_quadratic_' + ifo + '.dat'))
            node.add_file_opt('roqtime_steps', os.path.join(roqeventpath, 'roq_sizes.dat'))
            node.add_file_opt('roq-times', os.path.join(roqeventpath, 'tcs.dat'))
            node.add_file_opt('roqnodesLinear', os.path.join(roqeventpath, 'fnodes_linear.dat'))
            node.add_file_opt('roqnodesQuadratic', os.path.join(roqeventpath, 'fnodes_quadratic.dat'))
        if self.config.has_option('condor', 'bayesline'):
            node.psds[ifo] = os.path.join(roqeventpath, 'BayesLine_PSD_' + ifo + '.dat')
            node.add_input_file(os.path.join(roqeventpath, 'BayesLine_PSD_' + ifo + '.dat'))
            prenode.psds[ifo] = os.path.join(roqeventpath, 'BayesLine_PSD_' + ifo + '.dat')
            prenode.add_input_file(os.path.join(roqeventpath, 'BayesLine_PSD_' + ifo + '.dat'))
        if self.config.has_option('condor', 'bayeswave') and bayeswavepostnode:
            bwpsdfile = os.path.join(roqeventpath,
                                     'BayesWave_PSD_' + ifo + '/post/clean/glitch_median_PSD_forLI_' + ifo + '.dat')
            node.psds[ifo] = bwpsdfile
            node.add_input_file(bwpsdfile)
            prenode.psds[ifo] = bwpsdfile
            prenode.add_input_file(bwpsdfile)
        for (opt, arg) in event.engine_opts.items():
            node.add_var_opt(opt, arg)
        if self.config.has_option('condor', 'bayeswave') and self.engine not in ['bayeswave', 'bayeswavepost']:
            node.add_parent(bayeswavepostnode[ifo])
            prenode.add_parent(bayeswavepostnode[ifo])
        return node, bayeswavepsdnode, bayeswavepostnode
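    # Sketch of how add_engine_node is typically called (illustrative; the
    # variable names below are hypothetical). It returns the engine node plus
    # the per-IFO BayesWave PSD/post node dictionaries so the caller can chain
    # further dependencies, or (None, psd_dict, post_dict) when no data exist:
    #     node, bwpsd, bwpost = dag.add_engine_node(event)
    #     if node is not None:
    #         ...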
    def add_results_page_node_pesummary(self, resjob=None, outdir=None, parent=None, extra_options=None,
                                        gzip_output=None, ifos=None, evstring=None, coherence=False):
        node = PESummaryResultsPageNode(resjob)
        if parent is not None:
            node.add_parent(parent)
            if coherence:
                infiles = [os.path.join(self.posteriorpath, 'posterior_%s_%s.hdf5' % (ifo, evstring))
                           for ifo in ifos]
                infiles.append(parent.get_pos_file())
            else:
                infiles = [parent.get_pos_file()]
        conffile = os.path.join(self.config.get('paths', 'webdir'), 'config.ini')
        node.add_input_file(conffile)
        inifiles = [conffile] * len(infiles)
        node.add_var_opt("samples", " ".join(infiles))
        for f in infiles:
            node.add_input_file(f)
        node.add_file_opt("config", " ".join(inifiles))
        ...
        node.add_var_opt('approximant', " ".join([approximant] * len(infiles)))
        calibration = []
        for ifo in ifos:
            if self.config.has_option("engine", "%s-spcal-envelope" % (ifo)):
                calibration_envelope = self.config.get("engine", "%s-spcal-envelope" % (ifo))
                node.add_input_file(calibration_envelope)
                calibration.append("%s:%s" % (ifo, calibration_envelope))
        if len(calibration) > 0:
            node.add_var_opt("calibration", " ".join(calibration))
        node.set_output_path(outdir)
        ...

    def add_results_page_node(self, resjob=None, outdir=None, parent=None, extra_options=None,
                              gzip_output=None, ifos=None):
        ...
        if parent is not None:
            node.add_parent(parent)
            infile = parent.get_pos_file()
            node.add_file_arg(infile)
            node.append_in_files(infile)
        node.set_output_path(outdir)
        if gzip_output is not None:
            node.set_gzip_output(gzip_output)
        if ifos is not None:
            if isinstance(ifos, list):
                ...

    def add_gracedb_start_node(self, gid, name='', parent=None, server=None):
        ...
        node.set_message(name + ' online parameter estimation started.')
        ...

    def add_gracedb_log_node(self, respagenode, gid, burst=False, server=None):
        ...
        node = GraceDBNode(self.gracedbjob, parent=respagenode, gid=gid, command='create log',
                           tag='pe', server=server)
        ...
        node.set_message("LALInference online parameter estimation finished. "
                         "<a href=" + resurl + "/posplots.html>results</a>")
        if self.config.has_option('analysis', 'add-lvem-tag'):
            if self.config.getboolean('analysis', 'add-lvem-tag'):
                ...
        ...
        node.set_filename(respagenode.webpath + '/corner/extrinsic.png')
        ...
        node.set_filename(respagenode.webpath + '/corner/intrinsic.png')
        ...
        node.set_filename(respagenode.webpath + '/corner/sourceFrame.png')
        ...

    def add_gracedb_FITSskymap_upload(self, event, engine=None):
        ...
        if engine == 'lalinferenceburst':
            prefix = 'LIB'
        elif engine is None:
            ...
            prefix = 'LALInference'
        if self.config.has_option('condor', 'skyarea'):
            if self.config.has_option('analysis', 'upload-to-gracedb'):
                if self.config.getboolean('analysis', 'upload-to-gracedb'):
                    ...
                    if self.config.has_option('analysis', 'add-lvem-tag'):
                        if self.config.getboolean('analysis', 'add-lvem-tag'):
                            ...
                    skynodes = filter(lambda x: isinstance(x, SkyMapNode), self.get_nodes())
                    for sk in skynodes:
                        ...
                        node.set_filename(sk.outfits)
                        node.set_message('%s FITS sky map' % prefix)
    def add_gracedb_info_node(self, respagenode, gid, analysis='LALInference', issky=False, prefix="LIB",
                              email=None, server=None):
        ...
        if respagenode is not None:
            samples = respagenode.posfile
            hdf5samples = respagenode.get_in_files()[0]
        else:
            # If no results page node is given, look one up in the DAG
            resnodes = filter(lambda x: isinstance(x, ResultsPageNode), self.get_nodes())
            ...
            samples = respagenode.posfile
            hdf5samples = respagenode.get_in_files()[0]
        ...
        if email is not None:
            node.set_email(email)
        if issky:
            skynodes = filter(lambda x: isinstance(x, SkyMapNode), self.get_nodes())
            for sk in skynodes:
                ...
                skymap = sk.outdir + '/%s.fits' % prefix
                message = ' %s FITS sky map' % prefix
            node.set_skymap(skymap)
            node.set_message(message)
        bci = respagenode.get_bcifile()
        ...
        bsn = respagenode.get_bsnfile()
        ...
        node.set_analysis(analysis)
        ...

    def add_rom_weights_node(self, ifo, parent=None):
        ...
        node = ROMNode(self.computeroqweights_job, ifo, parent.seglen, parent.flows[ifo])
        self.computeroqweightsnode[ifo] = node
        if parent is not None:
            node.add_parent(parent)
        ...

    def add_bayesline_node(self, ifo, parent=None):
        ...
        if parent is not None:
            node.add_parent(parent)
        ...

    def add_bayeswavepsd_node(self, ifo, parent=None):
        ...
        if parent is not None:
            node.add_parent(parent)
        ...

    def add_bayeswavepost_node(self, ifo, parent=None):
        ...
        if parent is not None:
            node.add_parent(parent)
        ...
class LALInferenceDAGJob(pipeline.CondorDAGJob):
    """
    Class to define DAG Jobs for the lalinference pipeline.
    Handles some common condor settings like requirements, accounting groups
    and file transfer.

    Parameters:
    cp : configparser object
    sharedfs : if False, will map files to local paths on the execute node
    """
    def __init__(self, cp=None, sharedfs=False, requires_frames=False):
        self.transfer_files = not sharedfs
        self.requirements = []
        self.requires_frames = requires_frames
        self.x509path = None
        self.osg = False
        ...
        from configparser import ConfigParser
        ...
        if cp.has_option('condor', 'sharedfs'):
            if cp.getboolean('condor', 'sharedfs'):
                self.transfer_files = False
        if cp.has_option('analysis', 'osg'):
            ...
            self.osg = cp.getboolean('analysis', 'osg')
        ...
        self.add_condor_cmd('request_disk', '500M')
        self.add_condor_cmd('getenv', 'True')
        ...
        self.add_condor_cmd('max_retries', '5')
        ...
        if cp.has_option('condor', 'requirements'):
            self.add_requirement(cp.get('condor', 'requirements'))
        if cp.has_option('condor', 'accounting_group'):
            self.add_condor_cmd('accounting_group', cp.get('condor', 'accounting_group'))
        if cp.has_option('condor', 'accounting_group_user'):
            self.add_condor_cmd('accounting_group_user', cp.get('condor', 'accounting_group_user'))
        if cp.has_option('condor', 'queue'):
            self.add_condor_cmd('+' + cp.get('condor', 'queue'), 'True')
        if self.transfer_files:
            self.add_condor_cmd('transfer_executable', 'False')
            self.add_condor_cmd('transfer_input_files', '$(macroinput)')
            self.add_condor_cmd('transfer_output_files', '$(macrooutput)')
            self.add_condor_cmd('transfer_output_remaps', '"$(macrooutputremaps)"')
            ...
            self.add_condor_cmd('should_transfer_files', 'YES')
            self.add_condor_cmd('when_to_transfer_output', 'ON_EXIT_OR_EVICT')
            ...
            self.add_condor_cmd('+PreCmd', '"lalinf_touch_output"')
            self.add_condor_cmd('+PreArguments', '"$(macrooutput)"')
            ...
            self.add_condor_cmd('stream_output', True)
            self.add_condor_cmd('stream_error', True)
        else:
            self.add_condor_cmd('should_transfer_files', 'NO')
        ...
        if not self.osg:
            self.add_condor_cmd('+DESIRED_Sites', '"nogrid"')
            ...
            self.add_condor_cmd('+flock_local', True)
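    # For illustration (hypothetical values): with file transfer enabled the
    # generated submit file contains lines like
    #     transfer_input_files   = $(macroinput)
    #     transfer_output_files  = $(macrooutput)
    #     transfer_output_remaps = "$(macrooutputremaps)"
    # and each node fills in the macros, e.g.
    #     macroinput  = lalinferencenest.ini,H1.cache
    #     macrooutput = engine/lalinferencenest-0-H1-123.hdf5
    # so Condor stages inputs to the worker and maps outputs back to the
    # submit-side paths via the remaps string built in LALInferenceDAGNode.finalize().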
    def set_x509path(self, path):
        """
        Copy the x509 proxy file to the path given, and use it in the DAG.
        Requires ligo-proxy-init to be run before generating the DAG.
        """
        from shutil import copyfile
        from subprocess import check_output, CalledProcessError
        try:
            res = check_output(['grid-proxy-info', '-path'])
        except CalledProcessError as e:
            print('Error: x509proxy file not found. Please run ligo-proxy-init before creating DAG')
            ...
        tmp_path = res.strip()
        print('Using x509 proxy from ', tmp_path)
        copyfile(tmp_path, path)
        self.x509path = path

    def add_requirement(self, requirement):
        """
        Add a requirement to the condor submit file.
        """
        self.requirements.append(requirement)

    def write_sub_file(self):
        """
        Over-load CondorDAGJob.write_sub_file to write the requirements.
        """
        ...
        self.set_x509path(os.path.join(
            self.get_config('paths', 'basedir'), 'x509proxy'))
        ...
        self.add_condor_cmd('use_x509userproxy', 'True')
        self.add_condor_cmd('x509userproxy', self.x509path)
        ...
        if self.requirements:
            self.add_condor_cmd('requirements',
                                '&&'.join('({0})'.format(r) for r in self.requirements))
        super(LALInferenceDAGJob, self).write_sub_file()


class LALInferenceDAGSharedFSJob(LALInferenceDAGJob):
    def __init__(self, *args, **kwargs):
        kwargs['sharedfs'] = True
        super(LALInferenceDAGSharedFSJob, self).__init__(*args, **kwargs)
class LALInferenceDAGNode(pipeline.CondorDAGNode):
    """
    Node for LALInference DAG jobs. Performs filename mapping
    to run in condor's local filesystem.
    """
    def add_output_file(self, fname):
        if self.job().transfer_files:
            fname = os.path.basename(
                os.path.relpath(fname,
                                start=self.job().get_config('paths', 'basedir')))
        self.add_output_macro(fname)

    def add_input_file(self, filename):
        filename = os.path.relpath(filename, start=self.job().get_config('paths', 'basedir'))
        self.add_input_macro(filename)

    def add_checkpoint_file(self, filename):
        filename = os.path.relpath(filename, start=self.job().get_config('paths', 'basedir'))
        self.add_checkpoint_macro(filename)

    def add_file_opt(self, opt, filename, file_is_output_file=False):
        relfile = os.path.relpath(filename, start=self.job().get_config('paths', 'basedir'))
        if self.job().transfer_files:
            self.add_var_opt(opt, os.path.join('.', os.path.basename(relfile)))
        else:
            self.add_var_opt(opt, relfile)
        if file_is_output_file:
            self.add_output_file(filename)

    def finalize(self):
        outputremaps = ';'.join(['{0}={1}'.format(os.path.basename(f), f)
                                 for f in self.get_output_files()])
        if self.job().transfer_files:
            self.add_macro('macrooutputremaps', outputremaps)
            self.add_input_file(os.path.join(self.job().get_config('paths', 'basedir'),
                                             'lalinf_touch_output'))
            for f in self.job().get_input_files():
                self.add_input_file(f)
        super(LALInferenceDAGNode, self).finalize()
class EngineJob(LALInferenceDAGJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    def __init__(self, cp, submitFile, logdir, engine, ispreengine=False, sharedfs=False,
                 requires_frames=False, *args, **kwargs):
        self.engine = engine
        basepath = cp.get('paths', 'basedir')
        universe = 'vanilla'
        if ispreengine is True:
            roqpath = os.path.join(basepath, 'ROQdata')
            ...
            exe = cp.get('condor', self.engine)
        else:
            if self.engine == 'lalinferencemcmc':
                exe = cp.get('condor', 'mpiwrapper')
            elif self.engine == 'lalinferencenest' or self.engine == 'lalinferenceburst':
                exe = cp.get('condor', self.engine)
            else:
                print('LALInferencePipe: Unknown engine node type %s!' % (self.engine))
                ...
        pipeline.CondorDAGJob.__init__(self, universe, exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGJob.__init__(self, cp, sharedfs=sharedfs, requires_frames=requires_frames)

        self.resume = False
        if cp.has_option('engine', 'resume'):
            self.resume = True
            self.add_condor_cmd('+SuccessCheckpointExitCode', '77')
            self.add_condor_cmd('+WantFTOnCheckpoint', 'True')
            self.add_opt('checkpoint-exit-code', '77')
        self.set_sub_file(os.path.abspath(submitFile))
        if self.engine == 'lalinferencemcmc':
            ...
            if cp.has_section('mpi'):
                if ispreengine is False:
                    ...
                    if cp.has_option('mpi', 'mpi_task_count'):
                        self.mpi_task_count = cp.get('mpi', 'mpi_task_count')
        if self.engine == 'lalinferencenest':
            self.add_condor_cmd('request_memory', '4000')
        ...
        self.add_condor_cmd('stream_output', True)
        self.add_condor_cmd('stream_error', True)
        ...
        if cp.has_section(self.engine):
            ...
            self.add_ini_opts(cp, self.engine)
        ...
        for k in cp.options('engine'):
            arg = cp.get('engine', k)
            if os.path.isfile(arg):
                self.add_input_file(arg)
                self.add_opt(k, os.path.join('.', os.path.basename(arg)))
        ...
        self.set_stdout_file(os.path.join(logdir, 'lalinference-$(cluster)-$(process)-$(node).out'))
        self.set_stderr_file(os.path.join(logdir, 'lalinference-$(cluster)-$(process)-$(node).err'))
        ...
        if self.engine == 'lalinferencenest':
            self.add_condor_cmd('environment', 'OMP_NUM_THREADS=1')
        if cp.has_option('condor', 'notification'):
            self.set_notification(cp.get('condor', 'notification'))
            if cp.has_option('resultspage', 'email'):
                self.add_condor_cmd('notify_user', cp.get('resultspage', 'email'))
class EngineNode(LALInferenceDAGNode):
    new_id = itertools.count()

    def __init__(self, li_job):
        super(EngineNode, self).__init__(li_job)
        ...
        self.id = next(EngineNode.new_id)
        ...

    def set_seed(self, seed):
        self.add_var_opt('randomseed', str(int(seed) + self.id))

    def set_srate(self, srate):
        self.add_var_opt('srate', str(srate))

    def set_trigSNR(self, trigSNR):
        self.add_var_opt('trigger-snr', str(trigSNR))

    def set_horizon_distance(self, horizon_distance):
        self.add_var_opt('distance-max', str(horizon_distance))

    def set_dataseed(self, seed):
        self.add_var_opt('dataseed', str(seed))

    def set_chainseed(self, seed):
        self.add_var_opt('chainseed', str(seed))

    def get_ifos(self):
        return ''.join(map(str, self.ifos))

    def get_psd_files(self):
        pathroot = self.posfile
        if pathroot[-3:] == '.00':
            pathroot = pathroot[:-3]
        st = ''
        for i in self.ifos:
            tmpst = "%s%s-PSD.dat," % (pathroot, i)
            st += tmpst
            ...
        return st[:-1]

    def get_snr_file(self):
        ...
        pathroot = self.fileroot
        ...
        pathroot = self.posfile
        if pathroot[-3:] == '.00':
            pathroot = pathroot[:-3]
        st = "%s_snr.txt" % pathroot
        ...
        return st

    def set_trig_time(self, time):
        """
        Set the end time of the signal for the centre of the prior in time.
        """
        ...
        self.add_var_opt('trigtime', '{:.9f}'.format(float(time)))

    def set_event_number(self, event):
        """
        Set the event number in the injection XML.
        """
        if event is not None:
            ...
            self.add_var_opt('event', str(event))

    def set_injection(self, injfile, event):
        """
        Set a software injection to be performed.
        """
        ...

    def add_fake_ifo_data(self, ifo, sciseg, fake_cache_name, fake_channel_name, timeslide=0):
        """
        Dummy method to set up fake data without needing to run datafind.
        """
        self.ifos.append(ifo)
        ...
        self.channels[ifo] = fake_channel_name
        ...

    def add_ifo_data(self, ifo, sciseg, channelname, timeslide=0):
        if self.ifos != ifo:
            self.ifos.append(ifo)
        ...
        parent = sciseg.get_df_node()
        if parent is not None:
            self.add_parent(parent)
            df_output = parent.get_output()
            ...
            self.job().requires_frames = True
        ...

    def set_cache(self, filename, ifo):
        """
        Add a cache file from LIGODataFind. Based on the same method from pipeline.AnalysisNode.
        """
        ...

    def _finalize_ifo_data(self):
        """
        Add final list of IFOs and data to analyse to command line arguments.
        """
        for ifo in self.ifos:
            self.add_var_arg('--ifo ' + ifo)
            ...
            self.add_var_opt('%s-cache' % (ifo), self.cachefiles[ifo])
            ...
            self.add_var_opt('%s-channel' % (ifo), self.channels[ifo])
            if self.flows:
                self.add_var_opt('%s-flow' % (ifo), self.flows[ifo])
            if self.fhighs:
                self.add_var_opt('%s-fhigh' % (ifo), self.fhighs[ifo])
            ...
            if os.path.exists(self.psds[ifo]):
                ...
                self.add_var_opt('%s-psd' % (ifo), self.psds[ifo])
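        # Illustrative example (IFO names, channels and paths are hypothetical):
        # for self.ifos = ['H1', 'L1'] the loop above emits per-IFO arguments
        # such as
        #     --ifo H1 --H1-cache H1.cache --H1-channel H1:GDS-CALIB_STRAIN --H1-flow 20
        #     --ifo L1 --L1-cache L1.cache --L1-channel L1:GDS-CALIB_STRAIN --L1-flow 20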
2618 """ The logic here is the following:
2619 The CBC code starts from the earliest commont time, but that means that
if you run on *the same trigtime* the PSD start
and PSDlength you
'll get will be different, depending on wheather you are running on only one event or several, and the exact position of the event you are interested in in the list of times.
2620 Instead for each event (including single IFO runs) we do:
2622 b) set PSDlengh=maxPSD (requested by the user
or equal to 32seglen)
2623 c) go define GPSstart= trigtime - (PSDlength + seglen + padding - 2)
2625 By definition this means that GPSstart+ PSDlengh
with never overlap
with trigtime. Furthermore running on the same event will lead to the same PSDstart
and lenght, no matter of whether that
is a one-event
or multi-event run.
2626 We should check that the PSDstart so obtained
is in science mode. This
is what the
while loop 9 lines below
is meant
for.
2643 print(
'Unable to find data for event {0}'.format(trig_time))
2644 if length<maxLength:
2645 print(
'Trimmed analysis data for trigger {}. Analysing {} -- {} ({}s of data)'.format(trig_time, self.
GPSstart, self.
GPSstart+length,length))
2653 self.add_var_opt(
'psdstart',
'{:.9f}'.format(self.
GPSstart))
2658 self.add_var_opt(
'psdlength',self.
psdlength)
2659 self.add_var_opt(
'seglen',self.
seglen)
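        # Worked example with hypothetical numbers: for trigtime = 1187008882,
        # PSDlength = 1024 s, seglen = 8 s and padding = 16 s,
        #     GPSstart = 1187008882 - (1024 + 8 + 16 - 2) = 1187007836,
        # so the PSD estimation stretch [GPSstart, GPSstart + 1024) ends before
        # the trigger time and is identical however many events are analysed.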
class LALInferenceNestNode(EngineNode):
    def __init__(self, li_job):
        super(LALInferenceNestNode, self).__init__(li_job)
        ...


class LALInferenceBurstNode(LALInferenceNestNode):
    def __init__(self, li_job):
        super(LALInferenceNestNode, self).__init__(li_job)
        ...

    def set_injection(self, injfile, event):
        """
        Set a software injection to be performed.
        """
        ...


class LALInferenceMCMCNode(EngineNode):
    def __init__(self, li_job):
        super(LALInferenceMCMCNode, self).__init__(li_job)
        ...
        self.add_var_opt('mpirun', li_job.mpirun)
        self.add_var_opt('np', str(li_job.mpi_task_count))
        self.add_var_opt('executable', li_job.binary)

    def set_output_file(self, filename):
        ...
        if self.job().resume:
            ...
            for i in range(1, int(self.job().mpi_task_count)):
                tempfile = self.posfile + '.' + '{:d}'.format(i).zfill(2)
                ...
        if self.job().resume:
            ...


class LALInferenceDataDumpNode(EngineNode):
    def __init__(self, li_job):
        super(LALInferenceDataDumpNode, self).__init__(li_job)
        ...
class BayesWavePSDJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class for a BayesWave job.
    Make sure all necessary commands are given for O3 BayesWave.
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'bayeswave')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        if cp.has_section('bayeswave'):
            self.add_ini_opts(cp, 'bayeswave')
        self.set_sub_file(submitFile)
        self.set_stdout_file(os.path.join(logdir, 'bayeswavepsd-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'bayeswavepsd-$(cluster)-$(process).err'))
        ...
        self.add_condor_cmd('transfer_executable', 'False')
        self.add_condor_cmd('getenv', 'True')
        self.add_condor_cmd('request_memory', cp.get('condor', 'bayeswave_request_memory'))
        ...
        self.add_condor_cmd('stream_output', True)
        self.add_condor_cmd('stream_error', True)
        self.add_condor_cmd('+SuccessCheckpointExitCode', 77)
        self.add_condor_cmd('+WantFTOnCheckpoint', True)
        self.add_condor_cmd('should_transfer_files', 'YES')
        self.add_condor_cmd('when_to_transfer_output', 'ON_EXIT_OR_EVICT')
        self.add_condor_cmd('transfer_input_files', '$(macroinput),/usr/bin/mkdir,caches')
        self.add_condor_cmd('transfer_output_files', '$(macrooutput)')
        self.add_condor_cmd('+PreCmd', '"mkdir"')
        self.add_condor_cmd('+PreArguments', '"-p $(macrooutputDir)"')


def topdir(path):
    """
    Returns the top directory in a path, e.g. topdir('a/b/c') returns 'a'.
    """
    a, b = os.path.split(path)
    if a:
        return topdir(a)
    else:
        return b


class BayesWavePSDNode(LALInferenceDAGNode):
    def __init__(self, bayeswavepsd_job):
        super(BayesWavePSDNode, self).__init__(bayeswavepsd_job)
        ...

    def set_output_dir(self, dirname):
        path = os.path.relpath(dirname,
                               start=self.job().get_config('paths', 'basedir'))
        self.add_var_opt('outputDir', path)
        ...
        self.add_output_file(topdir(path))
class BayesWavePostJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class for a BayesWavePost job.
    Make sure all necessary commands are given for O3 BayesWavePost.
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'bayeswavepost')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        if cp.has_section('bayeswave'):
            self.add_ini_opts(cp, 'bayeswave')
        self.set_sub_file(submitFile)
        self.add_condor_cmd('transfer_executable', 'False')
        self.set_stdout_file(os.path.join(logdir, 'bayeswavepost-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'bayeswavepost-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')
        self.add_condor_cmd('request_memory', cp.get('condor', 'bayeswavepost_request_memory'))
        self.add_condor_cmd('stream_output', True)
        self.add_condor_cmd('stream_error', True)
        self.add_condor_cmd('+SuccessCheckpointExitCode', 77)
        self.add_condor_cmd('+WantFTOnCheckpoint', True)
        self.add_condor_cmd('should_transfer_files', 'YES')
        self.add_condor_cmd('when_to_transfer_output', 'ON_EXIT_OR_EVICT')
        self.add_condor_cmd('transfer_input_files', '$(workdir)')
        self.add_condor_cmd('transfer_output_files', '$(workdir)')


class BayesWavePostNode(LALInferenceDAGNode):
    def __init__(self, bayeswavepost_job):
        super(BayesWavePostNode, self).__init__(bayeswavepost_job)
        ...

    def set_output_dir(self, dirname):
        path = os.path.relpath(dirname,
                               start=self.job().get_config('paths', 'basedir'))
        self.add_var_opt('outputDir', path)
        ...
        self.add_macro('workdir', topdir(path))
class PESummaryResultsPageJob(LALInferenceDAGSharedFSJob, pipeline.AnalysisJob):
    """Class to handle the creation of the summary page job using `PESummary`
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'resultspage')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(os.path.abspath(submitFile))
        self.set_stdout_file(os.path.join(logdir, 'resultspage-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'resultspage-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')
        try:
            self.add_condor_cmd('request_memory', cp.get('condor', 'resultspage_memory'))
        except NoOptionError:
            self.add_condor_cmd('request_memory', '2000')


class ResultsPageJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'resultspage')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(os.path.abspath(submitFile))
        self.set_stdout_file(os.path.join(logdir, 'resultspage-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'resultspage-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')
        try:
            self.add_condor_cmd('request_memory', cp.get('condor', 'resultspage_memory'))
        except NoOptionError:
            self.add_condor_cmd('request_memory', '2000')
        self.add_ini_opts(cp, 'resultspage')
        ...
        if cp.has_option('results', 'skyres'):
            self.add_opt('skyres', cp.get('results', 'skyres'))
is not None:
2877 entries = glob(path+
"/*")
2878 if "%s/home.html" %(path)
in entries:
2885 if option ==
"existing":
2886 self.add_var_opt(
'existing_webdir',path)
2888 self.add_var_opt(
'webdir',path)
2903 Set the event number in the injection XML.
2905 if event
is not None:
2907 self.add_var_opt(
'eventnum',
str(event))
2917 for num, i
in enumerate(ifos):
2918 psds +=
" %s:%s" %(i, st[num])
2919 self.add_var_opt(
'psd',psds)
2922 if cachefiles == {}:
2924 if channels
is None:
2928 for i
in channels.keys():
2929 gwdata +=
" %s:%s" % (channels[i], cachefiles[i])
2930 self.add_var_opt(
'gwdata',gwdata)
2938 for num, i
in enumerate(ifos):
2939 calibration +=
" %s:%s" %(i, st[num])
2940 self.add_var_opt(
'calibration',calibration)
2945 l = labels.split(
",")
2946 self.add_var_opt(
'labels',
" ".join(l))
2961 Add a parent node which is one of the engine nodes
2962 And automatically set options accordingly
2964 self.add_parent(node)
2965 self.add_file_arg(node.get_pos_file())
2966 self.infiles.append(node.get_pos_file())
2974 self.add_var_opt(
'gracedb',gid)
2992 super(ResultsPageNode,self).
__init__(results_page_job)
2993 if outpath
is not None:
3003 self.
add_file_opt(
'archive',path,file_is_output_file=
True)
3008 self.add_var_opt(
'outpath',path)
3011 self.
posfile=os.path.join(path,
'posterior_samples.dat')
3027 Set the event number in the injection XML.
3029 if event
is not None:
3031 self.add_var_arg(
'--eventnum '+
str(event))
3039 for i
in st.split(
','):
3041 self.add_var_arg(
'--psdfiles %s'%st)
3053 self.add_var_arg(
'--trig '+coinc)
3057 self.
infiles.append(this_file)
3061 Add a parent node which is one of the engine nodes
3062 And automatically set options accordingly
3064 self.add_parent(node)
3065 self.add_file_arg(node.get_pos_file())
class CoherenceTestJob(LALInferenceDAGJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class defining the coherence test job to be run as part of a pipeline.
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'coherencetest')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGJob.__init__(self, cp)
        self.add_opt('new-coherent-incoherent-noise', '')
        self.add_condor_cmd('getenv', 'True')
        self.set_stdout_file(os.path.join(logdir, 'coherencetest-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'coherencetest-$(cluster)-$(process).err'))
        self.set_sub_file(os.path.abspath(submitFile))


class CoherenceTestNode(LALInferenceDAGNode):
    """
    Class defining the node for the coherence test.
    """
    def __init__(self, coherencetest_job, outfile=None):
        super(CoherenceTestNode, self).__init__(coherencetest_job)
        ...
        if outfile is not None:
            self.add_file_opt('outfile', outfile, file_is_output_file=True)

    def add_coherent_parent(self, node):
        """
        Add a parent node which is an engine node, and process its output files.
        """
        self.add_parent(node)
        ...

    def add_incoherent_parent(self, node):
        """
        Add a parent node which provides one of the single-ifo evidence values.
        """
        self.add_parent(node)
        ...

    def finalize(self):
        """
        Construct command line.
        """
        ...
        for inco in self.incoherent_parents:
            self.add_file_arg(inco.get_pos_file())
        super(CoherenceTestNode, self).finalize()
class MergeJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class defining a job which merges several parallel nested sampling or MCMC jobs into a single file.
    Input arguments:
        cp         - A configparser object containing the setup of the analysis
        submitFile - Path to store the submit file
        logdir     - A directory to hold the stderr, stdout files of the merge runs
        engine     - Set to either 'nest' or 'mcmc' for the appropriate behaviour
    """
    def __init__(self, cp, submitFile, logdir, engine='nest'):
        if engine == 'mcmc':
            exe = cp.get('condor', 'mergeMCMCscript')
        else:
            exe = cp.get('condor', 'mergeNSscript')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(os.path.abspath(submitFile))
        self.set_stdout_file(os.path.join(logdir, 'merge-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'merge-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')
        if cp.has_option('merge', 'npos') and engine == 'nest':
            self.add_opt('npos', cp.get('merge', 'npos'))


class MergeNode(LALInferenceDAGNode):
    """
    Class defining the DAG node for a NS merge job.
    Input arguments:
        merge_job - A MergeJob object
        parents   - iterable of parent LALInferenceNest nodes (must have get_ns_file() method)
        engine    - Set to either 'nest' or 'mcmc' for the appropriate behaviour
    """
    def __init__(self, merge_job, parents=None, engine='nest'):
        super(MergeNode, self).__init__(merge_job)
        if parents is not None:
            for parent in parents:
                if engine == 'nest':
                    self.add_engine_parent(parent)
                else:
                    self.add_combine_parent(parent)

    def add_engine_parent(self, parent):
        self.add_parent(parent)
        self.add_file_arg(parent.get_ns_file())

    def add_combine_parent(self, parent):
        self.add_parent(parent)
        self.add_file_arg(parent.get_pos_file())


class CombineMCMCJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class defining a job which combines several parallel MCMC chains into a single hdf5 file.
    Input arguments:
        cp         - A configparser object containing the setup of the analysis
        submitFile - Path to store the submit file
        logdir     - A directory to hold the stderr, stdout files of the merge runs
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'combinePTMCMCh5script')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(os.path.abspath(submitFile))
        self.set_stdout_file(os.path.join(logdir, 'combine-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'combine-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')


class CombineMCMCNode(LALInferenceDAGNode):
    """
    Class defining the DAG node for a MCMC combine job.
    Input arguments:
        combine_job - A CombineMCMCJob object
        parents     - iterable of parent LALInferenceMCMC nodes (must have get_ns_file() method)
    """
    def __init__(self, combine_job, parents=None):
        super(CombineMCMCNode, self).__init__(combine_job)
        if parents is not None:
            for parent in parents:
                self.add_engine_parent(parent)

    def add_engine_parent(self, parent):
        self.add_parent(parent)
        self.add_file_arg(parent.get_pos_file())

    def get_parent_posfile(self, parent):
        return parent.get_pos_file()

    def set_pos_output_file(self, file):
        self.add_file_opt('outfile', file, file_is_output_file=True)
class GraceDBJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class for a gracedb job.
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'gracedb')
        pipeline.CondorDAGJob.__init__(self, "scheduler", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(os.path.abspath(submitFile))
        self.set_stdout_file(os.path.join(logdir, 'gracedb-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'gracedb-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')
        ...


class GraceDBNode(LALInferenceDAGNode):
    """
    Run the gracedb executable to report the results.
    """
    def __init__(self, gracedb_job, gid=None, parent=None, message=None, upfile=None,
                 command='upload', tag=None, server=None):
        ...
        super(GraceDBNode, self).__init__(gracedb_job)
        if gid:
            self.set_gid(gid)
        if parent:
            if isinstance(parent, list):
                for p in parent:
                    self.add_parent(p)
            else:
                self.add_parent(parent)
        ...

    def set_gid(self, gid):
        """
        Set the GraceDB ID to log to.
        """
        self.gid = gid

    def finalize(self):
        ...
        self.add_var_arg(self.command)
        if self.tag:
            self.add_var_arg('--tag-name=' + self.tag)
        self.add_var_arg(str(self.gid))
        if self.filename:
            self.add_var_arg(self.filename + ' ')
        if self.message:
            self.add_var_arg("'{}'".format(self.message))
        if self.server:
            self.add_var_arg("--service-url %s" % self.server)
        ...
        super(GraceDBNode, self).finalize()
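# Illustrative sketch (not part of the module; names and values are hypothetical):
# GraceDBNode.finalize() above assembles an argument list corresponding to a
# command line of the form
#     gracedb upload --tag-name=pe G123456 skymap.fits 'LALInference FITS sky map'
# The helper below simply mirrors that assembly for documentation purposes.
def _example_gracedb_args(command='upload', tag='pe', gid='G123456',
                          filename='skymap.fits', message=None, server=None):
    args = [command]
    if tag:
        args.append('--tag-name=' + tag)
    args.append(str(gid))
    if filename:
        args.append(filename)
    if message:
        args.append("'{}'".format(message))
    if server:
        args.append('--service-url %s' % server)
    return args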
class ROMJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class for a ROM compute weights job.
    """
    def __init__(self, cp, submitFile, logdir):
        time_step = 0.000172895418228
        dt = 0.1
        ...
        exe = cp.get('condor', 'computeroqweights')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(submitFile)
        self.set_stdout_file(os.path.join(logdir, 'computeroqweights-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'computeroqweights-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')
        self.add_arg('-B ' + str(cp.get('paths', 'roq_b_matrix_directory')))
        if cp.has_option('engine', 'dt'):
            dt = cp.getfloat('engine', 'dt')
        self.add_arg('-t ' + str(dt))
        if cp.has_option('engine', 'time_step'):
            time_step = cp.get('engine', 'time_step')
        self.add_arg('-T ' + str(time_step))
        if cp.has_option('engine', 'approx'):
            self.add_arg('-a ' + str(cp.get('engine', 'approx')))
        if cp.has_option('condor', 'computeroqweights_memory'):
            required_memory = str(cp.get('condor', 'computeroqweights_memory'))
        else:
            roq_dir = cp.get('paths', 'roq_b_matrix_directory')
            params = np.genfromtxt(os.path.join(roq_dir, 'params.dat'), names=True)
            flow, fhigh, seglen = params['flow'], params['fhigh'], params['seglen']
            if os.path.exists(os.path.join(roq_dir, 'B_linear.npy')) and \
                    os.path.exists(os.path.join(roq_dir, 'B_quadratic.npy')):
                linear_basis_size = os.path.getsize(os.path.join(roq_dir, 'B_linear.npy'))
                quadratic_basis_size = os.path.getsize(os.path.join(roq_dir, 'B_quadratic.npy'))
            elif os.path.exists(os.path.join(roq_dir, 'selected_params_linear.npy')) and \
                    os.path.exists(os.path.join(roq_dir, 'selected_params_quadratic.npy')):
                linear_basis_size = 32 * (fhigh - flow) * seglen * \
                    len(np.load(os.path.join(roq_dir, 'selected_params_linear.npy')))
                quadratic_basis_size = 32 * (fhigh - flow) * seglen * \
                    len(np.load(os.path.join(roq_dir, 'selected_params_quadratic.npy')))
            roq_weights_size = 3 * ((fhigh - flow) * seglen) * \
                (float(dt + 0.05) * 2 / float(time_step)) * 2 * 8
            required_memory = int(
                (linear_basis_size + quadratic_basis_size + roq_weights_size)
                / (1024 * 1024) + 4096)
            print('Requesting {} MB of memory for '
                  'computeroqweights.'.format(required_memory))
        self.add_condor_cmd('request_memory', str(required_memory))
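# Illustrative helper (not part of the module): the request_memory estimate
# above, in MB, restated for given basis-file sizes in bytes and ROQ
# parameters. All inputs are hypothetical examples.
def _example_roq_memory_mb(linear_basis_size, quadratic_basis_size,
                           flow, fhigh, seglen, dt, time_step):
    # weights: 3 arrays of (fhigh-flow)*seglen frequency nodes, times the
    # number of time samples in a 2*(dt+0.05) window, times 2*8 bytes each
    roq_weights_size = 3 * ((fhigh - flow) * seglen) * \
        (float(dt + 0.05) * 2 / float(time_step)) * 2 * 8
    # convert bytes to MB and add a 4096 MB safety margin, as above
    return int((linear_basis_size + quadratic_basis_size + roq_weights_size)
               / (1024 * 1024) + 4096)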
class ROMNode(LALInferenceDAGNode):
    """
    Run the ROM compute weights script.
    """
    def __init__(self, computeroqweights_job, ifo, seglen, flow):
        super(ROMNode, self).__init__(computeroqweights_job)
        ...
        self.add_var_arg('--seglen ' + str(seglen))
        self.add_var_arg('--fLow ' + str(flow))
        self.add_var_arg('--ifo ' + ifo)


class BayesLineJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class for a BayesLine job.
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'bayesline')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(submitFile)
        self.set_stdout_file(os.path.join(logdir, 'bayesline-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'bayesline-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')


class BayesLineNode(LALInferenceDAGNode):
    """
    Run the BayesLine code.
    """
    def __init__(self, bayesline_job):
        super(BayesLineNode, self).__init__(bayesline_job)
        ...


class SkyMapNode(LALInferenceDAGNode):
    def __init__(self, skymap_job, posfile=None, parent=None, objid=None, prefix=None, outdir=None, ifos=None):
        super(SkyMapNode, self).__init__(skymap_job)
        self.prefix = prefix
        ...
        if parent:
            self.add_parent(parent)
        ...

    def set_outdir(self, outdir):
        self.outdir = outdir
        if self.prefix:
            name = self.prefix + '.fits'
        else:
            name = 'skymap.fits'
        self.outfits = os.path.join(outdir, name)

    def set_posfile(self, posfile):
        self.posfile = posfile

    def set_objid(self, objid):
        """
        Object ID for the fits file.
        """
        self.objid = objid

    def finalize(self):
        """
        Construct command line.
        """
        ...
        self.add_file_opt('samples', self.posfile)
        self.add_file_opt('fitsoutname', self.outfits, file_is_output_file=True)
        self.add_file_opt('outdir', self.outdir, file_is_output_file=True)
        if self.objid:
            self.add_var_opt('objid', self.objid)
        if self.ifos:
            self.add_var_opt('instruments', ' '.join(self.ifos))
        super(SkyMapNode, self).finalize()
class SkyMapJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Job to run ligo-skymap-from-samples.
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'ligo-skymap-from-samples')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(submitFile)
        self.set_stdout_file(os.path.join(logdir, 'samples2map-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'samples2map-$(cluster)-$(process).err'))
        ...
        self.add_ini_opts(cp, 'ligo-skymap-from-samples')
        if cp.has_option('engine', 'margdist') or cp.has_option('engine', 'margdist-comoving'):
            self.add_opt('disable-distance-map', '')
        self.add_opt('enable-multiresolution', '')


class PlotSkyMapJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Job to run ligo-skymap-plot.
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'ligo-skymap-plot')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(submitFile)
        self.set_stdout_file(os.path.join(logdir, 'plotskymap-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'plotskymap-$(cluster)-$(process).err'))
        ...
        self.add_ini_opts(cp, 'ligo-skymap-plot')


class PlotSkyMapNode(LALInferenceDAGNode):
    def __init__(self, plotskymap_job, parent=None, inputfits=None, output=None):
        super(PlotSkyMapNode, self).__init__(plotskymap_job)
        if parent:
            self.add_parent(parent)
        ...

    def set_output(self, outfile):
        self.outfile = outfile

    def set_input_fits(self, fitsfile):
        ...

    def finalize(self):
        """
        Construct command line.
        """
        ...
        self.add_var_opt('output', self.outfile)
        super(PlotSkyMapNode, self).finalize()
class PostRunInfoJob(LALInferenceDAGSharedFSJob, pipeline.CondorDAGJob, pipeline.AnalysisJob):
    def __init__(self, cp, submitFile, logdir):
        ...
        if not cp.has_option('condor', 'gdbinfo'):
            return
        exe = cp.get('condor', 'gdbinfo')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        LALInferenceDAGSharedFSJob.__init__(self, cp)
        self.set_sub_file(os.path.abspath(submitFile))
        self.set_stdout_file(os.path.join(logdir, 'gdbinfo-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'gdbinfo-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')
        self.add_condor_cmd('RequestMemory', '1000')


class PostRunInfoNode(LALInferenceDAGNode):
    def __init__(self, post_run_info_job, gid=None, parent=None, samples=None, server=None):
        super(PostRunInfoNode, self).__init__(post_run_info_job)
        ...
        if server is not None:
            self.set_server(server)
        ...

    def finalize(self):
        self.add_var_opt('analysis', self.analysis)
        super(PostRunInfoNode, self).finalize()

    def set_parent(self, parentnode):
        self.add_parent(parentnode)

    def set_samples(self, samples):
        if samples is not None:
            self.add_var_arg('--samples %s' % samples)

    def set_skymap(self, skymap):
        self.add_var_arg('--skymap %s' % skymap)

    def set_message(self, message):
        self.add_var_arg("--message '%s'" % message)

    def set_email(self, email):
        self.add_var_arg('--email %s' % email)

    def set_gid(self, gid):
        self.add_var_opt('gid', gid)

    def set_analysis(self, analysis):
        self.analysis = analysis

    def set_server(self, server):
        self.server = server
        if server is not None:
            self.add_var_arg('--server %s' % self.server)
class EvolveSamplesNode(pipeline.CondorDAGNode):
    """
    Node to evolve spins of posterior samples.
    """
    def __init__(self, evolve_sample_job, parent=None, posfile=None):
        pipeline.CondorDAGNode.__init__(self, evolve_sample_job)
        if parent is not None:
            self.add_parent(parent)
        if posfile is not None:
            self.set_posfile(posfile)

    def set_posfile(self, posfile):
        self.add_var_arg('--sample_file %s' % posfile)


class EvolveSamplesJob(pipeline.CondorDAGJob, pipeline.AnalysisJob):
    """
    Class for evolving the spins of posterior samples.
    """
    def __init__(self, cp, submitFile, logdir):
        exe = cp.get('condor', 'evolve_spins')
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        pipeline.AnalysisJob.__init__(self, cp)
        if cp.has_option('condor', 'accounting_group'):
            self.add_condor_cmd('accounting_group', cp.get('condor', 'accounting_group'))
        if cp.has_option('condor', 'accounting_group_user'):
            self.add_condor_cmd('accounting_group_user', cp.get('condor', 'accounting_group_user'))
        requirements = ''
        if cp.has_option('condor', 'queue'):
            self.add_condor_cmd('+' + cp.get('condor', 'queue'), 'True')
            requirements = '(TARGET.' + cp.get('condor', 'queue') + ' =?= True)'
        if cp.has_option('condor', 'Requirements'):
            if requirements != '':
                requirements = requirements + ' && '
            requirements = requirements + cp.get('condor', 'Requirements')
        if requirements != '':
            self.add_condor_cmd('Requirements', requirements)
        self.set_sub_file(submitFile)
        self.set_stdout_file(os.path.join(logdir, 'evolve_spins-$(cluster)-$(process).out'))
        self.set_stderr_file(os.path.join(logdir, 'evolve_spins-$(cluster)-$(process).err'))
        self.add_condor_cmd('getenv', 'True')
        if cp.has_option('spin_evol', 'vfinal'):
            self.add_opt('vfinal', cp.get('spin_evol', 'vfinal'))