Loading [MathJax]/extensions/TeX/AMSmath.js
LALBurst 2.0.7.1-8a6b96f
All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Macros Modules Pages
stringutils.py
Go to the documentation of this file.
1# Copyright (C) 2009--2019,2021 Kipp Cannon
2#
3# This program is free software; you can redistribute it and/or modify it
4# under the terms of the GNU General Public License as published by the
5# Free Software Foundation; either version 2 of the License, or (at your
6# option) any later version.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranty of
10# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
11# Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program; if not, write to the Free Software Foundation, Inc.,
15# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
16
17
18#
19# =============================================================================
20#
21# Preamble
22#
23# =============================================================================
24#
25
26
27try:
28 from fpconst import NegInf
29except ImportError:
30 NegInf = float("-inf")
31import itertools
32import math
33import numpy
34import scipy.stats
35import sys
36
37
38from igwn_ligolw import ligolw
39from igwn_ligolw import lsctables
40from igwn_ligolw import utils as ligolw_utils
41from igwn_ligolw.utils import process as ligolw_process
42import lal
43from lal import rate
44from igwn_segments import utils as segmentsUtils
45from .offsetvector import offsetvector
46from . import snglcoinc
47
48
49__author__ = "Kipp Cannon <kipp.cannon@ligo.org>"
50from .git_version import date as __date__
51from .git_version import version as __version__
52
53
54#
55# =============================================================================
56#
57# Likelihood Machinery
58#
59# =============================================================================
60#
61
62
63#
64# Make a look-up table of time-of-arrival triangulators
65#
66
67
def triangulators(timing_uncertainties):
	"""
	Return a dictionary of snglcoinc.TOATriangulator objects
	initialized for a variety of instrument combinations.
	timing_uncertainties is a dictionary of instrument->$\\Delta t$
	pairs.  The return value is a dictionary of (instrument
	tuple)->TOATriangulator mappings.  The instrument names in each
	tuple are sorted in alphabetical order, and the triangulators are
	constructed with the instruments in that order (see the
	documentation for snglcoinc.TOATriangulator for more information).

	Example:

	>>> x = triangulators({"H1": 0.005, "L1": 0.005, "V1": 0.005})

	constructs a dictionary of triangulators for every combination of
	two or more instruments that can be constructed from those three.

	The program lalapps_string_plot_binj can be used to measure the
	timing uncertainties for the instruments in a search.
	"""
	allinstruments = sorted(timing_uncertainties.keys())

	triangulators = {}
	# every n-way combination of instruments, n >= 2
	for n in range(2, len(allinstruments) + 1):
		for instruments in itertools.combinations(allinstruments, n):
			# detector locations and timing uncertainties are
			# passed in the same (sorted) instrument order
			triangulators[instruments] = snglcoinc.TOATriangulator([lal.cached_detector_by_prefix[instrument].location for instrument in instruments], [timing_uncertainties[instrument] for instrument in instruments])

	return triangulators
97
98
99#
100# A binning for instrument combinations
101#
102# FIXME: we decided that the coherent and null stream naming convention
103# would look like
104#
105# H1H2:LSC-STRAIN_HPLUS, H1H2:LSC-STRAIN_HNULL
106#
107# and so on. i.e., the +, x and null streams from a coherent network would
108# be different channels from a single instrument whose name would be the
109# mash-up of the names of the instruments in the network. that is
# inconsistent with the "H1H2+", "H1H2-" shown here, so this needs to be
111# fixed but I don't know how. maybe it'll go away before it needs to be
112# fixed.
113#
114
115
class InstrumentBins(rate.HashableBins):
	"""
	Binning whose bins are frozensets of instrument names:  one bin
	for every subset (including the empty set) of the names sequence.

	Example:

	>>> x = InstrumentBins()
	>>> x[frozenset(("H1", "L1"))]
	55
	>>> x.centres()[55]
	frozenset(['H1', 'L1'])
	"""

	names = ("E0", "E1", "E2", "E3", "G1", "H1", "H2", "H1H2+", "H1H2-", "L1", "V1")

	def __init__(self, names = names):
		# FIX:  names now defaults to the class-level name list so
		# that the no-argument construction shown in the doctest
		# above works;  passing an explicit sequence is unchanged.
		# one bin per subset of names, from the empty set up to the
		# full set
		super(InstrumentBins, self).__init__(frozenset(combo) for n in range(len(names) + 1) for combo in itertools.combinations(names, n))

	# FIXME:  hack to allow instrument binnings to be included as a
	# dimension in multi-dimensional PDFs by defining a volume for
	# them.  investigate more sensible ways to do this.  maybe NDBins
	# and BinnedDensity should understand the difference between
	# functional and parametric co-ordinates.
	def lower(self):
		return numpy.arange(0, len(self), dtype = "double")
	def upper(self):
		return numpy.arange(1, len(self) + 1, dtype = "double")

	# name used by rate.NDBins' XML (de)serialization machinery
	xml_bins_name = u"instrumentbins"
143
# NOTE: side effect of importing this module:  register the
# InstrumentBins binning type with rate.NDBins' XML (de)serialization
# name mapping so PDFs containing an InstrumentBins dimension can
# round-trip through XML documents.
rate.NDBins.xml_bins_name_mapping.update({
	InstrumentBins.xml_bins_name: InstrumentBins,
	InstrumentBins: InstrumentBins.xml_bins_name
})
149
150
151#
152# Parameter distributions
153#
154
155
157 # FIXME: the interps dictionary maintained here should be
158 # eliminated in favour of an internal mechanism within the PDFs
159 # themselves that performs the interpolation on-the-fly, without
160 # requiring an intermediate object to be created
def __init__(self, instruments):
	"""
	Initialize one binned PDF for each ranking statistic
	parameter:  a (SNR^2, chi^2/DOF) PDF per instrument, and dt,
	dA, df PDFs per instrument pair, plus a single (instrument
	group, RSS timing residual) PDF.
	"""
	self.densities = {}
	for instrument in instruments:
		# joint SNR^2 vs. reduced chi^2 distribution
		self.densities["%s_snr2_chi2" % instrument] = rate.BinnedLnPDF(rate.NDBins((rate.ATanLogarithmicBins(10, 1e7, 801), rate.ATanLogarithmicBins(.1, 1e4, 801))))
	for pair in itertools.combinations(sorted(instruments), 2):
		# arrival-time difference window:  5 ms plus the light
		# travel time between the pair
		dt = 0.005 + snglcoinc.light_travel_time(*pair)	# seconds
		self.densities["%s_%s_dt" % pair] = rate.BinnedLnPDF(rate.NDBins((rate.ATanBins(-dt, +dt, 801),)))
		# amplitude ratio (log10) and fractional cut-off
		# frequency difference distributions
		self.densities["%s_%s_dA" % pair] = rate.BinnedLnPDF(rate.NDBins((rate.ATanBins(-0.5, +0.5, 801),)))
		self.densities["%s_%s_df" % pair] = rate.BinnedLnPDF(rate.NDBins((rate.ATanBins(-0.2, +0.2, 501),)))
	# only non-negative rss timing residual bins will be used
	# but we want a binning that's linear at the origin so
	# instead of inventing a new one we just use atan bins that
	# are symmetric about 0
	self.densities["instrumentgroup,rss_timing_residual"] = rate.BinnedLnPDF(rate.NDBins((InstrumentBins(instruments), rate.ATanBins(-0.02, +0.02, 1001))))
175
def __call__(self, **params):
	"""
	Evaluate.  Returns the sum of the interpolated log densities
	at the given parameter co-ordinates, or -inf when no
	parameters are supplied.
	"""
	# construct the interpolators on first use
	if not hasattr(self, "interps"):
		self.mkinterps()
	if not params:
		return NegInf
	interps = self.interps
	return sum(interps[name](*coords) for name, coords in params.items())
183
def __iadd__(self, other):
	"""
	Marginalize the two densities:  accumulate other's counts
	into our own.  Raises TypeError unless other is the same type
	and defines the same set of parameter densities.
	"""
	if type(other) != type(self) or set(other.densities) != set(self.densities):
		raise TypeError("cannot add %s and %s" % (type(self), type(other)))
	for name, pdf in self.densities.items():
		pdf += other.densities[name]
	# any cached interpolators are now stale;  discard them
	try:
		del self.interps
	except AttributeError:
		pass
	return self
194
def increment(self, weight = 1.0, **params):
	"""
	Increment the count for each named parameter density at the
	given co-ordinates by weight (default 1.0).
	"""
	for name, coords in params.items():
		self.densities[name].count[coords] += weight
198
def copy(self):
	"""
	Return a duplicate copy of this object.  The densities are
	deep-copied.
	"""
	new = type(self)([])
	new.densities = dict((name, pdf.copy()) for name, pdf in self.densities.items())
	return new
204
def mkinterps(self):
	"""
	Construct interpolators from the binned densities.  The
	(disabled) RSS timing residual density is excluded.
	"""
	self.interps = {name: pdf.mkinterp() for name, pdf in self.densities.items() if "rss_timing_residual" not in name}
207
def finish(self):
	"""
	Ensure all internal densities are normalized, and initialize
	interpolator objects as needed for smooth evaluation.  Must
	be invoked before using this object to evaluate log density
	values.
	"""
	for key, pdf in self.densities.items():
		if key.endswith("_snr2_chi2"):
			# 2-D Gaussian smoothing kernel
			rate.filter_array(pdf.array, rate.gaussian_window(11, 11, sigma = 20))
		elif key.endswith("_dt") or key.endswith("_dA") or key.endswith("_df"):
			# 1-D Gaussian smoothing kernel
			rate.filter_array(pdf.array, rate.gaussian_window(11, sigma = 20))
		elif key.startswith("instrumentgroup"):
			# instrument group filter is a no-op
			pass
		else:
			# shouldn't get here
			raise Exception
		pdf.normalize()
	self.mkinterps()
222
def to_xml(self, name):
	"""
	Serialize to an XML fragment and return the root element of
	the resulting XML tree.  The instrument set is recovered from
	the "_snr2_chi2" density names and recorded in an
	"instruments" Param so .from_xml() can reconstruct this
	object.
	"""
	xml = super(LnLRDensity, self).to_xml(name)
	instruments = set(key.split("_", 1)[0] for key in self.densities if key.endswith("_snr2_chi2"))
	xml.appendChild(ligolw.Param.from_pyvalue("instruments", lsctables.instrumentsproperty.set(instruments)))
	for key, pdf in self.densities.items():
		xml.appendChild(pdf.to_xml(key))
	return xml
230
@classmethod
def from_xml(cls, xml, name):
	"""
	In the XML document tree rooted at xml, search for the
	serialized LnLRDensity object named name, and deserialize it.
	The return value is a new LnLRDensity object.
	"""
	xml = cls.get_xml_root(xml, name)
	# reconstruct using the instrument list recorded by .to_xml(),
	# then replace each density with its deserialized counterpart
	self = cls(lsctables.instrumentsproperty.get(ligolw.Param.get_param(xml, "instruments").value))
	for key in self.densities:
		self.densities[key] = rate.BinnedLnPDF.from_xml(xml, key)
	return self
238
239
# suffix appended to the user-supplied name to form the Name attribute
# of the LIGO_LW element in which this object is serialized (used by
# .get_xml_root() and .to_xml())
ligo_lw_name_suffix = u"stringcusp_coincparamsdistributions"
242
def __init__(self, instruments):
	"""
	Initialize the numerator, denominator and candidates
	densities for the given instruments, and a look-up table of
	time-of-arrival triangulators in which every instrument is
	assigned a timing uncertainty of 8e-5 s.
	"""
	self.triangulators = triangulators(dict.fromkeys(instruments, 8e-5))
	self.numerator = LnLRDensity(instruments)
	self.denominator = LnLRDensity(instruments)
	self.candidates = LnLRDensity(instruments)
248
def __iadd__(self, other):
	"""
	Accumulate other's densities into our own.  Raises TypeError
	if other is a different type, ValueError if it was built for
	a different instrument set.
	"""
	if type(other) != type(self):
		raise TypeError(other)
	if set(other.triangulators.keys()) != set(self.triangulators.keys()):
		raise ValueError("incompatible instruments")
	self.numerator += other.numerator
	self.denominator += other.denominator
	self.candidates += other.candidates
	return self
258
def __call__(self, **kwargs):
	"""
	Return the natural logarithm of the likelihood ratio for the
	given parameters.  Candidates involving only H1 and H2 are
	vetoed by returning -inf.
	"""
	# recover the instrument set.  FIXME:  this is stupid, but
	# it's how we have to do it for now
	instruments = {name[:2] for name in kwargs if name.endswith("_snr2_chi2")}

	# disallow H1+H2 only coincs
	if instruments == {"H1", "H2"}:
		return NegInf

	# normal likelihood ratio
	return super(StringCoincParamsDistributions, self).__call__(**kwargs)
270
def copy(self):
	"""
	Return a duplicate copy of this object.  The triangulator
	look-up table is shared by reference;  the densities are
	copied.
	"""
	new = type(self)([])
	# share reference
	new.triangulators = self.triangulators
	for attr in ("numerator", "denominator", "candidates"):
		setattr(new, attr, getattr(self, attr).copy())
	return new
278
def coinc_params(self, events, offsetvector):
	"""
	Compute the ranking statistic parameter dictionary for a
	candidate.  events is a sequence of sngl_burst-like trigger
	objects, offsetvector maps instrument name to the time-slide
	offset applied to it.  Returns a dictionary mapping parameter
	name to a co-ordinate tuple suitable for the densities in
	this object;  an empty dictionary is returned for fewer than
	two events.
	"""
	params = {}

	#
	# check for coincs that have been vetoed entirely
	#

	if len(events) < 2:
		return params

	#
	# Initialize the parameter dictionary, sort the events by
	# instrument name (the multi-instrument parameters are defined for
	# the instruments in this order and the triangulators are
	# constructed this way too), and retrieve the sorted instrument
	# names
	#

	events = tuple(sorted(events, key = lambda event: event.ifo))
	instruments = tuple(event.ifo for event in events)

	#
	# zero-instrument parameters
	#

	ignored, ignored, ignored, rss_timing_residual = self.triangulators[instruments](tuple(event.peak + offsetvector[event.ifo] for event in events))
	# FIXME:  rss_timing_residual is disabled.  all the code to
	# compute it properly is still here and given suitable
	# initializations, the distribution data is still
	# two-dimensional and has a suitable filter applied to it,
	# but all events are forced into the RSS_{\Delta t} = 0
	# bin, in effect removing that dimension from the data.  We
	# can look at this again sometime in the future if we're
	# curious why it didn't help.  Just delete the next line
	# and you're back in business.
	rss_timing_residual = 0.0
	# FIXME:  why is this commented out?
	#params["instrumentgroup,rss_timing_residual"] = (frozenset(instruments), rss_timing_residual)

	#
	# one-instrument parameters
	#

	for event in events:
		params["%s_snr2_chi2" % event.ifo] = (event.snr**2.0, event.chisq / event.chisq_dof)

	#
	# two-instrument parameters.  note that events are sorted by
	# instrument
	#

	for event1, event2 in itertools.combinations(events, 2):
		assert event1.ifo != event2.ifo

		prefix = "%s_%s_" % (event1.ifo, event2.ifo)

		# difference in peak arrival times after applying the
		# time-slide offsets
		dt = float((event1.peak + offsetvector[event1.ifo]) - (event2.peak + offsetvector[event2.ifo]))
		params["%sdt" % prefix] = (dt,)

		# log10 of the amplitude ratio
		dA = math.log10(abs(event1.amplitude / event2.amplitude))
		params["%sdA" % prefix] = (dA,)

		# f_cut = central_freq + bandwidth/2
		f_cut1 = event1.central_freq + event1.bandwidth / 2
		f_cut2 = event2.central_freq + event2.bandwidth / 2
		df = float((math.log10(f_cut1) - math.log10(f_cut2)) / (math.log10(f_cut1) + math.log10(f_cut2)))
		params["%sdf" % prefix] = (df,)

	#
	# done
	#

	return params
352
def ln_lr_from_triggers(self, events, offsetvector):
	"""
	Compute the log likelihood ratio for a candidate from its
	constituent trigger events and the offset vector applied to
	them.
	"""
	params = self.coinc_params(events, offsetvector)
	return self(**params)
355
def finish(self):
	"""
	Finish the numerator, denominator and candidates densities,
	then return self.
	"""
	for density in (self.numerator, self.denominator, self.candidates):
		density.finish()
	return self
361
@classmethod
def get_xml_root(cls, xml, name):
	"""
	Locate and return the LIGO_LW element named
	"name:<ligo_lw_name_suffix>" in the document tree rooted at
	xml.  Raises ValueError unless exactly one such element is
	found.
	"""
	target = u"%s:%s" % (name, cls.ligo_lw_name_suffix)
	matches = [elem for elem in xml.getElementsByTagName(ligolw.LIGO_LW.tagName) if elem.hasAttribute(u"Name") and elem.Name == target]
	if len(matches) != 1:
		raise ValueError("XML tree must contain exactly one %s element named %s" % (ligolw.LIGO_LW.tagName, target))
	return matches[0]
369
@classmethod
def from_xml(cls, xml, name):
	"""
	In the XML document tree rooted at xml, search for the
	serialized StringCoincParamsDistributions object named name,
	and deserialize it.  The return value is a new
	StringCoincParamsDistributions object.
	"""
	xml = cls.get_xml_root(xml, name)
	self = cls([])
	self.numerator = LnLRDensity.from_xml(xml, "numerator")
	self.denominator = LnLRDensity.from_xml(xml, "denominator")
	self.candidates = LnLRDensity.from_xml(xml, "candidates")
	# FIXME:  stupid way to get instruments, store them in the
	# XML
	instruments = [key.replace("_snr2_chi2", "") for key in self.numerator.densities.keys() if "_snr2_chi2" in key]
	self.triangulators = triangulators(dict.fromkeys(instruments, 8e-5))
	return self
382
def to_xml(self, name):
	"""
	Serialize to an XML fragment and return the root element of
	the resulting XML tree.  The numerator, denominator and
	candidates densities are serialized as children of a single
	LIGO_LW element.
	"""
	xml = ligolw.LIGO_LW({u"Name": u"%s:%s" % (name, self.ligo_lw_name_suffix)})
	xml.appendChild(self.numerator.to_xml("numerator"))
	xml.appendChild(self.denominator.to_xml("denominator"))
	xml.appendChild(self.candidates.to_xml("candidates"))
	return xml
389
390
391#
392# I/O
393#
394
395
def load_likelihood_data(filenames, verbose = False):
	"""
	Load StringCoincParamsDistributions likelihood data from the
	given XML files.  Returns a two-element tuple:  the sum of the
	parameter distribution objects, and the union of the coalesced
	live-time segment lists.  If verbose is True, progress is
	reported to stderr.
	"""
	coinc_params = None
	seglists = None
	for n, filename in enumerate(filenames, 1):
		if verbose:
			print("%d/%d:" % (n, len(filenames)), end=' ', file=sys.stderr)
		xmldoc = ligolw_utils.load_filename(filename, verbose = verbose)
		params = StringCoincParamsDistributions.from_xml(xmldoc, u"string_cusp_likelihood")
		segs = lsctables.SearchSummaryTable.get_table(xmldoc).get_out_segmentlistdict(lsctables.ProcessTable.get_table(xmldoc).get_ids_by_program(u"lalapps_string_meas_likelihood")).coalesce()
		xmldoc.unlink()
		if coinc_params is None:
			# first document:  adopt its contents
			coinc_params = params
			seglists = segs
		else:
			# subsequent documents:  accumulate
			coinc_params += params
			seglists |= segs
	return coinc_params, seglists
415
416
417#
418# =============================================================================
419#
420# Livetime
421#
422# =============================================================================
423#
424
425
def time_slides_livetime(seglists, time_slides, min_instruments, verbose = False, clip = None):
	"""
	seglists is a segmentlistdict of times when each of a set of
	instruments were on, time_slides is a sequence of
	instrument-->offset dictionaries, each vector of offsets in the
	sequence is applied to the segmentlists and the total time during
	which at least min_instruments were on is summed and returned.  If
	clip is not None, after each offset vector is applied to seglists
	the result is intersected with clip before computing the livetime.
	If verbose is True then progress reports are printed to stderr.
	"""
	livetime = 0.0
	# work on a copy;  don't modify original
	seglists = seglists.copy()
	N = len(time_slides)
	if verbose:
		print("computing the live time for %d time slides:" % N, file=sys.stderr)
	for n, time_slide in enumerate(time_slides):
		if verbose:
			print("\t%.1f%%\r" % (100.0 * n / N), end=' ', file=sys.stderr)
		seglists.offsets.update(time_slide)
		clipped = seglists if clip is None else seglists & clip
		livetime += float(abs(segmentsUtils.vote(clipped.values(), min_instruments)))
	if verbose:
		print("\t100.0%", file=sys.stderr)
	return livetime
453
454
def time_slides_livetime_for_instrument_combo(seglists, time_slides, instruments, verbose = False, clip = None):
	"""
	like time_slides_livetime() except computes the time for which
	exactly the instruments given by the sequence instruments were on
	(and nothing else).
	"""
	livetime = 0.0
	# segments for instruments that must be on
	onseglists = seglists.copy(keys = instruments)
	# segments for instruments that must be off
	offseglists = seglists.copy(keys = set(seglists) - set(instruments))
	N = len(time_slides)
	if verbose:
		print("computing the live time for %s in %d time slides:" % (", ".join(instruments), N), file=sys.stderr)
	for n, time_slide in enumerate(time_slides):
		if verbose:
			print("\t%.1f%%\r" % (100.0 * n / N), end=' ', file=sys.stderr)
		onseglists.offsets.update(time_slide)
		offseglists.offsets.update(time_slide)
		# time when all "on" instruments were up, minus time when
		# any "off" instrument was up.  NOTE(review):  clip, when
		# given, is applied only to the "on" segments, not the
		# "off" segments — looks intentional, but confirm.
		if clip is None:
			livetime += float(abs(onseglists.intersection(onseglists.keys()) - offseglists.union(offseglists.keys())))
		else:
			livetime += float(abs((onseglists & clip).intersection(onseglists.keys()) - offseglists.union(offseglists.keys())))
	if verbose:
		print("\t100.0%", file=sys.stderr)
	return livetime
481
482
483#
484# =============================================================================
485#
486# Database Utilities
487#
488# =============================================================================
489#
490
491
def create_recovered_ln_likelihood_ratio_table(connection, coinc_def_id):
	"""
	Create a temporary table named "recovered_ln_likelihood_ratio"
	containing two columns:  "simulation_id", the simulation_id of an
	injection, and "ln_likelihood_ratio", the highest log likelihood
	ratio at which that injection was recovered by a coincidence of
	type coinc_def_id.
	"""
	cursor = connection.cursor()
	cursor.execute("""
CREATE TEMPORARY TABLE recovered_ln_likelihood_ratio (simulation_id TEXT PRIMARY KEY, ln_likelihood_ratio REAL)
	""")
	# for each injection, record the largest likelihood assigned to
	# any coincidence of the requested type it participated in,
	# following the sim_burst --> coinc_event_map --> coinc_event
	# chain
	cursor.execute("""
INSERT OR REPLACE INTO
	recovered_ln_likelihood_ratio
SELECT
	sim_burst.simulation_id AS simulation_id,
	MAX(coinc_event.likelihood) AS ln_likelihood_ratio
FROM
	sim_burst
	JOIN coinc_event_map AS a ON (
		a.table_name == "sim_burst"
		AND a.event_id == sim_burst.simulation_id
	)
	JOIN coinc_event_map AS b ON (
		b.coinc_event_id == a.coinc_event_id
	)
	JOIN coinc_event ON (
		b.table_name == "coinc_event"
		AND b.event_id == coinc_event.coinc_event_id
	)
WHERE
	coinc_event.coinc_def_id == ?
GROUP BY
	sim_burst.simulation_id
	""", (coinc_def_id,))
	cursor.close()
529
530
def create_sim_burst_best_string_sngl_map(connection, coinc_def_id):
	"""
	Construct a sim_burst --> best matching sngl_burst mapping:  a
	temporary table named sim_burst_best_string_sngl_map with columns
	simulation_id and event_id.  "Best" is the coincident sngl_burst
	with the smallest (chisq / chisq_dof) / snr^2.
	"""
	connection.cursor().execute("""
CREATE TEMPORARY TABLE
	sim_burst_best_string_sngl_map
AS
	SELECT
		sim_burst.simulation_id AS simulation_id,
		(
			SELECT
				sngl_burst.event_id
			FROM
				coinc_event_map AS a
				JOIN coinc_event_map AS b ON (
					b.coinc_event_id == a.coinc_event_id
				)
				JOIN coinc_event ON (
					coinc_event.coinc_event_id == a.coinc_event_id
				)
				JOIN sngl_burst ON (
					b.table_name == 'sngl_burst'
					AND b.event_id == sngl_burst.event_id
				)
			WHERE
				a.table_name == 'sim_burst'
				AND a.event_id == sim_burst.simulation_id
				AND coinc_event.coinc_def_id == ?
			ORDER BY
				(sngl_burst.chisq / sngl_burst.chisq_dof) / (sngl_burst.snr * sngl_burst.snr)
			LIMIT 1
		) AS event_id
	FROM
		sim_burst
	WHERE
		event_id IS NOT NULL
	""", (coinc_def_id,))
569
570
def create_sim_burst_best_string_coinc_map(connection, coinc_def_id):
	"""
	Construct a sim_burst --> best matching coinc_event mapping for
	string cusp injections and coincs.
	"""
	# FIXME:  this hasn't finished being ported from the inspiral code
	# NOTE(review):  the inner SELECT references coinc_event and
	# sngl_burst columns without joining those tables, and
	# coinc_inspiral is an inspiral table;  consistent with the FIXME
	# above, this query cannot run as written.
	connection.cursor().execute("""
CREATE TEMPORARY TABLE
	sim_burst_best_string_coinc_map
AS
	SELECT
		sim_burst.simulation_id AS simulation_id,
		(
			SELECT
				coinc_inspiral.coinc_event_id
			FROM
				coinc_event_map AS a
				JOIN coinc_event_map AS b ON (
					b.coinc_event_id == a.coinc_event_id
				)
				JOIN coinc_inspiral ON (
					b.table_name == 'coinc_event'
					AND b.event_id == coinc_inspiral.coinc_event_id
				)
			WHERE
				a.table_name == 'sim_burst'
				AND a.event_id == sim_burst.simulation_id
				AND coinc_event.coinc_def_id == ?
			ORDER BY
				(sngl_burst.chisq / sngl_burst.chisq_dof) / (sngl_burst.snr * sngl_burst.snr)
			LIMIT 1
		) AS coinc_event_id
	FROM
		sim_burst
	WHERE
		coinc_event_id IS NOT NULL
	""", (coinc_def_id,))
Base class for parameter distribution densities for use in log likelihood ratio ranking statistics.
Definition: snglcoinc.py:2320
def get_xml_root(cls, xml, name)
Sub-classes can use this in their overrides of the .from_xml() method to find the root element of the...
Definition: snglcoinc.py:2391
Mixin to assist in implementing a log likelihood ratio ranking statistic class.
Definition: snglcoinc.py:2485
Time-of-arrival triangulator.
Definition: snglcoinc.py:1999
def __init__(self, instruments)
Definition: stringutils.py:161
def copy(self)
Return a duplicate copy of this object.
Definition: stringutils.py:199
def __iadd__(self, other)
Marginalize the two densities.
Definition: stringutils.py:184
def increment(self, weight=1.0, **params)
Increment the counts defining this density at the given parameters.
Definition: stringutils.py:195
def finish(self)
Ensure all internal densities are normalized, and initialize interpolator objects as needed for smoot...
Definition: stringutils.py:208
def from_xml(cls, xml, name)
In the XML document tree rooted at xml, search for the serialized LnLRDensity object named name,...
Definition: stringutils.py:232
def __call__(self, **params)
Evaluate.
Definition: stringutils.py:176
def to_xml(self, name)
Serialize to an XML fragment and return the root element of the resulting XML tree.
Definition: stringutils.py:223
def ln_lr_from_triggers(self, events, offsetvector)
Definition: stringutils.py:353
def __call__(self, **kwargs)
Return the natural logarithm of the likelihood ratio for the given parameters,.
Definition: stringutils.py:259
def coinc_params(self, events, offsetvector)
Definition: stringutils.py:279
def time_slides_livetime(seglists, time_slides, min_instruments, verbose=False, clip=None)
seglists is a segmentlistdict of times when each of a set of instruments were on, time_slides is a se...
Definition: stringutils.py:436
def create_recovered_ln_likelihood_ratio_table(connection, coinc_def_id)
Create a temporary table named "recovered_ln_likelihood_ratio" containing two columns: "simulation_id...
Definition: stringutils.py:499
def time_slides_livetime_for_instrument_combo(seglists, time_slides, instruments, verbose=False, clip=None)
like time_slides_livetime() except computes the time for which exactly the instruments given by the s...
Definition: stringutils.py:460
def load_likelihood_data(filenames, verbose=False)
Definition: stringutils.py:396
def triangulators(timing_uncertainties)
Return a dictionary of snglcoinc.TOATriangulator objects initialized for a variety of instrument comb...
Definition: stringutils.py:88