Loading [MathJax]/extensions/TeX/AMSsymbols.js
LALApps 10.1.0.1-5e288d3
All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Modules Pages
lalapps_power_likelihood_pipe.py
Go to the documentation of this file.
1##python
2#
3# Copyright (C) 2007 Kipp C. Cannon
4#
5# This program is free software; you can redistribute it and/or modify it
6# under the terms of the GNU General Public License as published by the
7# Free Software Foundation; either version 2 of the License, or (at your
8# option) any later version.
9#
10# This program is distributed in the hope that it will be useful, but
11# WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
13# Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18
19
20#
21# =============================================================================
22#
23# Preamble
24#
25# =============================================================================
26#
27
28
29"""
30Excess power offline pipeline's likelihood stage construction script.
31"""
32
33
34from __future__ import print_function
35
36
37import itertools
38from optparse import OptionParser
39import sys
40import tempfile
41from configparser import ConfigParser
42
43
44from igwn_segments import utils as segmentsUtils
45import lal
46from lal import pipeline
47from lal.utils import CacheEntry
48from lalburst import power
49
50
51__author__ = "Kipp Cannon <kipp@gravity.phys.uwm.edu>"
52__date__ = "$Date$"
53__version__ = "$Revision$"
54
55
56#
57# =============================================================================
58#
59# Command Line
60#
61# =============================================================================
62#
63
64
def parse_command_line():
	"""
	Parse the command line and pre-process the arguments.

	Returns the (options, filenames) pair from OptionParser after
	post-processing:  options.distribution_segments is replaced with
	the segment list parsed from the named segwizard file,
	options.input_cache becomes a set of CacheEntry objects drawn from
	all of the named cache files, and options.round_robin_cache becomes
	a list of sets of CacheEntry objects, one set per cache file.

	Raises ValueError if --distribution-segments is not supplied.
	"""
	parser = OptionParser(
		version = "%prog CVS $Id$",
		usage = "%prog [options]",
		description = "Constructs the likelihood-ratio based coincidence stage for an excess power analysis. The input consists of one or more LAL caches listing the sqlite database trigger files, and a list of segments giving the time intervals that should be considered to be independent. The LAL caches list all trigger files together, that is injections, time slides, and zero-lag. The individual trigger files are self-describing, so the analysis codes can autodetect their type. Each segment will be analyzed using the files that intersect it: the likelihood ratios will be constructed from the injections and time-lag triggers contained in files that intersect the segment, and that data used to assign likelihoods to the injections, time-lag, and zero-lag coincs in all files that intersect the same segment."
	)
	parser.add_option("--input-cache", metavar = "filename", action = "append", default = [], help = "Add the contents of this cache file to the list of files from which to draw statistics.")
	parser.add_option("--round-robin-cache", metavar = "filename", action = "append", default = [], help = "Add the contents of this cache file to the list of files from which to draw injection statistics in a round-robin way.")
	parser.add_option("--condor-log-dir", metavar = "path", default = ".", help = "Set the directory for Condor log files (default = \".\").")
	parser.add_option("--config-file", metavar = "filename", default = "power.ini", help = "Set .ini configuration file name (default = \"power.ini\").")
	parser.add_option("--distribution-segments", metavar = "filename", help = "Read boundaries for distribution data intervals from this segwizard format segments file (required).")
	parser.add_option("-v", "--verbose", action = "store_true", help = "Be verbose.")
	options, filenames = parser.parse_args()

	if options.distribution_segments is None:
		raise ValueError("missing required argument --distribution-segments")
	# the Python 2-only file() builtin has been replaced with open() in
	# context managers so the handles are closed promptly
	with open(options.distribution_segments) as f:
		options.distribution_segments = segmentsUtils.fromsegwizard(f, coltype = lal.LIGOTimeGPS)

	input_cache = set()
	for filename in options.input_cache:
		with open(filename) as f:
			input_cache |= set(CacheEntry(line) for line in f)
	options.input_cache = input_cache

	round_robin_cache = []
	for filename in options.round_robin_cache:
		with open(filename) as f:
			round_robin_cache.append(set(map(CacheEntry, f)))
	options.round_robin_cache = round_robin_cache

	return options, (filenames or [])
87
88
89#
90# =============================================================================
91#
92# Config
93#
94# =============================================================================
95#
96
97
def parse_config_file(options):
	"""
	Load the .ini file named by options.config_file, copy the
	[pipeline] section's user_tag and likelihood_data_cache_base
	entries onto the options object as .tag and
	.likelihood_data_cache_base respectively, and return the
	ConfigParser instance.
	"""
	if options.verbose:
		print("reading %s ..." % options.config_file, file=sys.stderr)
	parser = ConfigParser()
	parser.read(options.config_file)
	# record the pipeline identifiers on the options object for
	# convenient access elsewhere
	options.tag = parser.get("pipeline", "user_tag")
	options.likelihood_data_cache_base = parser.get("pipeline", "likelihood_data_cache_base")
	return parser
108
109
110#
111# =============================================================================
112#
113# Place Holder
114#
115# =============================================================================
116#
117
118
class PlaceHolder(object):
	"""
	Trivial accumulator that collects cache entries and hands the
	union back.  Provides the add_input_cache()/get_output_cache()
	interface used by DAG node collections in this pipeline.

	NOTE(review): the get_output_cache() method header was missing
	from the rendered listing (only its "return self.cache" body
	survived);  the name is restored here following the standard LAL
	pipeline node convention — confirm against the upstream source.
	"""
	def __init__(self):
		# set of cache entries accumulated so far
		self.cache = set()

	def add_input_cache(self, cache):
		# merge an iterable/set of cache entries into the pool
		self.cache |= cache

	def get_output_cache(self):
		return self.cache
128
129
130#
131# =============================================================================
132#
133# DAG Construction
134#
135# =============================================================================
136#
137
138
139#
140# Command line
141#
142
143
#
# Command line
#


options, filenames = parse_command_line()


#
# Parse .ini file, input cache(s), and segment list.
#


config_parser = parse_config_file(options)


#
# Define .sub files
#


power.init_job_types(config_parser)


#
# Start DAG
#


power.make_dag_directories(config_parser)
# the Condor log file is a uniquely-named temporary file in the requested
# directory;  mkstemp() returns (fd, name) and only the name is used here
dag = pipeline.CondorDAG(tempfile.mkstemp(".log", "power_likelihood_", options.condor_log_dir)[1])
dag.set_dag_file("power_likelihood")


#
# Generate likelihood data
#


# one node set for the main input cache, plus one per round-robin cache,
# each collecting the distribution-measurement jobs created below
input_cache_nodes = set()
round_robin_cache_nodes = [set() for cache in options.round_robin_cache]
for seg in options.distribution_segments:
	if options.verbose:
		print("generating distribution measurement jobs for %s ..." % str(seg), file=sys.stderr)
	# each segment is measured using only the cache entries whose data
	# intersects it
	input_cache_nodes |= power.make_burca_tailor_fragment(dag, set([entry for entry in options.input_cache if entry.segmentlistdict.intersects_segment(seg)]), seg, "LIKELIHOOD_MAIN")
	for i, (nodes, cache) in enumerate(zip(round_robin_cache_nodes, options.round_robin_cache)):
		nodes |= power.make_burca_tailor_fragment(dag, set([entry for entry in cache if entry.segmentlistdict.intersects_segment(seg)]), seg, "LIKELIHOOD_RR%02d" % i)
186
187
188#
189# Compute likelihood ratios for coincs
190#
191
192
if options.verbose:
	print("generating likelihood assignment jobs for main group ...", file=sys.stderr)
# parent nodes are the union of every distribution-measurement job:  the
# main-group likelihood assignment must wait for all density estimates.
# set.union() replaces the bare reduce(), which is a NameError under
# Python 3 (reduce lives in functools there, and this file otherwise
# targets Python 3 via configparser).
parents = input_cache_nodes.union(*round_robin_cache_nodes)
nodes = power.make_burca2_fragment(dag, options.input_cache, parents, "LIKELIHOOD_MAIN")
197
198
def round_robin(round_robin_cache_nodes, round_robin_cache):
	"""
	Pair each round-robin cache with the union of the node sets built
	from all of the *other* round-robin caches, so each injection set
	is assigned likelihoods using distributions measured without it.

	Returns a list of (parent node set, cache) pairs, one per cache.
	"""
	# combinations() yields the leave-one-out (n-1)-subsets in
	# lexicographic order;  reversing that sequence pairs the subset
	# omitting the i-th entry with the i-th cache.  set().union(*seq)
	# replaces the bare reduce(), which is a NameError under Python 3.
	subsets = list(itertools.combinations(round_robin_cache_nodes, len(round_robin_cache_nodes) - 1))
	subsets.reverse()
	parents = [set().union(*seq) for seq in subsets]
	return list(zip(parents, round_robin_cache))
204
# each round-robin group's coincs are assigned likelihoods using the
# distributions measured from the other groups plus the main input cache
for i, (parents, apply_to_cache) in enumerate(round_robin(round_robin_cache_nodes, options.round_robin_cache)):
	if options.verbose:
		print("generating likelihood assignment jobs for round-robin group %d ..." % i, file=sys.stderr)
	nodes |= power.make_burca2_fragment(dag, apply_to_cache, parents | input_cache_nodes, "LIKELIHOOD_RR%02d" % i)


#
# Output
#


if options.verbose:
	print("writing dag ...", file=sys.stderr)
# emit the Condor submit files and the DAG itself
dag.write_sub_files()
dag.write_dag()
def round_robin(round_robin_cache_nodes, round_robin_cache)