"""The :code:`bilby_pipe_gracedb` command line program provides a methodto generate ini files for a GraceDB event. This ini file can then beused as the input for the other bilby_pipe modules.In addition to reading the data from gracedb, it will attempt to copythe PSD/strain data files to the local machine.The functionality of much of these utility assumes the user is running on theCIT cluster, e.g. the ROQ and calibration directories are in their usual place."""importargparseimportjsonimportosimporttimeimportnumpyasnpfromgwpy.timeseriesimportTimeSeries,TimeSeriesListfrom.importparserfrom.parserimportStoreBooleanfrom.utilsimport(DEFAULT_DISTANCE_LOOKUPS,BilbyPipeError,check_directory_exists_and_if_not_mkdir,logger,next_power_of_2,run_command_line,tcolors,test_connection,)# Default channels from: https://wiki.ligo.org/LSC/JRPComm/ObsRun3# NOTE: can these be read from gracedb?
def read_from_gracedb(gracedb, gracedb_url, outdir):
    """Fetch a GraceDB event and its coinc.xml, caching both under `outdir`.

    Parameters
    ----------
    gracedb: str
        GraceDB id of event
    gracedb_url: str
        Service url for GraceDB events
        GraceDB 'https://gracedb.ligo.org/api/' (default)
        GraceDB-playground 'https://gracedb-playground.ligo.org/api/'
    outdir: str
        Output directory

    Returns
    -------
    event:
        Contents of the GraceDB event, as a json-style dict. The key
        "coinc_file" is added, pointing at the downloaded coinc.xml
        (or None when the download failed).
    """
    from urllib.error import HTTPError

    from ligo.gracedb.rest import GraceDb

    test_connection()
    logger.info(f"Connecting to {gracedb_url}")
    try:
        client = GraceDb(service_url=gracedb_url)
    except IOError:
        # Surface the failure after logging; callers cannot proceed without a client
        logger.warning("Failed to connect to GraceDB")
        raise

    logger.info(f"Querying event {gracedb}")
    event = client.event(gracedb).json()
    with open(f"{outdir}/{gracedb}.json", "w") as ff:
        json.dump(event, ff, indent=2)

    logger.info(f"Requesting coinc.xml for {gracedb}")
    try:
        data = client.files(gracedb, filename="coinc.xml")
        coinc_filename = f"{outdir}/coinc.xml"
        with open(coinc_filename, "wb") as ff:
            ff.write(data.data)
        event["coinc_file"] = coinc_filename
    except HTTPError:
        # Missing coinc.xml is non-fatal: downstream falls back to generated PSDs
        logger.warning(
            "Failed to download coinc.xml. PSDs will be generated by bilby_pipe."
        )
        event["coinc_file"] = None
    return event
def download_bayestar_skymap(gracedb, gracedb_url, outdir):
    """Download the bayestar skymap for a GraceDB event into `outdir`.

    Parameters
    ----------
    gracedb: str
        GraceDB id of event
    gracedb_url: str
        Service url for GraceDB events
        GraceDB 'https://gracedb.ligo.org/api/' (default)
        GraceDB-playground 'https://gracedb-playground.ligo.org/api/'
    outdir: str
        Output directory

    Returns
    -------
    skymap_file: str
        Path of the downloaded fits file, or None when the download failed.
    """
    from urllib.error import HTTPError

    from ligo.gracedb.rest import GraceDb

    test_connection()
    logger.info(f"Connecting to {gracedb_url}")
    try:
        client = GraceDb(service_url=gracedb_url)
    except IOError:
        logger.warning("Failed to connect to GraceDB")
        raise

    logger.info(f"Requesting bayestar.multiorder.fits for {gracedb}")
    try:
        data = client.files(gracedb, filename="bayestar.multiorder.fits")
        skymap_file = f"{outdir}/bayestar.multiorder.fits"
        with open(skymap_file, "wb") as ff:
            ff.write(data.data)
    except HTTPError:
        # A missing skymap is non-fatal: the distance prior falls back to defaults
        logger.warning(
            "Failed to download bayestar.multiorder.fits. Distance maximum "
            "will be set to the default value."
        )
        skymap_file = None
    return skymap_file
def extract_psds_from_xml(coinc_file, ifos, outdir="."):
    """Extract per-detector PSDs from a coinc.xml file and write them to text.

    Parameters
    ----------
    coinc_file: str
        Path to the coinc.xml file downloaded from GraceDB
    ifos: list
        Detector names to look for
    outdir: str
        Directory where the `{ifo}_psd.txt` files are written

    Returns
    -------
    psd_filenames: dict
        Mapping of ifo -> written PSD filename (detectors with no PSD omitted)
    psd_maximum_frequency: float or None
        Smallest maximum frequency across the extracted PSDs, or None if
        no PSD was found
    """
    from gwpy.frequencyseries import FrequencySeries

    psd_filenames = dict()
    psd_maximum_frequency = None
    for ifo in ifos:
        try:
            psd = FrequencySeries.read(coinc_file, instrument=ifo)
            # Track the most restrictive (lowest) maximum frequency over all ifos
            top_frequency = psd.frequencies.value[-1]
            if psd_maximum_frequency is None:
                psd_maximum_frequency = top_frequency
            else:
                psd_maximum_frequency = min(psd_maximum_frequency, top_frequency)
            filename = f"{outdir}/{ifo}_psd.txt"
            psd.write(target=filename, format="txt")
            psd_filenames[ifo] = filename
        except ValueError:
            # gwpy raises ValueError when the instrument is absent from the file
            logger.warning(f"PSD for {ifo} not found in {coinc_file}.")
    return psd_filenames, psd_maximum_frequency
def read_from_json(json_file):
    """Read GraceDB events from json file

    Parameters
    ----------
    json_file: str
        Filename of the json file to read

    Returns
    -------
    candidate: dict
        Contents of the GraceDB event, json format

    Raises
    ------
    FileNotFoundError
        If `json_file` does not exist
    IOError
        If the file exists but cannot be read/parsed; previously this was
        swallowed, which led to an unrelated NameError on return
    """
    if not os.path.isfile(json_file):
        raise FileNotFoundError(f"File {json_file} not found")
    try:
        with open(json_file, "r") as file:
            candidate = json.load(file)
    except IOError:
        # Re-raise: returning here would reference an unbound `candidate`
        logger.warning("Unable to load event contents of json file")
        raise
    return candidate
def calibration_lookup_o4(trigger_time, detector):
    """Lookup function for the relevant calibration file for O4 data

    Assumes that it is running on CIT where the calibration files are stored
    under /home/cal/public_html/archive for the LIGO instruments and a
    uniform in magnitude, time, and phase calibration envelope for Virgo.

    Parameters
    ----------
    trigger_time: float
        The trigger time of interest
    detector: str [H1, L1, V1]
        Detector string

    Returns
    -------
    filepath: str
        The path to the relevant calibration envelope file. If no
        calibration file can be determined, None is returned.

    Notes
    -----
    We search the available estimates in reverse chronological order and
    take the closest available estimate prior to the specified trigger
    time. We only look for v0 calibration uncertainty and may not be the
    best estimate for offline analyses. The calibration archive sometimes
    contains directories for epochs that don't contain a usable
    uncertainty and so those directories are ignored.
    """
    if detector == "V1":
        # FIXME: update path if Virgo provides a new uncertainty
        return (
            "/home/cbc/pe/O3/calibrationenvelopes/Virgo/"
            "V_O3a_calibrationUncertaintyEnvelope_"
            "magnitude5percent_phase35milliradians10microseconds.txt"
        )

    base = f"/home/cal/public_html/archive/{detector}/uncertainty/v0"
    epochs = sorted(
        (int(epoch) for epoch in os.listdir(base) if epoch.isnumeric()),
        reverse=True,
    )
    for epoch in epochs:
        # Directory names inside an epoch are time suffixes; rebuild full GPS times
        times = sorted(
            (int(str(epoch) + tt) for tt in os.listdir(f"{base}/{epoch}")),
            reverse=True,
        )
        for tt in times:
            if trigger_time > tt:
                end = str(tt)[-6:]
                calib_file = (
                    f"{base}/{epoch}/{end}/calibration_uncertainty_{detector}_{tt}.txt"
                )
                # Skip epochs whose directory lacks a usable uncertainty file
                if os.path.exists(calib_file):
                    return os.path.abspath(calib_file)
    raise BilbyPipeError(
        "Requested trigger time prior to earliest calibration file, you may need to "
        "use the calibration_lookup_o3."
    )
def calibration_lookup_o3(trigger_time, detector):
    """Lookup function for the relevant calibration file for O3 data

    Assumes that it is running on CIT where the calibration files are stored
    under /home/cbc/pe/O3/calibrationenvelopes

    Parameters
    ----------
    trigger_time: float
        The trigger time of interest
    detector: str [H1, L1, V1]
        Detector string

    Returns
    -------
    filepath: str
        The path to the relevant calibration envelope file. If no
        calibration file can be determined, None is returned.
    """
    base = "/home/cbc/pe/O3/calibrationenvelopes"
    CALENVS_LOOKUP = dict(
        H1=os.path.join(base, "LIGO_Hanford/H_CalEnvs.txt"),
        L1=os.path.join(base, "LIGO_Livingston/L_CalEnvs.txt"),
        V1=os.path.join(base, "Virgo/V_CalEnvs.txt"),
    )
    if os.path.isdir(base) is False:
        raise BilbyPipeError(f"Unable to read from calibration folder {base}")

    calenv = CALENVS_LOOKUP[detector]
    times = list()
    files = dict()
    # Each line of the lookup file is "<gps-time> <envelope-filename>"
    with open(calenv, "r") as f:
        for line in f:
            time, filename = line.rstrip("\n").rstrip().split(" ")
            times.append(float(time))
            files[float(time)] = filename
    times = sorted(times)

    if trigger_time < times[0]:
        raise BilbyPipeError(
            "Requested trigger time prior to earliest calibration file"
        )

    # Walk through the (ascending) times, keeping the latest envelope that
    # still precedes the trigger; the final assignment wins.
    for time in times:
        if trigger_time > time:
            directory = os.path.dirname(calenv)
            calib_file = f"{directory}/{files[time]}"
    return os.path.abspath(calib_file)
def calibration_lookup(trigger_time, detector):
    """Lookup function for the relevant calibration file.

    This is a wrapper to the O3 and O4 specific functions.

    Parameters
    ----------
    trigger_time: float
        The trigger time of interest
    detector: str [H1, L1, V1]
        Detector string

    Returns
    -------
    filepath: str
        The path to the relevant calibration envelope file. If no
        calibration file can be determined, None is returned.
    """
    if trigger_time > 1275004818:  # June 1, 2020
        lookup = calibration_lookup_o4
    elif trigger_time > 1198800018:  # January 1, 2018
        lookup = calibration_lookup_o3
    else:
        raise BilbyPipeError(
            "Calibration lookup function not implemented for O1/O2"
        )
    return lookup(trigger_time, detector)
def calibration_dict_lookup(trigger_time, detectors):
    """Dictionary lookup function for the relevant calibration files

    Parameters
    ----------
    trigger_time: float
        The trigger time of interest
    detectors: list
        List of detector string

    Returns
    -------
    calibration_model, calibration_dict: str, dict
        Calibration model string and dictionary of paths to the relevant
        calibration envelope file. (None, None) when any detector's
        lookup fails.
    """
    try:
        lookup_result = {
            det: calibration_lookup(trigger_time, det) for det in detectors
        }
    except BilbyPipeError:
        # Any failed per-detector lookup disables calibration marginalization
        return None, None
    return "CubicSpline", lookup_result
def read_candidate(candidate):
    """Read a gracedb candidate json dictionary

    Dispatches to the CBC or burst reader based on which extra attributes
    the event carries.

    Parameters
    ----------
    candidate: dict
        GraceDB event dictionary, must contain "extra_attributes"

    Returns
    -------
    tuple
        Output of `_read_cbc_candidate` or `_read_burst_candidate`;
        implicitly None for unrecognised event types.

    Raises
    ------
    BilbyPipeError
        If the event dictionary has no "extra_attributes" key
    """
    if "extra_attributes" not in candidate:
        # Fixed grammar of the error message ("not ... present" -> "no ... present")
        raise BilbyPipeError(
            "Cannot parse event dictionary, no 'extra_attributes' present."
        )
    elif "CoincInspiral" in candidate["extra_attributes"]:
        return _read_cbc_candidate(candidate)
    elif "MultiBurst" in candidate["extra_attributes"]:
        return _read_burst_candidate(candidate)
[docs]def_read_cbc_candidate(candidate):if"mchirp"notincandidate["extra_attributes"]["CoincInspiral"]:raiseBilbyPipeError(f"Unable to determine chirp mass for {candidate['graceid']} from GraceDB")sngl=candidate["extra_attributes"]["SingleInspiral"][0]trigger_values={}trigger_values["chirp_mass"]=sngl["mchirp"]trigger_values["mass_ratio"]=min(sngl["mass1"],sngl["mass2"])/max(sngl["mass1"],sngl["mass2"])trigger_values["spin_1z"]=sngl["spin1z"]trigger_values["spin_2z"]=sngl["spin2z"]superevent=candidate["superevent"]ifos=candidate["extra_attributes"]["CoincInspiral"]["ifos"].split(",")sngl=candidate["extra_attributes"]["SingleInspiral"]ifos=[entry["ifo"]forentryinsngl]snrs=[entry["snr"]forentryinsngl]# NOTE: the channel name here varies by pipeline and isn't always available# channels = {entry["ifo"]: entry["channel"][3:] for entry in sngl}best_to_worst=np.argsort(snrs)[::-1]best_event=sngl[best_to_worst[0]]trigger_time=best_event["end_time"]+best_event["end_time_ns"]/1e9sorted_ifos=[ifos[idx]foridxinbest_to_worst]time_reference=sorted_ifos[0]iflen(sorted_ifos)>1:reference_frame="".join(sorted_ifos[:2])else:reference_frame="sky"return(trigger_values,superevent,trigger_time,ifos,reference_frame,time_reference,)
def _read_distance_upper_bound_from_fits(filename, level=0.95):
    """Read skymap fits file and return the credible upper bound of distance.

    If ligo.skymap is not installed, this returns None.

    Parameters
    ----------
    filename: str
        Path to the skymap fits file
    level: float
        Credible level of the returned distance upper bound

    Returns
    -------
    upper_bound: float
        Marginal distance quantile at `level`, or None without ligo.skymap
    """
    try:
        from ligo.skymap.distance import marginal_ppf
        from ligo.skymap.io import read_sky_map
    except ImportError:
        # Optional dependency: fall back to the default distance maximum
        logger.warning(
            "You do not have ligo.skymap installed. The distance prior maximum will "
            "be set to the default value."
        )
        return None
    (prob, mu, sigma, norm), metadata = read_sky_map(filename, distances=True)
    return marginal_ppf(level, prob, mu, sigma, norm)
def _get_cbc_likelihood_args(mode, trigger_values):
    """Return cbc likelihood arguments and quantities characterizing likelihood

    Parameters
    ----------
    mode: str
        Built-in likelihood mode name, or a path to a JSON settings file
    trigger_values: dict
        Trigger point estimates, must contain "chirp_mass" for ROQ modes

    Returns
    -------
    likelihood_args: dict
    likelihood_parameter_bounds: dict
        bounds of parameter space where likelihood is expected to be accurate
    minimum_frequency: float
        minimum frequency of likelihood integration
    maximum_frequency: float
        maximum frequency of likelihood integration
    duration: float
        inverse of frequency interval of likelihood integration
    """
    bns_modes = {
        "lowspin_phenomd_narrowmc_roq",
        "lowspin_phenomd_broadmc_roq",
        "lowspin_phenomd_fhigh1024_roq",
        "lowspin_taylorf2_roq",
        "phenompv2_bns_roq",
        "phenompv2nrtidalv2_roq",
    }
    if mode == "phenompv2_bbh_roq":
        return _choose_phenompv2_bbh_roq(trigger_values["chirp_mass"])
    if mode in bns_modes:
        return _choose_bns_roq(trigger_values["chirp_mass"], mode)
    if mode == "low_q_phenompv2_roq":
        return _choose_low_q_pv2_roq(trigger_values["chirp_mass"])
    if mode == "phenomxphm_roq":
        return _choose_xphm_roq(trigger_values["chirp_mass"])
    if mode == "test":
        return _get_default_likelihood_args(trigger_values)
    # Anything else is treated as a path to a JSON settings file
    return _get_cbc_likelihood_args_from_json(mode, trigger_values)
[docs]def_choose_phenompv2_bbh_roq(chirp_mass,ignore_no_params=False):"""Choose an appropriate PhenomPv2 ROQ folder, and return likelihood arguments and quantities characterizing likelihood. The bases were developed in the work of arXiv:1604.08253. For a high-mass trigger with chirp mass above 35 solar mass, this returns arguments with the standard likelihood `GravitationalWaveTransient`, as the analysis is computationally cheap anyway. Parameters ---------- chirp_mass: float ignore_no_params: bool If True, this ignores FileNotFoundError raised when roq params file is not found, which is useful for testing this command outside the CIT cluster. Returns ------- likelihood_args: dict likelihood_parameter_bounds: dict bounds of parameter space where likelihood is expected to be accurate minimum_frequency: float minimum frequency of likelihood integration maximum_frequency: float maximum frequency of likelihood integration duration: float inverse of frequency interval of likelihood integration 
"""likelihood_args={"waveform_approximant":"IMRPhenomPv2",}likelihood_parameter_bounds={"spin_template":"precessing"}ifchirp_mass>35:likelihood_args["likelihood_type"]="GravitationalWaveTransient"likelihood_args["time_marginalization"]=Falselikelihood_parameter_bounds["chirp_mass_min"]=25likelihood_parameter_bounds["chirp_mass_max"]=200likelihood_parameter_bounds["mass_ratio_min"]=0.125likelihood_parameter_bounds["a_1_max"]=0.99likelihood_parameter_bounds["a_2_max"]=0.99minimum_frequency=20maximum_frequency=1024duration=4else:likelihood_args["likelihood_type"]="ROQGravitationalWaveTransient"roq_scale_factor=1duration=_get_default_duration(chirp_mass)likelihood_args["roq_folder"]=f"/home/cbc/ROQ_data/IMRPhenomPv2/{duration}s"ifchirp_mass<0.9:roq_scale_factor=2elifchirp_mass<1.43:roq_scale_factor=1.6roq_params_file=os.path.join(likelihood_args["roq_folder"],"params.dat")ifos.path.exists(roq_params_file):roq_params=np.genfromtxt(roq_params_file,names=True)elifignore_no_params:roq_params={"chirpmassmin":roq_scale_factor*chirp_mass*0.9,"chirpmassmax":roq_scale_factor*chirp_mass*1.1,"qmax":8,"compmin":0,"chiL1min":-0.99,"chiL1max":0.99,"chiL2min":-0.99,"chiL2max":0.99,"flow":20,"fhigh":1024,"seglen":4,}logger.warning(f"{roq_params_file} not found. 
ROQ parameters are set to {roq_params}.")else:raiseFileNotFoundError(f"{roq_params_file} not found.")likelihood_args["roq_scale_factor"]=roq_scale_factorlikelihood_parameter_bounds["chirp_mass_min"]=(roq_params["chirpmassmin"]/roq_scale_factor)likelihood_parameter_bounds["chirp_mass_max"]=(roq_params["chirpmassmax"]/roq_scale_factor)likelihood_parameter_bounds["mass_ratio_min"]=1/roq_params["qmax"]likelihood_parameter_bounds["comp_min"]=(roq_params["compmin"]/roq_scale_factor)likelihood_parameter_bounds["a_1_max"]=roq_params["chiL1max"]likelihood_parameter_bounds["a_2_max"]=roq_params["chiL2max"]minimum_frequency=roq_params["flow"]*roq_scale_factormaximum_frequency=roq_params["fhigh"]*roq_scale_factorduration=roq_params["seglen"]/roq_scale_factorreturn(likelihood_args,likelihood_parameter_bounds,minimum_frequency,maximum_frequency,duration,)
def _choose_bns_roq(chirp_mass, mode):
    """Choose an appropriate BNS-mass ROQ basis file, and return likelihood
    arguments and quantities characterizing likelihood. The review information
    of those bases are found at https://git.ligo.org/pe/O4/review_bns_roq/-/wikis.

    Parameters
    ----------
    chirp_mass: float
    mode: str
        Allowed options are "lowspin_phenomd_narrowmc_roq",
        "lowspin_phenomd_broadmc_roq", "phenompv2_bns_roq", and
        "phenompv2nrtidalv2_roq".

    Returns
    -------
    likelihood_args: dict
    likelihood_parameter_bounds: dict
        bounds of parameter space where likelihood is expected to be accurate
    minimum_frequency: float
        minimum frequency of likelihood integration
    maximum_frequency: float
        maximum frequency of likelihood integration
    duration: float
        inverse of frequency interval of likelihood integration
    """
    # When phase marginalization is used, psi + np.pi / 2 is indistingushable
    # from psi. That is why decreasing its maximum to pi / 2 does not change
    # the inference results at all.
    likelihood_parameter_bounds = {"mass_ratio_min": 0.125, "psi_max": np.pi / 2}
    if mode == "lowspin_phenomd_narrowmc_roq":
        waveform_approximant = "IMRPhenomD"
        roq_dir = "/home/roq/IMRPhenomD/lowspin_narrowmc_bns"
        likelihood_parameter_bounds["a_1_max"] = 0.05
        likelihood_parameter_bounds["a_2_max"] = 0.05
        likelihood_parameter_bounds["spin_template"] = "aligned"
    elif mode == "lowspin_phenomd_broadmc_roq":
        waveform_approximant = "IMRPhenomD"
        roq_dir = "/home/roq/IMRPhenomD/lowspin_broadmc_bns"
        likelihood_parameter_bounds["a_1_max"] = 0.05
        likelihood_parameter_bounds["a_2_max"] = 0.05
        likelihood_parameter_bounds["spin_template"] = "aligned"
    elif mode == "lowspin_phenomd_fhigh1024_roq":
        waveform_approximant = "IMRPhenomD"
        roq_dir = "/home/roq/IMRPhenomD/lowspin_fhigh1024"
        likelihood_parameter_bounds["a_1_max"] = 0.05
        likelihood_parameter_bounds["a_2_max"] = 0.05
        likelihood_parameter_bounds["spin_template"] = "aligned"
    elif mode == "lowspin_taylorf2_roq":
        waveform_approximant = "TaylorF2"
        roq_dir = "/home/roq/TaylorF2/lowspin_narrowmc_bns"
        likelihood_parameter_bounds["a_1_max"] = 0.05
        likelihood_parameter_bounds["a_2_max"] = 0.05
        likelihood_parameter_bounds["spin_template"] = "aligned"
    elif mode == "phenompv2_bns_roq":
        waveform_approximant = "IMRPhenomPv2"
        roq_dir = "/home/roq/IMRPhenomPv2/bns"
        likelihood_parameter_bounds["a_1_max"] = 0.99
        likelihood_parameter_bounds["a_2_max"] = 0.99
        likelihood_parameter_bounds["spin_template"] = "precessing"
    elif mode == "phenompv2nrtidalv2_roq":
        waveform_approximant = "IMRPhenomPv2_NRTidalv2"
        roq_dir = "/home/roq/IMRPhenomPv2_NRTidalv2/bns"
        likelihood_parameter_bounds["a_1_max"] = 0.4
        likelihood_parameter_bounds["a_2_max"] = 0.4
        likelihood_parameter_bounds["spin_template"] = "precessing"
        likelihood_parameter_bounds["lambda_1_max"] = 5000
        likelihood_parameter_bounds["lambda_2_max"] = 5000
    logger.info(f"Searching for a basis file in {roq_dir} ...")
    # The chirp mass boundaries are chosen so that this passes priors.validate_prior.
    if 4.0 > chirp_mass > 2.35:
        basis = os.path.join(roq_dir, "basis_64s.hdf5")
        likelihood_parameter_bounds["chirp_mass_min"] = 2.1
        likelihood_parameter_bounds["chirp_mass_max"] = 4.0
        maximum_frequency = 2048
        duration = 64
    elif chirp_mass > 1.54:
        basis = os.path.join(roq_dir, "basis_128s.hdf5")
        likelihood_parameter_bounds["chirp_mass_min"] = 1.4
        likelihood_parameter_bounds["chirp_mass_max"] = 2.6
        maximum_frequency = 4096
        duration = 128
    elif chirp_mass > 1.012:
        basis = os.path.join(roq_dir, "basis_256s.hdf5")
        likelihood_parameter_bounds["chirp_mass_min"] = 0.92
        likelihood_parameter_bounds["chirp_mass_max"] = 1.7
        maximum_frequency = 4096
        duration = 256
    elif chirp_mass > 0.66:
        basis = os.path.join(roq_dir, "basis_512s.hdf5")
        likelihood_parameter_bounds["chirp_mass_min"] = 0.6
        likelihood_parameter_bounds["chirp_mass_max"] = 1.1
        maximum_frequency = 4096
        duration = 512
    else:
        raise ValueError(
            f"No BNS-mass {waveform_approximant} basis has been found for "
            f"chirp_mass={chirp_mass}!"
        )
    # These modes cap the integration frequency regardless of the basis
    if mode == "lowspin_phenomd_fhigh1024_roq" or mode == "lowspin_taylorf2_roq":
        maximum_frequency = 1024
    logger.info(f"The selected ROQ basis file is {basis}.")
    return (
        {
            "likelihood_type": "ROQGravitationalWaveTransient",
            "roq_linear_matrix": basis,
            "roq_quadratic_matrix": basis,
            "roq_scale_factor": 1,
            "waveform_approximant": waveform_approximant,
            "enforce_signal_duration": False,
        },
        likelihood_parameter_bounds,
        20,
        maximum_frequency,
        duration,
    )
[docs]def_choose_low_q_pv2_roq(chirp_mass):"""Choose an appropriate low-mass-ratio IMRPhenomPv2 ROQ basis file and return likelihood arguments and quantities characterizing likelihood. Parameters ---------- chirp_mass: float Returns ------- likelihood_args: dict likelihood_parameter_bounds: dict bounds of parameter space where likelihood is expected to be accurate minimum_frequency: float minimum frequency of likelihood integration maximum_frequency: float maximum frequency of likelihood integration duration: float inverse of frequency interval of likelihood integration """likelihood_parameter_bounds={"mass_ratio_min":0.06,"a_1_max":0.99,"a_2_max":0.99,"spin_template":"precessing","psi_max":np.pi/2,}roq_dir="/home/roq/IMRPhenomPv2/low_mass_ratio"if21.0>chirp_mass>9.57:basis=os.path.join(roq_dir,"basis_8s.hdf5")likelihood_parameter_bounds["chirp_mass_min"]=8.71likelihood_parameter_bounds["chirp_mass_max"]=20.99duration=8elifchirp_mass>5.72:basis=os.path.join(roq_dir,"basis_16s.hdf5")likelihood_parameter_bounds["chirp_mass_min"]=5.21likelihood_parameter_bounds["chirp_mass_max"]=10.99duration=16elifchirp_mass>3.63:basis=os.path.join(roq_dir,"basis_32s.hdf5")likelihood_parameter_bounds["chirp_mass_min"]=3.31likelihood_parameter_bounds["chirp_mass_max"]=6.29duration=32elifchirp_mass>2.35:basis=os.path.join(roq_dir,"basis_64s.hdf5")likelihood_parameter_bounds["chirp_mass_min"]=2.101likelihood_parameter_bounds["chirp_mass_max"]=3.999duration=64elifchirp_mass>1.5:basis=os.path.join(roq_dir,"basis_128s.hdf5")likelihood_parameter_bounds["chirp_mass_min"]=1.401likelihood_parameter_bounds["chirp_mass_max"]=2.599duration=128else:raiseValueError(f"No low-mass-ratio IMRPhenomPv2 basis has been found for "f"chirp_mass={chirp_mass}!")logger.info(f"The selected ROQ basis file is 
{basis}.")return({"likelihood_type":"ROQGravitationalWaveTransient","roq_linear_matrix":basis,"roq_quadratic_matrix":basis,"roq_scale_factor":1,"waveform_approximant":"IMRPhenomPv2","enforce_signal_duration":False,},likelihood_parameter_bounds,20,1024,duration,)
[docs]def_choose_xphm_roq(chirp_mass):"""Choose an appropriate IMRPhenomXPHM ROQ basis file and return likelihood arguments and quantities characterizing likelihood. Parameters ---------- chirp_mass: float Returns ------- likelihood_args: dict likelihood_parameter_bounds: dict bounds of parameter space where likelihood is expected to be accurate minimum_frequency: float minimum frequency of likelihood integration maximum_frequency: float maximum frequency of likelihood integration duration: float inverse of frequency interval of likelihood integration """likelihood_parameter_bounds={"mass_ratio_min":0.05,"a_1_max":0.99,"a_2_max":0.99,"spin_template":"precessing",}roq_dir="/home/roq/IMRPhenomXPHM"ifchirp_mass>25:likelihood_args={"likelihood_type":"GravitationalWaveTransient","waveform_approximant":"IMRPhenomXPHM","reference_frequency":20,"phase_marginalization":False,"enforce_signal_duration":False,"time_marginalization":False,}likelihood_parameter_bounds["chirp_mass_min"]=18.8likelihood_parameter_bounds["chirp_mass_max"]=200duration=8else:likelihood_args={"likelihood_type":"ROQGravitationalWaveTransient","roq_scale_factor":1,"waveform_approximant":"IMRPhenomXPHM","reference_frequency":20,"phase_marginalization":False,"enforce_signal_duration":False,}ifchirp_mass>16:basis=os.path.join(roq_dir,"basis_16s.hdf5")likelihood_args["roq_linear_matrix"]=basislikelihood_args["roq_quadratic_matrix"]=basislikelihood_parameter_bounds["chirp_mass_min"]=12.8likelihood_parameter_bounds["chirp_mass_max"]=31.8duration=16elifchirp_mass>10.03:basis=os.path.join(roq_dir,"basis_32s.hdf5")likelihood_args["roq_linear_matrix"]=basislikelihood_args["roq_quadratic_matrix"]=basislikelihood_parameter_bounds["chirp_mass_min"]=10.03likelihood_parameter_bounds["chirp_mass_max"]=19.04duration=32else:raiseValueError(f"No IMRPhenomXPHM basis has been found for chirp_mass={chirp_mass}!")logger.info(f"The selected ROQ basis file is 
{basis}.")return(likelihood_args,likelihood_parameter_bounds,20,4096,duration,)
def _get_cbc_likelihood_args_from_json(filename, trigger_values):
    """Load input JSON file containing likelihood settings and determine
    appropriate likelihood arguments and parameter bounds depending on input
    trigger values.

    The json file is supposed to contain `likelihood_args`,
    `likelihood_parameter_bounds`, and/or `trigger_dependent`. The first two
    contain default arguments and parameter bounds respectively. The last item
    contains trigger-dependent settings to update the default settings. It
    contains `range`, `likelihood_args`, and/or `likelihood_parameter_bounds`.
    `range` contains dictionary of trigger-parameter ranges, whose keys are
    parameter names (`chirp_mass`, `mass_ratio`, `spin_1z`, and/or `spin_2z`)
    and values are lists of their ranges. `likelihood_args` contains lists of
    arguments, one of which is chosen depending on trigger values and used to
    update the default likelihood arguments. `likelihood_parameter_bounds`
    contains lists of parameter bounds to update their default.

    Parameters
    ----------
    filename: str
    trigger_values: dict

    Returns
    -------
    likelihood_args: dict
    likelihood_parameter_bounds: dict
        bounds of parameter space where likelihood is expected to be accurate
    minimum_frequency: float
        minimum frequency of likelihood integration
    maximum_frequency: float
        maximum frequency of likelihood integration
    duration: float
        inverse of frequency interval of likelihood integration

    Raises
    ------
    ValueError
        If no trigger-dependent range matches the given trigger values
    """
    import json

    # The log message previously contained no placeholder; it now reports
    # the actual settings file, as the docstring examples show.
    logger.info(f"Loading likelihood settings from {filename} ...")
    with open(filename, "r") as ff:
        settings = json.load(ff)
    likelihood_args = dict()
    if "likelihood_args" in settings:
        likelihood_args.update(settings["likelihood_args"])
    likelihood_parameter_bounds = dict()
    if "likelihood_parameter_bounds" in settings:
        likelihood_parameter_bounds.update(settings["likelihood_parameter_bounds"])
    if "trigger_dependent" in settings:
        trigger_range_dict = settings["trigger_dependent"]["range"]
        number_of_ranges = len(list(trigger_range_dict.values())[0])
        # A range matches only if every listed trigger parameter is inside it
        in_range = np.ones(number_of_ranges, dtype=bool)
        for key in trigger_range_dict:
            trigger = trigger_values[key]
            trigger_range = np.array(trigger_range_dict[key])
            in_range *= trigger >= trigger_range[:, 0]
            in_range *= trigger <= trigger_range[:, 1]
        if not any(in_range):
            raise ValueError(
                "No likelihood settings found for the trigger values: "
                f"{trigger_values}!"
            )
        # First matching range wins
        selected_idx = np.arange(number_of_ranges)[in_range][0]
        trigger_dependent_settings = settings["trigger_dependent"]
        if "likelihood_args" in trigger_dependent_settings:
            likelihood_args.update(
                trigger_dependent_settings["likelihood_args"][selected_idx]
            )
        if "likelihood_parameter_bounds" in trigger_dependent_settings:
            likelihood_parameter_bounds.update(
                trigger_dependent_settings["likelihood_parameter_bounds"][selected_idx]
            )
    # Frequencies and duration are returned separately, not as likelihood args
    minimum_frequency = likelihood_args.pop("minimum_frequency")
    maximum_frequency = likelihood_args.pop("maximum_frequency")
    duration = likelihood_args.pop("duration")
    return (
        likelihood_args,
        likelihood_parameter_bounds,
        minimum_frequency,
        maximum_frequency,
        duration,
    )
def _get_default_likelihood_args(trigger_values):
    """Return generic likelihood settings centred on the trigger chirp mass.

    Parameters
    ----------
    trigger_values: dict
        Must contain "chirp_mass"

    Returns
    -------
    tuple
        (likelihood_args, bounds, minimum_frequency, maximum_frequency,
        duration), with empty likelihood_args
    """
    logger.info("Using default likelihood settings, these may not be optimal.")
    # Bracket the trigger chirp mass by a factor of two either side
    bounds = dict(
        chirp_mass_min=trigger_values["chirp_mass"] / 2,
        chirp_mass_max=trigger_values["chirp_mass"] * 2,
        spin_template="precessing",
        a_1_max=0.99,
        a_2_max=0.99,
        mass_ratio_min=0.125,
    )
    duration = _get_default_duration(trigger_values["chirp_mass"])
    return dict(), bounds, 20, 1024, duration
def copy_and_save_data(
    ifos,
    start_time,
    end_time,
    channel_dict,
    outdir,
    gracedbid,
    query_kafka=True,
    n_attempts=5,
    replay=False,
):
    """Attempt to read the strain data from internal servers and save frame
    files to the run directory.

    If `query_kafka` is True, then attempt to fetch the data from
    `/dev/shm/kafka/` (preferred method for low-latency/online analyses).
    If `query_kafka` is False or the data cannot be found in
    `/dev/shm/kafka/`, this function will attempt to get the data from
    TimeSeries.get(), called in a loop to allow for multiple attempts.
    If data cannot be found for all the ifos, returns None, and data
    reading will be attempted by the bilby_pipe data_generation stage.

    Parameters
    ----------
    ifos: list
        List of ifos for this trigger
    start_time: float
        Safe start time for data segment
    end_time: float
        Safe end time for data segment
    channel_dict: dict
        Dictionary of channel names
    outdir: str
        Directory to save frame files
    gracedbid: str
        GraceDB id of event
    query_kafka: bool
        Whether to attempt to copy frame files from `/dev/shm/kafka/`
    n_attempts: int
        Number of attempts to call TimeSeries.get() before failing to
        obtain data
    replay: bool
        Whether to try to fetch O3ReplayMDC data from the kafka directory.
        Only relevant if query_kafka = True.

    Returns
    -------
    data_dict: dict
        Dictionary with {ifo: path_to_copied_frame_file}. None, if data
        were not able to be obtained for all ifos.
    """
    ifo_data = dict()
    start_time = int(start_time)
    end_time = int(end_time)
    for ifo in ifos:
        channel = f"{ifo}:{channel_dict[ifo]}"
        if query_kafka:
            try:
                logger.info(f"Querying kafka directory for {ifo} data")
                data = read_and_concat_data_from_kafka(
                    ifo, start_time, end_time, channel=channel, replay=replay
                )
            except FileNotFoundError:
                # Kafka files missing: fall back to open data or NDS
                if channel.endswith("GWOSC-STRAIN"):
                    logger.info(
                        "Failed to load kafka data, calling TimeSeries.fetch_open_data"
                    )
                    data = TimeSeries.fetch_open_data(
                        ifo=ifo, start=start_time, end=end_time
                    )
                    data.name = channel
                    data.channel = channel
                else:
                    logger.info(
                        f"Failed to obtain {ifo} data from kafka directory. "
                        "Calling TimeSeries.get"
                    )
                    data = attempt_gwpy_get(
                        channel=channel,
                        start_time=start_time,
                        end_time=end_time,
                        n_attempts=n_attempts,
                    )
        elif channel.endswith("GWOSC-STRAIN"):
            data = TimeSeries.fetch_open_data(ifo=ifo, start=start_time, end=end_time)
            data.name = channel
            data.channel = channel
        else:
            data = attempt_gwpy_get(
                channel=channel,
                start_time=start_time,
                end_time=end_time,
                n_attempts=n_attempts,
            )
        if data is False:
            # attempt_gwpy_get signals total failure with False; give up early
            logger.info(f"Could not find data for {ifo} with channel name {channel}")
            break
        else:
            ifo_data[ifo] = data

    if all(ifo in ifo_data.keys() for ifo in ifos):
        data_paths = dict()
        for ifo in ifo_data.keys():
            series = ifo_data[ifo]
            actual_start = series.times[0].value
            actual_duration = series.duration.value
            # Prefer an integer duration in the frame filename when exact
            if int(actual_duration) == actual_duration:
                actual_duration = int(actual_duration)
            datapath = os.path.join(
                outdir,
                f"{ifo[0]}-{ifo}_{gracedbid}_llhoft-{actual_start}-{actual_duration}.gwf",
            )
            series.write(datapath)
            data_paths[ifo] = datapath
            logger.info(f"Written {ifo} data to {datapath}")
        data_dict = data_paths
    else:
        logger.info(
            "Not getting data in pre-generation step. "
            "Will get data in data generation stage."
        )
        data_dict = None
    return data_dict
def prepare_run_configurations(
    candidate,
    gracedb,
    outdir,
    channel_dict,
    sampler_kwargs,
    webdir,
    search_type="cbc",
    cbc_likelihood_mode="phenompv2_bbh_roq",
    settings=None,
    psd_cut=0.95,
    query_kafka=True,
    replay=False,
    recommended_distance_max=None,
):
    """Creates ini file from defaults and candidate contents

    Parameters
    ----------
    candidate:
        Contains contents of GraceDB event
    gracedb: str
        GraceDB id of event
    outdir: str
        Output directory where the ini file and all output is written
    channel_dict: dict
        Dictionary of channel names
    sampler_kwargs: str
        Set of sampler arguments, or option for set of sampler arguments
    webdir: str
        Directory to store summary pages
    search_type: str
        What kind of search identified the trigger, options are "cbc" and "burst"
    cbc_likelihood_mode: str
        Built-in CBC likelihood mode or path to a JSON file containing likelihood
        settings. The built-in settings include 'phenompv2_bbh_roq',
        'phenompv2_bns_roq', 'phenompv2nrtidalv2_roq',
        'lowspin_phenomd_narrowmc_roq', 'lowspin_phenomd_broadmc_roq', and 'test'.
    settings: str
        JSON filename containing settings to override the defaults
    psd_cut: float
        Fractional maximum frequency cutoff relative to the maximum frequency of
        pipeline psd
    query_kafka: bool
        Whether to first attempt to query the kafka directory for data before
        attempting a call to gwpy TimeSeries.get()
    replay: bool
        Whether to try to fetch O3ReplayMDC data from the kafka directory.
        Only relevant if query_kafka = True.
    recommended_distance_max: float
        Recommended prior maximum of luminosity distance in unit of Mpc.
        If it is None, the maximum falls back to the default value.

    Returns
    -------
    filename: str
        Generated ini filename

    Raises
    ------
    BilbyPipeError
        If ``search_type`` is neither "cbc" nor "burst".
    """
    if settings is not None:
        # `json` is already imported at module level; load the override file
        with open(settings, "r") as ff:
            settings = json.load(ff)
    else:
        settings = dict()
    if search_type == "cbc":
        (
            trigger_values,
            superevent,
            trigger_time,
            ifos,
            reference_frame,
            time_reference,
        ) = _read_cbc_candidate(candidate)
        (
            likelihood_args,
            likelihood_parameter_bounds,
            minimum_frequency,
            maximum_frequency,
            duration,
        ) = _get_cbc_likelihood_args(cbc_likelihood_mode, trigger_values)
        (
            prior_file,
            distance_marginalization_lookup_table,
        ) = generate_cbc_prior_from_template(
            trigger_values["chirp_mass"],
            likelihood_parameter_bounds,
            outdir,
            fast_test=(sampler_kwargs == "FastTest"),
            phase_marginalization=likelihood_args.get("phase_marginalization", True),
            recommended_distance_max=recommended_distance_max,
        )
        calibration_model, calib_dict = calibration_dict_lookup(trigger_time, ifos)
        # Fewer spline nodes for a fast test; none when no calibration model found
        if calibration_model is None:
            spline_calibration_nodes = 0
        elif sampler_kwargs == "FastTest":
            spline_calibration_nodes = 4
        else:
            spline_calibration_nodes = 10
        extra_config_arguments = dict(
            reference_frequency=100,
            time_marginalization=False,
            distance_marginalization=True,
            phase_marginalization=True,
            distance_marginalization_lookup_table=distance_marginalization_lookup_table,
            plot_trace=True,
            plot_data=True,
            calibration_model=calibration_model,
            calibration_correction_type=None,
            spline_calibration_envelope_dict=calib_dict,
            spline_calibration_nodes=spline_calibration_nodes,
        )
        extra_config_arguments.update(likelihood_args)
        # Presence of tidal-deformability bounds implies a BNS analysis
        if (
            "lambda_1_max" in likelihood_parameter_bounds
            or "lambda_2_max" in likelihood_parameter_bounds
        ):
            extra_config_arguments["default_prior"] = "BNSPriorDict"
            extra_config_arguments[
                "frequency_domain_source_model"
            ] = "binary_neutron_star_roq"
    elif search_type == "burst":
        # NOTE(review): this branch never sets reference_frame/time_reference,
        # which are referenced when building config_dict below — confirm the
        # burst path is exercised/valid upstream.
        centre_frequency, superevent, trigger_time, ifos = _read_burst_candidate(
            candidate
        )
        minimum_frequency = min(20, centre_frequency / 2)
        maximum_frequency = next_power_of_2(centre_frequency * 2)
        duration = 4
        extra_config_arguments = dict(
            frequency_domain_source_model="bilby.gw.source.sinegaussian",
            default_prior="PriorDict",
            time_marginalization=False,
            phase_marginalization=False,
            sampler_kwargs="FastTest",
        )
        prior_file = generate_burst_prior_from_template(
            minimum_frequency=minimum_frequency,
            maximum_frequency=maximum_frequency,
            outdir=outdir,
        )
    else:
        raise BilbyPipeError(
            f"search_type should be either 'cbc' or 'burst', not {search_type}"
        )
    config_dict = dict(
        label=gracedb,
        outdir=outdir,
        accounting="ligo.dev.o4.cbc.pe.bilby",
        maximum_frequency=maximum_frequency,
        minimum_frequency=minimum_frequency,
        sampling_frequency=16384,
        trigger_time=trigger_time,
        detectors=ifos,
        channel_dict=channel_dict,
        deltaT=0.2,
        prior_file=prior_file,
        duration=duration,
        sampler="dynesty",
        sampler_kwargs=sampler_kwargs,
        webdir=webdir,
        generation_pool="local-pool",
        local_plot=False,
        transfer_files=False,
        create_summary=False,
        summarypages_arguments={"gracedb": gracedb},
        plot_trace=True,
        plot_data=True,
        plot_calibration=False,
        plot_corner=False,
        plot_marginal=False,
        plot_skymap=False,
        plot_waveform=False,
        overwrite_outdir=True,
        result_format="hdf5",
        reference_frame=reference_frame,
        time_reference=time_reference,
    )
    if sampler_kwargs == "FastTest":
        config_dict["n_parallel"] = 2
    else:
        config_dict["n_parallel"] = 4
    if candidate.get("coinc_file", None) is not None:
        psd_dict, psd_maximum_frequency = extract_psds_from_xml(
            coinc_file=candidate["coinc_file"], ifos=ifos, outdir=outdir
        )
        config_dict["psd_dict"] = psd_dict
        if psd_maximum_frequency is not None:
            # Cap the analysis band below the pipeline PSD roll-off
            psd_maximum_frequency *= min(psd_cut, 1)
            if config_dict["maximum_frequency"] > psd_maximum_frequency:
                config_dict["maximum_frequency"] = psd_maximum_frequency
                logger.info(
                    f"maximum_frequency is reduced to {psd_maximum_frequency} "
                    "due to the limitation of pipeline psd"
                )
    else:
        psd_dict = dict()
    start_data, end_data = (trigger_time - duration - 2, trigger_time + 4)
    # Without pipeline PSDs for all detectors, fetch extra data so bilby_pipe
    # can estimate PSDs itself
    if not all(ifo in psd_dict for ifo in ifos):
        start_data -= min(1024, 32 * duration)
    data_dict = copy_and_save_data(
        ifos=ifos,
        start_time=start_data,
        end_time=end_data,
        channel_dict=channel_dict,
        outdir=outdir,
        gracedbid=gracedb,
        query_kafka=query_kafka,
        replay=replay,
    )
    config_dict["data_dict"] = data_dict
    config_dict.update(extra_config_arguments)
    config_dict["summarypages_arguments"]["nsamples_for_skymap"] = 5000
    # User-supplied settings take precedence over everything above
    config_dict.update(settings)
    comment = (
        "# Configuration ini file generated from GraceDB "
        f"for event id {gracedb} superevent id {superevent}"
    )
    filename = f"{outdir}/bilby_config.ini"
    _parser = parser.create_parser()
    _parser.write_to_file(
        filename=filename,
        args=config_dict,
        overwrite=True,
        include_description=False,
        exclude_default=True,
        comment=comment,
    )
    return filename
def create_config_file(
    candidate,
    gracedb,
    outdir,
    channel_dict,
    sampler_kwargs,
    webdir,
    search_type="cbc",
    cbc_likelihood_mode="phenompv2_bbh_roq",
    settings=None,
    psd_cut=0.95,
    query_kafka=True,
    replay=False,
):
    """Deprecated alias for :func:`prepare_run_configurations`.

    Logs a deprecation warning and forwards every argument unchanged.
    """
    logger.warning(
        "create_config_file is deprecated and will be removed in a future version."
        "Calling prepare_run_configurations instead."
    )
    forwarded = dict(
        candidate=candidate,
        gracedb=gracedb,
        outdir=outdir,
        channel_dict=channel_dict,
        sampler_kwargs=sampler_kwargs,
        webdir=webdir,
        search_type=search_type,
        cbc_likelihood_mode=cbc_likelihood_mode,
        settings=settings,
        psd_cut=psd_cut,
        query_kafka=query_kafka,
        replay=replay,
    )
    return prepare_run_configurations(**forwarded)
[docs]def_get_default_duration(chirp_mass):"""Return default duration based on chirp mass Parameters ---------- chirp_mass: float Returns ------- duration: float """ifchirp_mass>13.53:duration=4elifchirp_mass>8.73:duration=8elifchirp_mass>5.66:duration=16elifchirp_mass>3.68:duration=32elifchirp_mass>2.39:duration=64else:duration=128returnduration
def generate_cbc_prior_from_template(
    chirp_mass,
    likelihood_parameter_bounds,
    outdir,
    fast_test=False,
    phase_marginalization=True,
    recommended_distance_max=None,
):
    """Generate a cbc prior file from a template and write it to file.

    This returns the paths to the prior file and the corresponding distance
    look-up table

    Parameters
    ----------
    chirp_mass: float
    likelihood_parameter_bounds: dict
    outdir: str
    fast_test: bool (optional, default is False)
    phase_marginalization: bool (optional, default is True)
    recommended_distance_max: float (default: None)

    Returns
    -------
    prior_file: str
    lookup_table: str
    """

    def data_file(name):
        # Resolve a template shipped alongside this module
        return os.path.join(
            os.path.dirname(os.path.realpath(__file__)), f"data_files/{name}"
        )

    # Narrow the chirp-mass prior around the trigger value; the window widens
    # with mass and is effectively unbounded above 8
    if chirp_mass < 2:
        window = (chirp_mass - 0.01, chirp_mass + 0.01)
    elif chirp_mass < 4:
        window = (chirp_mass - 0.1, chirp_mass + 0.1)
    elif chirp_mass < 8:
        window = (chirp_mass * 0.9, chirp_mass * 1.1)
    else:
        window = (0, 1000000)
    chirp_mass_min = max(likelihood_parameter_bounds["chirp_mass_min"], window[0])
    chirp_mass_max = min(likelihood_parameter_bounds["chirp_mass_max"], window[1])

    # Extra prior lines appended after the template is filled in
    extra_lines = ""
    if "comp_min" in likelihood_parameter_bounds:
        comp_min = likelihood_parameter_bounds["comp_min"]
        extra_lines += (
            f"mass_1 = Constraint(name='mass_1', minimum={comp_min}, maximum=1000)\n"
        )
        extra_lines += (
            f"mass_2 = Constraint(name='mass_2', minimum={comp_min}, maximum=1000)\n"
        )
    if "lambda_1_max" in likelihood_parameter_bounds:
        lambda_1_max = likelihood_parameter_bounds["lambda_1_max"]
        extra_lines += (
            f"lambda_1 = Uniform(name='lambda_1', minimum=0, maximum={lambda_1_max})\n"
        )
    if "lambda_2_max" in likelihood_parameter_bounds:
        lambda_2_max = likelihood_parameter_bounds["lambda_2_max"]
        extra_lines += (
            f"lambda_2 = Uniform(name='lambda_2', minimum=0, maximum={lambda_2_max})\n"
        )

    distance_bounds, lookup_table = _get_distance_lookup(
        chirp_mass, phase_marginalization=phase_marginalization
    )
    d_min, d_max = distance_bounds
    if recommended_distance_max is not None and d_max < recommended_distance_max:
        # Scale the upper bound by an integer factor so a matching lookup
        # table name can be constructed
        d_max *= np.ceil(recommended_distance_max / d_max)
        distance_bounds = (d_min, d_max)
        lookup_table = os.path.join(
            outdir, f"distance-marginalization-lookup-{int(d_max)}.npz"
        )

    if fast_test:
        template = data_file("fast.prior.template")
        with open(template, "r") as old_prior:
            prior_string = (
                old_prior.read().format(
                    mc_min=chirp_mass_min,
                    mc_max=chirp_mass_max,
                    d_min=distance_bounds[0],
                    d_max=distance_bounds[1],
                )
                + extra_lines
            )
    else:
        spin_template = likelihood_parameter_bounds["spin_template"]
        a_1_max = likelihood_parameter_bounds["a_1_max"]
        a_2_max = likelihood_parameter_bounds["a_2_max"]
        if spin_template == "precessing":
            template = data_file("precessing_spin.prior.template")
        elif spin_template == "aligned":
            template = data_file("aligned_spin.prior.template")
        else:
            raise ValueError(f"Unknown spin template: {spin_template}")
        with open(template, "r") as old_prior:
            prior_string = (
                old_prior.read().format(
                    mc_min=chirp_mass_min,
                    mc_max=chirp_mass_max,
                    q_min=likelihood_parameter_bounds["mass_ratio_min"],
                    a_1_max=a_1_max,
                    a_2_max=a_2_max,
                    d_min=distance_bounds[0],
                    d_max=distance_bounds[1],
                    psi_max=likelihood_parameter_bounds.get("psi_max", np.pi),
                )
                + extra_lines
            )

    prior_file = os.path.join(outdir, "online.prior")
    with open(prior_file, "w") as new_prior:
        new_prior.write(prior_string)
    return prior_file, lookup_table
def generate_burst_prior_from_template(
    minimum_frequency, maximum_frequency, outdir, template=None
):
    """Generate a prior file from a template and write it to file

    Parameters
    ----------
    minimum_frequency: float
        Minimum frequency for prior
    maximum_frequency: float
        Maximum frequency for prior
    outdir: str
        Path to the outdir (the prior is written to outdir/online.prior)
    template: str
        Alternative template file to use, otherwise the
        data_files/burst.prior.template file is used
    """
    if template is None:
        # Fall back to the template shipped alongside this module
        template = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            "data_files/burst.prior.template",
        )
    with open(template, "r") as source:
        filled = source.read().format(
            minimum_frequency=minimum_frequency, maximum_frequency=maximum_frequency
        )
    prior_file = os.path.join(outdir, "online.prior")
    with open(prior_file, "w") as destination:
        destination.write(filled)
    return prior_file
def read_and_concat_data_from_kafka(ifo, start, end, channel, replay=False):
    """Query the kafka directory for the gwf files with the desired data.

    Start and end should be set wide enough to include the entire duration.
    This will read in the individual gwf files and concatenate them into a
    single gwpy timeseries

    Parameters
    ----------
    ifo: str
        Interferometer name, e.g. "H1"
    start, end: float
        GPS bounds; one 1-second frame file is read per integer second
    channel: str
        Channel name passed to TimeSeries.read
    replay: bool
        If True, read from the "<ifo>_O3ReplayMDC" subdirectory instead

    Returns
    -------
    gwpy.timeseries.TimeSeries
        The joined data from all frame files
    """
    ifo_str = f"{ifo}_O3ReplayMDC" if replay else ifo
    kafka_directory = "/dev/shm/kafka"
    frames = TimeSeriesList(
        *(
            TimeSeries.read(
                f"{kafka_directory}/{ifo_str}/{ifo[0]}-{ifo_str}_llhoft-{time_sec}-1.gwf",
                channel=channel,
            )
            for time_sec in np.arange(start, end)
        )
    )
    return frames.join()
def create_parser():
    """Build the argument parser for the bilby_pipe_gracedb command line tool.

    Returns
    -------
    argparse.ArgumentParser
    """
    # Named arg_parser to avoid shadowing the module-level `parser` import
    arg_parser = argparse.ArgumentParser(
        description=__doc__,
        usage="%(prog)s [options]",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    event_source = arg_parser.add_mutually_exclusive_group(required=True)
    event_source.add_argument("--gracedb", type=str, help="GraceDB event id")
    event_source.add_argument("--json", type=str, help="Path to json GraceDB file")
    arg_parser.add_argument(
        "--psd-file",
        type=str,
        help="Path to ligolw-xml file containing the PSDs for the interferometers.",
    )
    arg_parser.add_argument(
        "--skymap-file",
        type=str,
        default=None,
        help=(
            "Path to fits file containing distance PDF. This is used to set \n"
            "prior bound of distance"
        ),
    )
    arg_parser.add_argument(
        "--disable-skymap-download",
        action="store_true",
        default=False,
        help=(
            "If no arguments are passed to --skymap-file, skymap is downloaded \n"
            "from GraceDB to determine the prior maximum of distance. This \n"
            "option can disable it to use the default distance maximum values."
        ),
    )
    arg_parser.add_argument(
        "--convert-to-flat-in-component-mass",
        action="store_true",
        default=False,
        help=(
            "Convert a flat-in chirp mass and mass-ratio prior file to flat \n"
            "in component mass during the post-processing. Note, the prior \n"
            "must be uniform in Mc and q with constraints in m1 and m2 for \n"
            "this to work. \n"
        ),
    )
    arg_parser.add_argument(
        "--outdir",
        type=str,
        help="Output directory where the ini file and all output is written.",
    )
    arg_parser.add_argument(
        "--output",
        type=str,
        choices=["ini", "full", "full-local", "full-submit"],
        help=(
            "Flag to create ini, generate directories and/or submit. \n"
            " ini : generates ini file \n"
            " full : generates ini and dag submission files (default) \n"
            " full-local : generates ini and dag submission files and run locally \n"
            " full-submit : generates ini and dag submission files and submits to condor \n"
        ),
        default="full",
    )
    arg_parser.add_argument(
        "--gracedb-url",
        type=str,
        help=(
            "GraceDB service url. \n"
            " Main page : https://gracedb.ligo.org/api/ (default) \n"
            " Playground : https://gracedb-playground.ligo.org/api/ \n"
        ),
        default="https://gracedb.ligo.org/api/",
    )
    arg_parser.add_argument(
        "--channel-dict",
        type=str,
        default="online",
        choices=list(CHANNEL_DICTS.keys()),
        help=(
            "Channel dictionary. \n"
            " online : use for main GraceDB page events from the current observing run "
            "(default)\n"
            " o2replay : use for playground GraceDB page events\n"
            " o3replay : use for playground GraceDB page events\n"
            " gwosc : use for events where the strain data is publicly "
            "available, e.g., previous observing runs\n"
        ),
    )
    arg_parser.add_argument(
        "--sampler-kwargs",
        type=str,
        default="DynestyDefault",
        help=(
            "Dictionary of sampler-kwargs to pass in, e.g., {nlive: 1000} OR "
            "pass pre-defined set of sampler-kwargs {DynestyDefault, BilbyMCMCDefault, FastTest}"
        ),
    )
    arg_parser.add_argument(
        "--cbc-likelihood-mode",
        type=str,
        default="phenompv2_bbh_roq",
        help=(
            "Built-in CBC likelihood mode or path to a JSON file containing likelihood settings. "
            "The built-in settings include 'phenompv2_bbh_roq', "
            "'lowspin_phenomd_narrowmc_roq', 'lowspin_phenomd_broadmc_roq', "
            "'lowspin_phenomd_fhigh1024_roq', 'lowspin_taylorf2_roq', "
            "'phenompv2_bns_roq', 'phenompv2nrtidalv2_roq', "
            "'low_q_phenompv2_roq', 'phenomxphm_roq', and 'test'."
        ),
    )
    arg_parser.add_argument(
        "--webdir",
        type=str,
        default=None,
        help=(
            "Directory to store summary pages. \n"
            " If not given, defaults to outdir/results_page"
        ),
    )
    arg_parser.add_argument(
        "--settings",
        type=str,
        default=None,
        help="JSON file containing extra settings to override the defaults",
    )
    arg_parser.add_argument(
        "--psd-cut",
        type=float,
        default=0.95,
        help=(
            "maximum frequency is set to this value multiplied by the maximum frequency of psd contained in coinc.xml."
            " This is to avoid likelihood overflow caused by the roll-off of pipeline psd due to low-pass filter."
        ),
    )
    arg_parser.add_argument(
        "--query-kafka",
        action=StoreBoolean,
        default=True,
        help=(
            "when fetching the data for analysis, check first it is in kafka, and if not, then try to query ifocache."
            "If False, query ifocache (via gwpy TimeSeries.get() ) by default."
        ),
    )
    return arg_parser
def main(args=None, unknown_args=None):
    """Entry point for bilby_pipe_gracedb.

    Fetches the candidate (from GraceDB or a local json file), builds the ini
    file via prepare_run_configurations, and optionally generates/submits the
    bilby_pipe dag.

    Parameters
    ----------
    args: argparse.Namespace, optional
        Pre-parsed arguments; if None, the command line is parsed.
    unknown_args: list, optional
        Unrecognized arguments to forward to bilby_pipe (ignored for
        --output ini).

    Raises
    ------
    BilbyPipeError
        If neither a GraceDB id nor a json file is provided, or the candidate
        group is not "cbc" or "burst".
    """
    if args is None:
        args, unknown_args = create_parser().parse_known_args()
    elif unknown_args is None:
        unknown_args = []
    # NOTE(review): threshold is > 1, so a single unknown argument passes
    # silently — confirm this is intended rather than > 0
    if len(unknown_args) > 1 and args.output == "ini":
        msg = [
            tcolors.WARNING,
            f"Unrecognized arguments {unknown_args}, these will be ignored",
            tcolors.END,
        ]
        logger.warning(" ".join(msg))
    outdir = args.outdir
    if args.json:
        candidate = read_from_json(args.json)
        gracedb = candidate["graceid"]
        if outdir is None:
            outdir = f"outdir_{gracedb}"
        check_directory_exists_and_if_not_mkdir(outdir)
        # A user-supplied PSD file stands in for the coinc.xml download
        if args.psd_file is not None and os.path.isfile(args.psd_file):
            candidate["coinc_file"] = args.psd_file
    elif args.gracedb:
        gracedb = args.gracedb
        gracedb_url = args.gracedb_url
        if outdir is None:
            outdir = f"outdir_{gracedb}"
        check_directory_exists_and_if_not_mkdir(outdir)
        candidate = read_from_gracedb(gracedb, gracedb_url, outdir)
    else:
        raise BilbyPipeError("Either gracedb ID or json file must be provided.")
    if args.skymap_file is not None:
        skymap_file = args.skymap_file
    elif not args.disable_skymap_download and args.gracedb:
        skymap_file = download_bayestar_skymap(gracedb, gracedb_url, outdir)
    else:
        skymap_file = None
    if skymap_file is not None:
        # Double the 95% credible upper bound to leave prior headroom
        recommended_distance_max = _read_distance_upper_bound_from_fits(
            skymap_file, level=0.95
        )
        if recommended_distance_max is not None:
            recommended_distance_max *= 2
    else:
        recommended_distance_max = None
    if args.webdir is not None:
        webdir = args.webdir
    else:
        webdir = os.path.join(outdir, "results_page")
    sampler_kwargs = args.sampler_kwargs
    channel_dict = CHANNEL_DICTS[args.channel_dict.lower()]
    if args.channel_dict.lower() == "o3replay":
        replay = True
    else:
        replay = False
    search_type = candidate["group"].lower()
    if search_type not in ["cbc", "burst"]:
        raise BilbyPipeError(f"Candidate group {candidate['group']} not recognised.")
    filename = prepare_run_configurations(
        candidate=candidate,
        gracedb=gracedb,
        outdir=outdir,
        channel_dict=channel_dict,
        sampler_kwargs=sampler_kwargs,
        webdir=webdir,
        psd_cut=args.psd_cut,
        search_type=search_type,
        cbc_likelihood_mode=args.cbc_likelihood_mode,
        settings=args.settings,
        query_kafka=args.query_kafka,
        replay=replay,
        recommended_distance_max=recommended_distance_max,
    )
    if args.output == "ini":
        # Fixed: the message previously printed a literal placeholder instead
        # of the generated ini filename
        logger.info(
            "Generating ini with default settings. Run using the command: \n"
            f" $ bilby_pipe {filename}"
        )
    else:
        arguments = ["bilby_pipe", filename]
        if args.output == "full":
            logger.info("Generating dag submissions files")
        if args.output == "full-local":
            logger.info("Generating dag submission files, running locally")
            arguments.append("--local")
        if args.output == "full-submit":
            logger.info("Generating dag submissions files, submitting to condor")
            arguments.append("--submit")
        if len(unknown_args) > 1:
            arguments = arguments + unknown_args
        run_command_line(arguments)