Revision 9291900d0e04ba2345b5cb5f324858ab964abf22 authored by Josh Willis on 28 May 2020, 10:48:11 UTC, committed by GitHub on 28 May 2020, 10:48:11 UTC
* Add scripts for detailed comparison of two offline search runs
* Relocate offline search workflow comparisons from tools/ to bin/
* Add copyright statements to all newly added search comparison scripts
* Clarify some arguments to 'pycbc_injection_set_comparison'
1 parent 615f9db
pycbc_multi_inspiral
#!/usr/bin/env python
# Copyright (C) 2014 Alex Nitz
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import logging
from collections import defaultdict
import argparse
from pycbc import vetoes, psd, waveform, events, strain, scheme, fft,\
DYN_RANGE_FAC
from pycbc.filter import MatchedFilterControl
from pycbc.types import TimeSeries, zeros, float32, complex64
from pycbc.types import MultiDetOptionAction
import pycbc.detector
import pycbc.inject
import pycbc.opt
import numpy as np
import time
time_init = time.time()
parser = argparse.ArgumentParser(usage='',
description="Find multiple detector gravitational-wave triggers.")
parser.add_argument("-V", "--verbose", action="store_true",
help="print extra debugging information", default=False )
parser.add_argument("--output", type=str)
parser.add_argument("--instruments", nargs="+", type=str, required=True,
help="List of instruments to analyze.")
parser.add_argument("--bank-file", type=str)
parser.add_argument("--snr-threshold",
help="SNR threshold for trigger generation", type=float)
parser.add_argument("--newsnr-threshold", type=float, metavar='THRESHOLD',
help="Cut triggers with NewSNR less than THRESHOLD")
parser.add_argument("--low-frequency-cutoff", type=float,
help="The low frequency cutoff to use for filtering (Hz)")
# add approximant arg
waveform.bank.add_approximant_arg(parser)
parser.add_argument("--order", type=str,
help="The integer half-PN order at which to generate"
" the approximant.")
taper_choices = ["start","end","startend"]
parser.add_argument("--taper-template", choices=taper_choices,
help="For time-domain approximants, taper the start and/or "
"end of the waveform before FFTing.")
parser.add_argument("--cluster-method", choices=["template", "window"])
parser.add_argument("--cluster-window", type=float, default = -1,
help="Length of clustering window in seconds.")
parser.add_argument("--bank-veto-bank-file", type=str)
parser.add_argument("--chisq-bins", default=0)
parser.add_argument("--chisq-threshold", type=float, default=0)
parser.add_argument("--chisq-delta", type=float, default=0)
parser.add_argument("--autochi-number-points", type=int, default=0)
parser.add_argument("--autochi-stride", type=int, default=0)
parser.add_argument("--autochi-onesided", action='store_true', default=False)
parser.add_argument("--downsample-factor", type=int, default=1,
help="Factor that determines the interval between the "
"initial SNR sampling. If not set (or 1) no sparse "
"sample is created, and the standard full SNR is "
"calculated.")
parser.add_argument("--upsample-threshold", type=float,
help="The fraction of the SNR threshold to check the "
"sparse SNR sample.")
parser.add_argument("--upsample-method", choices=["pruned_fft"],
default='pruned_fft',
help="The method to find the SNR points between the "
"sparse SNR sample.")
parser.add_argument("--user-tag", type=str, metavar="TAG", help="""
This is used to identify FULL_DATA jobs for
compatibility with pipedown post-processing.
Option will be removed when no longer needed.""")
# Arguments added for the coherent search
parser.add_argument("--longitude", type=float)
parser.add_argument("--latitude", type=float)
parser.add_argument("--coinc-threshold", type=float, default=0.0, help="""
Triggers with coincident/coherent snr below this value will
be discarded.""")
parser.add_argument("--null-min", type=float, default=5.25, help="""
Triggers with null_snr above this value with be
discarded.""")
parser.add_argument("--null-grad", type=float, default=0.2, help="""
The gradient of the line defining the null cut after the
null step.""")
parser.add_argument("--null-step", type=float, default=20., help="""
Triggers with coherent snr above null_step will be cut
according to the null_grad and null_min.""")
parser.add_argument("--trigger-time", type=int, help="""
Time of the GRB, used to set the antenna patterns.""")
# Add options groups
strain.insert_strain_option_group_multi_ifo(parser)
strain.StrainSegments.insert_segment_option_group_multi_ifo(parser)
psd.insert_psd_option_group_multi_ifo(parser)
scheme.insert_processing_option_group(parser)
fft.insert_fft_option_group(parser)
from pycbc.vetoes.sgchisq import SingleDetSGChisq
pycbc.opt.insert_optimization_option_group(parser)
pycbc.inject.insert_injfilterrejector_option_group_multi_ifo(parser)
SingleDetSGChisq.insert_option_group(parser)
opt = parser.parse_args()
# Use the trigger time (the GRB time) to calculate the antenna patterns.
t_gps = opt.trigger_time
# Set sky position being searched.
ra = opt.longitude
dec = opt.latitude
# Put the ifos in alphabetical order so they are always called in
# the same order.
opt.instruments.sort()
strain.verify_strain_options_multi_ifo(opt, parser, opt.instruments)
strain.StrainSegments.verify_segment_options_multi_ifo(opt, parser,
opt.instruments)
psd.verify_psd_options_multi_ifo(opt, parser, opt.instruments)
scheme.verify_processing_options(opt, parser)
fft.verify_fft_options(opt,parser)
log_level = logging.DEBUG if opt.verbose else logging.INFO
logging.basicConfig(format='%(asctime)s : %(message)s', level=log_level)
inj_filter_rejector = pycbc.inject.InjFilterRejector.from_cli_multi_ifos(
    opt, opt.instruments)
ctx = scheme.from_cli(opt)
def network_chisq(chisq, chisq_dof, snr_dict):
ifos = sorted(snr_dict.keys())
chisq_per_dof = {}
for ifo in ifos:
chisq_per_dof[ifo] = chisq[ifo] / chisq_dof[ifo]
chisq_per_dof[ifo][chisq_per_dof[ifo]<1] = 1
snr2 = {ifo : np.real(np.array(snr_dict[ifo]) *
np.array(snr_dict[ifo]).conj()) for ifo in ifos}
coinc_snr2 = sum(snr2.values())
snr2_ratio = {ifo : snr2[ifo] / coinc_snr2 for ifo in ifos}
network_chisq = sum( [chisq_per_dof[ifo] * snr2_ratio[ifo] for ifo in ifos] )
return network_chisq
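# A minimal usage sketch of network_chisq (comment only, not executed); the
# two-ifo numbers below are made up for illustration. The result is a weighted
# average of the per-ifo chisq per degree of freedom, weighted by each ifo's
# share of the network power:
#   >>> chisq = {'H1': np.array([8.]), 'L1': np.array([4.])}
#   >>> chisq_dof = {'H1': np.array([4.]), 'L1': np.array([4.])}
#   >>> snr = {'H1': np.array([6. + 0j]), 'L1': np.array([8. + 0j])}
#   >>> network_chisq(chisq, chisq_dof, snr)  # 2*0.36 + 1*0.64
#   array([1.36])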
def pycbc_reweight_snr(network_snr, network_chisq, a = 3, b = 1. / 6.):
"""
Output: reweighted_snr: Reweighted SNR for each trigger
Input: network_snr: Dictoinary of coincident or coherent SNR for each
trigger
network_chisq: A chisq value for each trigger
"""
denom = ((1 + network_chisq)**a) / 2
reweighted_snr = network_snr / denom**b
return reweighted_snr
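# Worked example (illustrative only): with the defaults a=3 and b=1/6, a
# trigger with network_snr=10 and network_chisq=1 gives
# denom = ((1 + 1)**3) / 2 = 4, so reweighted_snr = 10 / 4**(1./6.) ~= 7.94.
# Larger chisq values suppress the snr more strongly.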
def reweight_snr_by_null(network_snr, nullsnr):
"""
Output: reweighted_snr: Reweighted SNR for each trigger
Input: network_snr: Dictoinary of coincident, coherent, or reweighted
SNR for each trigger
null: Null snr for each trigger
"""
nullsnr = np.array(nullsnr)
nullsnr[nullsnr <= 4.25] = 4.25
reweighted_snr = network_snr / (nullsnr - 3.25)
return reweighted_snr
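# Worked example (illustrative only): nullsnr is clipped from below at 4.25,
# so any trigger with null snr <= 4.25 is divided by exactly 1 and left
# unchanged; a trigger with network_snr=10 and nullsnr=5 is down-weighted to
# 10 / (5 - 3.25) ~= 5.71.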
def get_weighted_antenna_patterns(Fp_dict, Fc_dict, sigma_dict):
"""
Output: wp: 1 x nifo of the weighted antenna response fuctions to plus
polarisation for each ifo
wc: 1 x nifo of the weighted antenna response fuctions to cross
polarisation for each ifo
Input: Fp_dict: Dictionary of the antenna response fuctions to plus
polarisation for each ifo
Fc_dict: Dictionary of the antenna response fuctions to cross
polarisation for each ifo
sigma_dict: Sigma dictionary for each ifo (sensitivity of each ifo)
"""
#Need the keys to be in alphabetical order
keys = sorted(sigma_dict.keys())
wp = np.array([sigma_dict[ifo]*Fp_dict[ifo] for ifo in keys])
wc = np.array([sigma_dict[ifo]*Fc_dict[ifo] for ifo in keys])
return wp, wc
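# A minimal sketch (comment only, not executed) with made-up numbers; note
# that the ifo ordering follows the sorted dictionary keys:
#   >>> Fp = {'H1': 0.5, 'L1': 0.3}
#   >>> Fc = {'H1': 0.1, 'L1': 0.2}
#   >>> sigma = {'H1': 10., 'L1': 20.}
#   >>> get_weighted_antenna_patterns(Fp, Fc, sigma)
#   (array([5., 6.]), array([1., 4.]))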
def get_projection_matrix(wp, wc):
"""
Output: projection_matrix: Projects the data onto the signal space
Input: wp,wc: The weighted antenna response fuctions to plus and cross
polarisations respectively
"""
denominator = np.dot(wp, wp) * np.dot(wc, wc) - np.dot(wp, wc)**2
projection_matrix = (np.dot(wc, wc)*np.outer(wp, wp) +
np.dot(wp, wp)*np.outer(wc, wc) -
np.dot(wp, wc)*(np.outer(wp, wc) +
np.outer(wc, wp))) / denominator
return projection_matrix
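# Sanity-check sketch (comment only, not executed): the returned matrix is a
# true projection onto span{wp, wc}, so it is idempotent. With orthonormal
# toy vectors it simply zeroes the component outside the signal plane:
#   >>> P = get_projection_matrix(np.array([1., 0., 0.]),
#   ...                           np.array([0., 1., 0.]))
#   >>> np.allclose(P, np.diag([1., 1., 0.])) and np.allclose(P, P @ P)
#   True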
def coherent_snr(snr_triggers, index, threshold, projection_matrix,
coinc_snr=[]):
"""
Output: rho_coh: an array of the coherent snr for the detector network
index : Indexes that survive cuts
snrv : Dictionary of individual ifo triggers that survive cuts
coinc_snr: The coincident snr value for triggers surviving the
coherent cut
Inputs: snr_triggers: is a dictionary of the normalised complex snr time
series for each ifo. The keys are the ifos (e.g.
'L1','H1', and 'V1')
index : An array of the indexes you want to analyse. Not used for
calculations, just for book keeping
threshold: Triggers with rho_coh<threshold are cut
projection_matrix: Produced by get_projection_matrix.
coinc_snr: Optional- The coincident snr for each trigger.
"""
#Calculate rho_coh
snr_array = np.array([snr_triggers[ifo]
for ifo in sorted(snr_triggers.keys())])
x = np.inner(snr_array.conj().transpose(),projection_matrix)
rho_coh2 = sum(x.transpose()*snr_array)
rho_coh = np.sqrt(rho_coh2)
#Apply thresholds
index = index[rho_coh > threshold]
if len(coinc_snr) != 0: coinc_snr = coinc_snr[rho_coh > threshold]
snrv = {ifo : snr_triggers[ifo][rho_coh > threshold]
for ifo in snr_triggers.keys()}
rho_coh = rho_coh[rho_coh > threshold]
return rho_coh, index, snrv, coinc_snr
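# In matrix form (illustrative note): for the vector s of per-ifo complex snr
# values at a given time sample, rho_coh**2 = s.conj().T @ projection_matrix
# @ s, i.e. the power of the data projected onto the two-dimensional signal
# space spanned by the weighted plus and cross responses.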
def coincident_snr(snr_dict, index, threshold):
"""
Output: rho_coinc: Coincident snr triggers
index : The subset of input index that survive the cuts
coinc_triggers: Dictionary of individual detector SNRs at
indexes that survive cuts
Input: snr_dict: Dictionary of individual detector SNRs in geocent time
index : Geocent indexes you want to find coinc SNR for
threshold: Indexes with coinc SNR below this threshold are cut
"""
#Restrict the snr timeseries to just the interesting points
coinc_triggers = {ifo : snr_dict[ifo][index] for ifo in snr_dict.keys()}
#Calculate the coincident snr
snr_array = np.array([coinc_triggers[ifo]
for ifo in coinc_triggers.keys()])
rho_coinc = np.sqrt(np.sum(snr_array * snr_array.conj(),axis=0))
#Apply threshold
thresh_indexes = rho_coinc > threshold
index = index[thresh_indexes]
coinc_triggers = {ifo : snr_dict[ifo][index] for ifo in snr_dict.keys()}
rho_coinc = rho_coinc[thresh_indexes]
return rho_coinc, index, coinc_triggers
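# A minimal sketch (comment only, not executed), assuming two ifos with a
# single trigger each: |snr_H1| = 6 and |snr_L1| = 8 combine in quadrature to
# rho_coinc = sqrt(6**2 + 8**2) = 10, so a threshold of 10 or above would cut
# this trigger (the cut keeps rho_coinc strictly above threshold).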
def null_snr(rho_coh, rho_coinc, null_min=5.25, null_grad=0.2, null_step=20.,
index={}, snrv={}):
"""
Output: null: null snr for surviving triggers
rho_coh: Coherent snr for surviving triggers
rho_coinc: Coincident snr for suviving triggers
index: Indexes for surviving triggers
snrv: Single detector snr for surviving triggers
Input: rho_coh: Numpy array of coherent snr triggers
rho_coinc: Numpy array of coincident snr triggers
null_min: Any trigger with null snr below this is cut
null_grad: Any trigger with null snr<(null_grad*rho_coh+null_min)
is cut
null_step: The value for required for coherent snr to start
increasing the null threshold
index: Optional- Indexes of triggers. If given, will remove
triggers that fail cuts
snrv: Optional- Individual ifo snr for triggers. If given will
remove triggers that fail cut
"""
null2 = rho_coinc**2 - rho_coh**2
# Numerical errors may make this negative and break the sqrt, so set
# negative values to 0.
null2[null2 < 0] = 0
null = null2**0.5
# Make cut on null.
keep1 = np.logical_and(null < null_min, rho_coh <= null_step)
keep2 = np.logical_and(null < (rho_coh * null_grad + null_min),
rho_coh > null_step)
keep = np.logical_or(keep1, keep2)
index = index[keep]
rho_coh = rho_coh[keep]
snrv = {ifo : snrv[ifo][keep] for ifo in snrv.keys()}
rho_coinc = rho_coinc[keep]
null = null[keep]
return null, rho_coh, rho_coinc, index, snrv
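# Worked example (illustrative only): with rho_coinc=10 and rho_coh=9.8 the
# null snr is sqrt(10**2 - 9.8**2) ~= 1.99, well below the default null_min
# of 5.25, so the trigger is kept. A trigger whose coincident power greatly
# exceeds its coherent power has a large null snr and is cut as being
# inconsistent with a real signal from the sky position searched.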
def get_coinc_indexes(idx_dict, time_delay_idx):
"""
Output: coinc_idx: list of indexes for triggers in geocent time that
appear in multiple detectors
Input: idx_dict: Dictionary of indexes of triggers above threshold in
each detector
time_delay_idx: Dictionary giving time delay index
(time_delay*sample_rate) for each ifo
"""
coinc_list = np.array([], dtype=int)
for ifo in idx_dict.keys():
"""
Create list of indexes above threshold in single detector in
geocent time. Can then search for triggers that appear in multiple
detectors later.
"""
if len(idx_dict[ifo]) != 0:
coinc_list = np.hstack([coinc_list,
idx_dict[ifo] - time_delay_idx[ifo]])
    # Search through coinc_list for repeated indexes. These must have
    # been loud in at least 2 detectors.
    unique_idx, counts = np.unique(coinc_list, return_counts=True)
    coinc_idx = unique_idx[counts > 1]
    return coinc_idx
strain_dict = strain.from_cli_multi_ifos(
opt, opt.instruments, inj_filter_rejector,
dyn_range_fac=DYN_RANGE_FAC
)
strain_segments_dict = strain.StrainSegments.from_cli_multi_ifos(
opt, strain_dict, opt.instruments)
with ctx:
fft.from_cli(opt)
# Set some often used variables for easy access
flow = opt.low_frequency_cutoff
flow_dict = defaultdict(lambda : flow)
for count, ifo in enumerate(opt.instruments):
if count == 0:
sample_rate = strain_dict[ifo].sample_rate
sample_rate_dict = defaultdict(lambda : sample_rate)
flen = strain_segments_dict[ifo].freq_len
flen_dict = defaultdict(lambda : flen)
tlen = strain_segments_dict[ifo].time_len
tlen_dict = defaultdict(lambda : tlen)
delta_f = strain_segments_dict[ifo].delta_f
delta_f_dict = defaultdict(lambda : delta_f)
else:
try:
assert(sample_rate == strain_dict[ifo].sample_rate)
assert(flen == strain_segments_dict[ifo].freq_len)
assert(tlen == strain_segments_dict[ifo].time_len)
assert(delta_f == strain_segments_dict[ifo].delta_f)
            except AssertionError:
err_msg = "Sample rate, frequency length and time length "
err_msg += "must all be consistent across ifos."
raise ValueError(err_msg)
logging.info("Making frequency-domain data segments")
segments = {ifo : strain_segments_dict[ifo].fourier_segments()
for ifo in opt.instruments}
del strain_segments_dict
psd.associate_psds_to_multi_ifo_segments(opt, segments, strain_dict, flen,
delta_f, flow, opt.instruments, dyn_range_factor=DYN_RANGE_FAC,
precision='single')
    # Currently we are using the same matched-filter parameters for all
    # ifos, so only one MatchedFilterControl is needed; this could change
    # if necessary. Segments is only used to get tlen etc., which are the
    # same for all ifos, so just send the first ifo.
template_mem = zeros(tlen, dtype=complex64)
#Calculate time delay to each detector
time_delay_idx = {ifo : int(round(pycbc.detector.Detector(ifo).
time_delay_from_earth_center(ra,dec,t_gps) * sample_rate))
for ifo in opt.instruments}
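    # Illustrative note: at a sample rate of 4096 Hz, a light-travel delay
    # of ~10 ms between a site and the geocenter gives a time_delay_idx of
    # ~41 samples; these offsets are used below to shift each ifo's snr
    # series into geocentric time before looking for coincidences.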
#Matched filter each ifo. Don't cluster here for a coherent search.
#Clustering happens at the end of the template loop.
matched_filter = {ifo : MatchedFilterControl(
opt.low_frequency_cutoff, None, opt.snr_threshold, tlen, delta_f,
complex64, segments[ifo], template_mem, use_cluster=False,
downsample_factor=opt.downsample_factor,
upsample_threshold=opt.upsample_threshold,
upsample_method=opt.upsample_method,
cluster_function='symmetric') for ifo in opt.instruments}
logging.info("Initializing signal-based vetoes.")
# The existing SingleDetPowerChisq can calculate the single detector
# chisq for multiple ifos, so just use that directly.
power_chisq = vetoes.SingleDetPowerChisq(opt.chisq_bins)
# The existing SingleDetBankVeto can calculate the single detector
# bank veto for multiple ifos, so we just use it directly.
bank_chisq = vetoes.SingleDetBankVeto(opt.bank_veto_bank_file, flen,
delta_f, flow, complex64,
phase_order=opt.order,
approximant=opt.approximant)
# Same here
autochisq = vetoes.SingleDetAutoChisq(opt.autochi_stride,
opt.autochi_number_points,
onesided=opt.autochi_onesided)
logging.info("Overwhitening frequency-domain data segments")
for ifo in opt.instruments:
for seg in segments[ifo]:
seg /= seg.psd
ifo_out_types = {
'time_index' : int,
'ifo' : int, # IFO is stored as an int internally!
'snr' : complex64,
'chisq' : float32,
'chisq_dof' : int,
'bank_chisq' : float32,
'bank_chisq_dof' : int,
'cont_chisq' : float32,
}
ifo_out_vals = {
'time_index' : None,
'ifo' : None,
'snr' : None,
'chisq' : None,
'chisq_dof' : None,
'bank_chisq' : None,
'bank_chisq_dof' : None,
'cont_chisq' : None,
}
ifo_names = sorted(ifo_out_vals.keys())
network_out_types = {
'latitude' : float32,
'longitude' : float32,
'time_index' : int,
'coherent_snr' : float32,
'null_snr' : float32,
'nifo' : int,
'reweighted_snr' : float32
}
network_out_vals = {
'latitude' : None,
'longitude' : None,
'time_index' : None,
'coherent_snr' : None,
'null_snr' : None,
'nifo' : None,
'reweighted_snr' : None
}
network_names = sorted(network_out_vals.keys())
event_mgr = events.EventManagerCoherent(
opt, opt.instruments, ifo_names, [ifo_out_types[n] for n in ifo_names],
network_names, [network_out_types[n] for n in network_names]
)
logging.info("Read in template bank")
bank = waveform.FilterBank(opt.bank_file, flen, delta_f, complex64,
low_frequency_cutoff=flow, phase_order=opt.order,
taper=opt.taper_template,
approximant=opt.approximant, out=template_mem)
# Use injfilterrejector to reduce the bank to only those templates that
# might actually find something
ntemplates = len(bank)
nfilters = 0
logging.info("Full template bank size: %s", ntemplates)
for ifo in opt.instruments:
bank.template_thinning(inj_filter_rejector[ifo])
if not len(bank) == ntemplates:
logging.info("Template bank size after thinning: %s", len(bank))
Fp = {} #Antenna patterns
Fc = {}
for ifo in opt.instruments:
Fp[ifo], Fc[ifo] = pycbc.detector.Detector(ifo).antenna_pattern(
ra, dec, polarization=0, t_gps=t_gps)
for t_num, template in enumerate(bank): #Loop over templates
for ifo in opt.instruments:
if opt.cluster_method == "window":
cluster_window = int(opt.cluster_window * sample_rate)
elif opt.cluster_method == "template":
cluster_window = int(template.chirp_length * sample_rate)
elif opt.cluster_window == 0:
cluster_window = int(0)
#Loop over segments
for s_num,stilde in enumerate(segments[opt.instruments[0]]):
stilde = {ifo : segments[ifo][s_num] for ifo in opt.instruments}
# Filter check checks the 'inj_filter_rejector' options to
# determine whether to filter this template/segment
# if injections are present.
analyse_segment = True
for ifo in opt.instruments:
if not inj_filter_rejector[ifo].template_segment_checker(
bank, t_num, stilde[ifo], opt.gps_start_time[ifo]):
logging.info("Skipping segment %d/%d with template %d/%d"
" as no detectable injection is present"
% (s_num + 1, len(segments[ifo]),
t_num + 1, len(bank)))
analyse_segment = False
            # Find the detector sensitivities sigma = sqrt(sigmasq) for
            # each ifo
sigmasq = {ifo : template.sigmasq(segments[ifo][s_num].psd)
for ifo in opt.instruments}
sigma = {ifo : np.sqrt(sigmasq[ifo]) for ifo in opt.instruments}
            # Call new_template on the first segment of every template, even
            # if that segment is skipped, so the template index is
            # incremented exactly once per template.
if s_num==0:
event_mgr.new_template(tmplt=template.params, sigmasq=sigmasq)
if not analyse_segment: continue
logging.info("Analyzing segment %d/%d" % (s_num + 1,
len(segments[ifo])))
snrv_dict = {}
norm_dict = {}
corr_dict = {}
snr_ts={}
idx={}
coinc_idx = np.array([])
ifo_list = opt.instruments[:]
for ifo in opt.instruments:
logging.info("Filtering template %d/%d, ifo %s" %
(t_num + 1, len(bank), ifo))
# No clustering in the coherent search until the end.
# The correlation vector is the FFT of the snr (so inverse FFT
# it to get the snr).
snr_ts[ifo], norm_dict[ifo], corr_dict[ifo], idx[ifo],\
snrv_dict[ifo] = matched_filter[ifo].matched_filter_and_cluster(
s_num, template.sigmasq(stilde[ifo].psd),
window=0
)
#List of ifos with triggers
if len(idx[ifo]) == 0: ifo_list.remove(ifo)
#Move onto next segment if there are no triggers.
if len(ifo_list)==0: continue
#Find the data we need to analyse
analyse_data = {ifo : matched_filter[ifo].segments[s_num].analyze
for ifo in opt.instruments}
#Restrict the snr timeseries to just this section
snr = {ifo : snr_ts[ifo][analyse_data[ifo]] * norm_dict[ifo]
for ifo in opt.instruments}
            # Save the indexes of triggers (if we have any).
            # Even if we have none, need to keep an empty dictionary.
            # Only do this if idx doesn't get time shifted out of the time
            # we are looking at.
idx_dict = {ifo : idx[ifo][np.logical_and(
idx[ifo] > time_delay_idx[ifo],idx[ifo] -
time_delay_idx[ifo] < len(snr[ifo]))]
for ifo in opt.instruments}
#Find triggers that are coincident (in geocent time) in multiple ifos.
#If a single ifo analysis then just use the indexes from that ifo.
if len(opt.instruments)>1:
coinc_idx = get_coinc_indexes(idx_dict,time_delay_idx)
else:
coinc_idx = idx_dict[opt.instruments[0]] - time_delay_idx[opt.instruments[0]]
logging.info("Found %s coincident triggers" % str(len(coinc_idx)))
# Number of ifos
nifo = len(opt.instruments)
for ifo in opt.instruments:
# Move on if this segment has no data
if len(snr[ifo])==0:
                    raise ValueError('The SNR triggers dictionary is empty. '
                                     'This should not be possible.')
#Apply time delay
snr[ifo].roll(-time_delay_idx[ifo])
#Calculate the coincident and coherent snr
#Check we have data before we try to compute the coherent snr
if len(coinc_idx) != 0 and nifo > 1:
#Find coinc snr at trigger times and apply coinc snr threshold
rho_coinc, coinc_idx, coinc_triggers =\
coincident_snr(snr, coinc_idx, opt.coinc_threshold)
logging.info("%s points above coincident SNR threshold" % \
str(len(coinc_idx)))
if len(coinc_idx) != 0:
logging.info("Max coincident SNR = %s"
% str(max(rho_coinc)))
# If there is only one ifo, then coinc_triggers is just the
# triggers from ifo
elif len(coinc_idx) != 0 and nifo==1:
coinc_triggers = {opt.instruments[0] : snr[opt.instruments[0]][coinc_idx]}
else:
coinc_triggers = {}
logging.info("No triggers above coincident SNR threshold")
# If we have triggers above coinc threshold and more than 2 ifos
# then calculate the coherent statistics
if len(coinc_idx) != 0 and nifo > 2:
wp,wc=get_weighted_antenna_patterns(Fp,Fc,sigma)
projection_matrix = get_projection_matrix(wp,wc)
rho_coh, coinc_idx, coinc_triggers, rho_coinc = coherent_snr(
coinc_triggers, coinc_idx, opt.coinc_threshold,
projection_matrix, rho_coinc)
logging.info("%s points above coherent threshold"
% str(len(rho_coh)))
if len(coinc_idx) != 0:
logging.info("Max coherent SNR = %s" % str(max(rho_coh)))
#Find the null snr
null, rho_coh, rho_coinc, coinc_idx, coinc_triggers =\
null_snr(rho_coh, rho_coinc, snrv=coinc_triggers,
index=coinc_idx)
if len(coinc_idx) != 0:
logging.info("Max null SNR = %s" % str(max(null)))
logging.info("%s points above null threshold: "
% str(len(null)))
# We are now going to find the individual detector chi2 values.
# To do this it is useful to find the indexes of coinc triggers
# in the detector frame.
if len(coinc_idx) != 0:
coinc_idx_det_frame = {ifo : coinc_idx + time_delay_idx[ifo]
for ifo in opt.instruments}
coherent_ifo_triggers = {ifo : snr[ifo][coinc_idx]
for ifo in opt.instruments}
# Calculate the power and autochi2 values for the coinc indexes
# (this uses the snr timeseries before the time delay, so we
# need to undo it. Same for normalisation)
chisq = {}
chisq_dof = {}
for ifo in opt.instruments:
chisq[ifo], chisq_dof[ifo] = power_chisq.values(
corr_dict[ifo],
coherent_ifo_triggers[ifo] / norm_dict[ifo],
norm_dict[ifo],
stilde[ifo].psd,
coinc_idx + stilde[ifo].analyze.start,
template)
# Calculate network chisq value
network_chisq_dict = network_chisq(chisq, chisq_dof,
coherent_ifo_triggers)
# Calculate chisq reweighted SNR
if nifo > 2:
reweighted_snr = pycbc_reweight_snr(rho_coh,
network_chisq_dict)
# Calculate null reweighted SNR
reweighted_snr = reweight_snr_by_null(reweighted_snr, null)
elif nifo == 2:
reweighted_snr = pycbc_reweight_snr(rho_coinc,
network_chisq_dict)
else:
reweighted_snr = pycbc_reweight_snr(
abs(snr[opt.instruments[0]][coinc_idx]), network_chisq_dict)
# Need all out vals to be the same length. This means the
# entries that are single values need to be repeated once
# per event.
num_events = len(reweighted_snr)
for ifo in opt.instruments:
ifo_out_vals['bank_chisq'], ifo_out_vals['bank_chisq_dof'] =\
bank_chisq.values(template, stilde[ifo].psd, stilde[ifo],
coherent_ifo_triggers[ifo] /
norm_dict[ifo], norm_dict[ifo],
coinc_idx+stilde[ifo].analyze.start)
ifo_out_vals['cont_chisq'] = autochisq.values(
snr[ifo], coinc_idx+stilde[ifo].analyze.start,
template, stilde[ifo].psd, norm_dict[ifo],
stilde=stilde[ifo], low_frequency_cutoff=flow)
ifo_out_vals['chisq'] = chisq[ifo]
ifo_out_vals['chisq_dof'] = chisq_dof[ifo]
ifo_out_vals['time_index'] = coinc_idx_det_frame[ifo] + \
stilde[ifo].cumulative_index
ifo_out_vals['snr'] = coherent_ifo_triggers[ifo]
# IFO is stored as an int
ifo_out_vals['ifo'] = [event_mgr.ifo_dict[ifo]] * num_events
event_mgr.add_template_events_to_ifo(
ifo, ifo_names, [ifo_out_vals[n] for n in ifo_names])
if nifo>2:
network_out_vals['coherent_snr'] = np.real(rho_coh)
network_out_vals['null_snr'] = np.real(null)
elif nifo==2:
network_out_vals['coherent_snr'] = np.real(rho_coinc)
else:
network_out_vals['coherent_snr'] = \
abs(snr[opt.instruments[0]][coinc_idx])
network_out_vals['reweighted_snr'] = reweighted_snr
network_out_vals['time_index'] = \
coinc_idx+stilde[ifo].cumulative_index
network_out_vals['nifo'] = [nifo] * num_events
network_out_vals['latitude'] = [opt.latitude] * num_events
network_out_vals['longitude'] = [opt.longitude] * num_events
event_mgr.add_template_events_to_network( network_names,
[network_out_vals[n] for n in network_names])
event_mgr.cluster_template_network_events(
"time_index",
"reweighted_snr",
cluster_window
)
event_mgr.finalize_template_events()
event_mgr.write_events(opt.output)
logging.info("Finished")
logging.info("Time to complete analysis: %s" % str(time.time() - time_init))