2 changes: 1 addition & 1 deletion configFile.ini
@@ -20,4 +20,4 @@ writeText = True
[gracedb]
gracedb_url = https://gracedb.ligo.org/api/
tagnames = em_follow lvem
gdbwrite = True
gdbwrite = False
2 changes: 1 addition & 1 deletion genDiskMassProbability.py
@@ -98,4 +98,4 @@ def computeEMBrightProb(self):
'''
remMass = self.computeRemMass()
prob = (self.mPrimary < self.max_ns_g_mass)*(self.mSecondary < self.max_ns_g_mass) + (remMass > self.threshold)
return 100.0*np.sum(prob > 0.) / len(prob)
return prob
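
Note on this change: `computeEMBrightProb` now returns the per-sample array `prob` instead of the aggregated percentage, so the caller can apply its own per-sample weights. A minimal sketch of how the old percentage could still be recovered from the new return value (the toy `prob` array is only an illustration):

```python
import numpy as np

# Hypothetical per-sample output of computeEMBrightProb() after this change:
# each entry is > 0 when the sample is classified as EM bright.
prob = np.array([0.0, 1.0, 2.0, 0.0, 1.0])

# The value the old code returned (percentage of EM-bright samples):
em_bright_percent_old = 100.0 * np.sum(prob > 0.) / len(prob)
print(em_bright_percent_old)  # 60.0 for this toy array
```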
24 changes: 20 additions & 4 deletions getEllipsoidSamples.py
@@ -220,6 +220,8 @@ def getSamples(graceid, mass1, mass2, chi1, network_snr, samples, PSD, fmin=30,
NN_total = 0
cart_grid = [[0., 0., 0.]]
sph_grid = [[0., 0., 0.]]
likelihood_grid = [1.] ## Koh Ueno added

while NN < Nrandpts:
NN_total += 1
r = np.random.rand()
@@ -235,21 +237,35 @@ def getSamples(graceid, mass1, mass2, chi1, network_snr, samples, PSD, fmin=30,
### CHECK ####
cart_grid_point = [x1, x2, x3]
cart_grid_point = np.array(np.real( np.dot(rot, cart_grid_point)) )

rand_Mc = cart_grid_point[0] * lal.MSUN_SI + McSIG # Mc (kg)
rand_eta = cart_grid_point[1] + etaSIG # eta
rand_chi = cart_grid_point[2] + chiSIG
### CHECK ####


### Koh Ueno: I added the following lines to introduce the correct weight for each sample.
### This computation could be done only after the "if joint_condition" check
### if we want to keep the computational cost as small as possible.
### I am also wondering whether we could use the modified "gam"
### (i.e., the gam to which gam_prior has been added)
### for the likelihood computation.
diff_rand_sample = [(rand_Mc - McSIG) / lal.MSUN_SI, rand_eta - etaSIG, rand_chi - chiSIG]
likelihood_tmp = np.dot( np.dot(gam, diff_rand_sample), diff_rand_sample)
likelihood = np.exp( -0.5 * network_snr**2 * likelihood_tmp.item(0) )
### Koh Ueno: ends

condition1 = rand_eta > 0
condition2 = rand_eta <= 0.25
condition3 = np.abs(rand_chi) < 1.0
joint_condition = condition1 * condition2 * condition3
if joint_condition:
cart_grid.append( [cart_grid_point[0], cart_grid_point[1], cart_grid_point[2]] ) ## CHECK
likelihood_grid.append(likelihood) ### Koh Ueno added
NN += 1
cart_grid = np.array(cart_grid)
sph_grid = np.array(sph_grid)
likelihood_grid = np.array(likelihood_grid)

if logFile:
log.writelines(str(datetime.datetime.today()) + '\t' + 'Selected ' + str(NN) + ' points from ' + str(NN_total) + ' random samples within the ellipsoid \n')
else:
@@ -266,7 +282,7 @@ def getSamples(graceid, mass1, mass2, chi1, network_snr, samples, PSD, fmin=30,
rand_Mcs = rand_dMcs_MSUN * lal.MSUN_SI + McSIG # Mc (kg)
rand_etas = rand_detas + etaSIG # eta
rand_chis = rand_dChis + chiSIG

# Prune points with unphysical values of eta from cart_grid
rand_etas = np.array(map(partial(lsu.sanitize_eta, exception=np.NAN), rand_etas))
cart_grid = np.transpose((rand_Mcs,rand_etas,rand_chis)) #### CHECK ####
@@ -330,7 +346,7 @@ def getSamples(graceid, mass1, mass2, chi1, network_snr, samples, PSD, fmin=30,
if show:
pl.show()

return outgrid
return outgrid, likelihood_grid # KU



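The weight introduced above is a Gaussian likelihood evaluated with the parameter-space metric `gam`: each random point inside the ellipsoid receives the weight exp(-0.5 * rho^2 * dtheta^T gam dtheta), where dtheta is the offset of the point from the trigger in (Mc [M_sun], eta, chi) and rho is the network SNR. A minimal, self-contained sketch of that computation (the names `gam`, `network_snr`, and the offsets are stand-ins for the quantities used in the diff):

```python
import numpy as np

def likelihood_weight(gam, network_snr, d_mc_msun, d_eta, d_chi):
    """Gaussian likelihood weight for one ellipsoid sample.

    gam         : 3x3 parameter-space metric in (Mc [M_sun], eta, chi)
    network_snr : network signal-to-noise ratio of the trigger
    d_*         : offsets of the sample from the trigger point
    """
    d_theta = np.array([d_mc_msun, d_eta, d_chi])
    # Quadratic form (d_theta)^T . gam . (d_theta), as in the added lines
    quad = float(np.dot(np.dot(gam, d_theta), d_theta))
    return np.exp(-0.5 * network_snr**2 * quad)

# Toy usage with an identity metric; the real code uses the metric computed upstream.
print(likelihood_weight(np.eye(3), network_snr=10.0,
                        d_mc_msun=0.01, d_eta=0.001, d_chi=0.0))
```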
10 changes: 5 additions & 5 deletions sourceClassDriver.py
@@ -146,17 +146,17 @@ def readCoinc(CoincFile):
File.writelines(graceid + '\t' + str(mass1) + '\t' + str(mass2) + '\t' + str(chi1) + '\t' + str(snr) + '\n')
File.close()

samples_sngl = getSamples(graceid, mass1, mass2, chi1, snr, ellipsoidSample, {ifo + '=' + psd_path + '/psd_' + graceid + '.xml.gz'}, fmin=f_low, NMcs=10, NEtas=10, NChis=10, mass1_cut=mass1_cut, chi1_cut=chi1_cut, lowMass_approx=lowMass_approx, highMass_approx=highMass_approx, Forced=forced, logFile=logFileName, saveData=True)
samples_sngl, likelihood = getSamples(graceid, mass1, mass2, chi1, snr, ellipsoidSample, {ifo + '=' + psd_path + '/psd_' + graceid + '.xml.gz'}, fmin=f_low, NMcs=10, NEtas=10, NChis=10, mass1_cut=mass1_cut, chi1_cut=chi1_cut, lowMass_approx=lowMass_approx, highMass_approx=highMass_approx, Forced=forced, logFile=logFileName, saveData=True)
log.writelines(str(datetime.datetime.today()) + '\t' + 'Created ambiguity ellipsoid samples\n')

### Currently NaNs are generated when the ellipsoid generation fails. This will be changed in a subsequent version. ###
if ~np.any( np.isnan(samples_sngl[0]) ):
diskMassObject_sngl = genDiskMassProbability.genDiskMass(samples_sngl, 'test', remMassThreshold)
[NS_prob_1_sngl, NS_prob_2_sngl, diskMass_sngl] = diskMassObject_sngl.fromEllipsoidSample()
#em_bright_prob_sngl = np.sum((diskMass_sngl > 0.)*100./len(diskMass_sngl))
em_bright_prob_sngl = diskMassObject_sngl.computeEMBrightProb() # RE: Probability using new EM bright boundary
#NS_prob_1_sngl, NS_prob_2_sngl, em_bright_prob_sngl = diskMassObject_sngl.computeEMBrightProb()

prob = diskMassObject_sngl.computeEMBrightProb() # RE: Probability using new EM bright boundary
likelihood_br = likelihood[np.where(prob > 0.)] # KU
em_bright_prob_sngl_old = np.sum(prob > 0.) / len(prob)
em_bright_prob_sngl_new = np.sum(likelihood_br) / np.sum(likelihood)
else:
log.writelines(str(datetime.datetime.today()) + '\t' + 'Return was NaNs\n')
[NS_prob_2_sngl, em_bright_prob_sngl] = [0., 0.]
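The new probability replaces the unweighted fraction of EM-bright samples with a likelihood-weighted fraction: each sample contributes its weight from `getSamples`, and the probability is the weighted mass of EM-bright samples divided by the total weight. A small sketch under those assumptions (`prob` and `likelihood` are stand-ins for the arrays returned by `computeEMBrightProb` and `getSamples`):

```python
import numpy as np

def em_bright_probability(prob, likelihood):
    """Unweighted and likelihood-weighted EM-bright probabilities.

    prob       : per-sample EM-bright scores (> 0 means EM bright)
    likelihood : per-sample likelihood weights from the ellipsoid sampling
    """
    likelihood = np.asarray(likelihood, dtype=float)
    bright = np.asarray(prob) > 0.
    # Old estimate: unweighted fraction of EM-bright samples
    p_old = np.sum(bright) / float(len(bright))
    # New estimate: weighted fraction of EM-bright samples
    p_new = np.sum(likelihood[bright]) / np.sum(likelihood)
    return p_old, p_new

# Toy example: the bright samples carry more likelihood, so p_new > p_old.
print(em_bright_probability([0., 1., 1., 0.], [0.1, 0.9, 0.8, 0.2]))  # (0.5, 0.85)
```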