From 9586147f71c23d5bdf9f5d11dc00d0965d497f43 Mon Sep 17 00:00:00 2001
From: mbustama <43756071+mbustama@users.noreply.github.com>
Date: Tue, 12 Nov 2019 02:02:55 +0100
Subject: [PATCH] Minor changes

---
 dev-likelihood-sl/full_likelihood.pyx           | 12 +++++++-----
 dev-likelihood-sl/likelihood_analysis_parser.py | 15 ++++++++-------
 2 files changed, 15 insertions(+), 12 deletions(-)

diff --git a/dev-likelihood-sl/full_likelihood.pyx b/dev-likelihood-sl/full_likelihood.pyx
index cdad828..fb8961f 100644
--- a/dev-likelihood-sl/full_likelihood.pyx
+++ b/dev-likelihood-sl/full_likelihood.pyx
@@ -4,7 +4,7 @@ import sys
 cimport numpy as np
 import numpy as np
 
-from libc.math cimport log10
+from libc.math cimport log
 from libc.math cimport exp
 
 from global_tools import Write_Data_File
@@ -17,6 +17,8 @@ from global_tools import Write_Data_File
 # declination_tr, RA_tr, Med_tr = \
 #     Read_Data_File(os.getcwd()+'/ic_data/data_track.txt')
 
+# ll_den = 273.673 # Log(80!)
+
 cdef double Partial_Likelihood_Showers(int event_index, double gamma,
     double log10_g, double log10_M,
     double N_a, double N_conv, double N_pr,
@@ -62,7 +64,7 @@ cdef double Partial_Likelihood_Tracks(int event_index, double gamma,
 
     return likelihood
 
-def Log10_Likelihood(double gamma, double log10_g, double log10_M,
+def Log_Likelihood(double gamma, double log10_g, double log10_M,
     double N_a, double N_conv, double N_pr, double N_mu,
     list interp_astro_pdf_sh, list pdf_atm_conv_sh, list pdf_atm_pr_sh,
     list interp_astro_pdf_tr, list pdf_atm_conv_tr, list pdf_atm_pr_tr,
@@ -73,7 +75,7 @@ def Log10_Likelihood(double gamma, double log10_g, double log10_M,
 
     cdef double fl_sh
     cdef double fl_tr
-    cdef double log10_likelihood
+    cdef double log_likelihood
     cdef int i
 
     # Showers
@@ -88,9 +90,9 @@ def Log10_Likelihood(double gamma, double log10_g, double log10_M,
         for i in range(num_ic_tr)])
 
 
-    log10_likelihood = log10(exp(-N_a-N_conv-N_pr-N_mu)*fl_sh*fl_tr)
+    log_likelihood = log(exp(-N_a-N_conv-N_pr-N_mu)*fl_sh*fl_tr)
 
-    return log10_likelihood
+    return log_likelihood
 
 
 
diff --git a/dev-likelihood-sl/likelihood_analysis_parser.py b/dev-likelihood-sl/likelihood_analysis_parser.py
index 23fc096..266f644 100644
--- a/dev-likelihood-sl/likelihood_analysis_parser.py
+++ b/dev-likelihood-sl/likelihood_analysis_parser.py
@@ -8,7 +8,7 @@ from interp_atm_pdf import Initialize_Atmospheric_PDFs
 from interp_astro_pdf import Initialize_Interpolator_Astrophysical_PDF
 
-from full_likelihood import Log10_Likelihood
+from full_likelihood import Log_Likelihood
 
 # Recommended run:
 # python likelihood_analysis_parser.py --verbose=1 --n_live_points=200 --evidence_tolerance=0.01
 
@@ -41,7 +41,7 @@ def Prior(cube, ndim, nparams):
 
     # Spectral index. Uniform prior between 1.8 and 3.
-    cube[0] = 1.8+cube[0]*1.2
+    cube[0] = 2.0+cube[0] #1.8+cube[0]*1.2
 
     # Log10 of mass of mediator [GeV]. Log uniform prior between -3.0 and -1.0.
     cube[1] = -3.0+2.0*cube[1]
@@ -64,7 +64,7 @@ def Prior(cube, ndim, nparams):
 
     return 0
 
-def Log10_Likelihood_MultiNest(cube, ndim, nparams):
+def Log_Likelihood_MultiNest(cube, ndim, nparams):
 
     gamma = cube[0]
     log10_g = cube[1]
@@ -74,7 +74,7 @@ def Log10_Likelihood_MultiNest(cube, ndim, nparams):
     N_pr = cube[5]
     N_mu = cube[6]
 
-    ll = Log10_Likelihood(gamma, log10_g, log10_M, N_a, N_conv, N_pr, N_mu,
+    ll = Log_Likelihood(gamma, log10_g, log10_M, N_a, N_conv, N_pr, N_mu,
         interp_astro_pdf_sh, pdf_atm_conv_sh, pdf_atm_pr_sh,
         interp_astro_pdf_tr, pdf_atm_conv_tr, pdf_atm_pr_tr,
         pdf_atm_muon_tr, num_ic_sh=58, num_ic_tr=22, verbose=verbose)
@@ -97,10 +97,11 @@ def Log10_Likelihood_MultiNest(cube, ndim, nparams):
 
 
 # Run MultiNest
-pymultinest.run(Log10_Likelihood_MultiNest, Prior, n_params,
+pymultinest.run(Log_Likelihood_MultiNest, Prior, n_params,
     outputfiles_basename='out/likelihood/',
-    resume=False, verbose=verbose, n_live_points=n_live_points,
-    seed=1, evidence_tolerance=evidence_tolerance,
+    resume=resume, verbose=verbose, n_live_points=n_live_points,
+    seed=-1, evidence_tolerance=evidence_tolerance,
+    sampling_efficiency=0.8, importance_nested_sampling=True,
     log_zero=-300.0)
     # const_efficiency_mode=True, sampling_efficiency=1)
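
A note on the likelihood line this patch rewrites: log(exp(-N_a-N_conv-N_pr-N_mu)*fl_sh*fl_tr) forms the full product in double precision before taking the natural log, and with 80 events (58 showers + 22 tracks; the new ll_den comment is presumably ln(80!) for that count) the product can underflow to 0.0 before log() sees it, which is likely what the log_zero=-300.0 floor passed to MultiNest guards against. An algebraically identical, underflow-safe form sums per-event logs instead. A minimal sketch in plain Python rather than the repo's Cython, with per_event_sh/per_event_tr as hypothetical stand-ins for the values returned by Partial_Likelihood_Showers/Partial_Likelihood_Tracks:

    import numpy as np

    def log_likelihood_stable(N_a, N_conv, N_pr, N_mu,
                              per_event_sh, per_event_tr):
        # Same value as log(exp(-N_tot) * prod(per_event_sh) * prod(per_event_tr)),
        # but -N_tot + sum(log(...)) never forms the tiny product, so it cannot
        # round to 0.0 and send log() to -inf.
        N_tot = N_a + N_conv + N_pr + N_mu
        return (-N_tot
                + np.sum(np.log(per_event_sh))
                + np.sum(np.log(per_event_tr)))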
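The Prior hunk is easier to read with pymultinest's convention in mind: run() hands Prior(cube, ndim, nparams) coordinates that are uniform on the unit hypercube, and the function overwrites them in place with physical parameter values. A sketch of the two mappings visible in this hunk (names taken from Log_Likelihood_MultiNest; cube[2] through cube[6] are untouched by the patch and elided here):

    def Prior(cube, ndim, nparams):
        # Each cube[i] arrives uniform on [0, 1]; overwrite it in place.
        cube[0] = 2.0 + cube[0]         # spectral index gamma: uniform on [2.0, 3.0]
                                        # (the commented-out 1.8+cube[0]*1.2 gave [1.8, 3.0])
        cube[1] = -3.0 + 2.0*cube[1]    # uniform on [-3.0, -1.0] in log10,
                                        # i.e. log-uniform in the quantity itself
        # ... cube[2] through cube[6] mapped as in the unmodified file ...
        return 0

For the changed MultiNest settings: per the MultiNest documentation, a negative seed takes the random seed from the system clock (so seed=-1 trades the old reproducible seed=1 runs for independent ones), and sampling_efficiency=0.8 is the value recommended for parameter-estimation runs, versus 0.3 for evidence evaluation.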