Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Store evidence #156

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
4 changes: 4 additions & 0 deletions ompy/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -558,6 +558,10 @@ class ResultsNormalized(Model):
#: (List[Dict[str, Any]]): Samples from the posterior of the parameters
samples: List[Dict[str, Any]] = field(default_factory=list,
metadata='Samples from the posterior of the parameters') # noqa
#: (Tuple[float, float] or List[Tuple[float, float]]): Evidence and error in evidence for the model
evidence: Union[Tuple[float, float], List[Tuple[float, float]]] \
= field(default_factory=list,
metadata='Global evidence for the model')
#: (List[Callable[..., Any]]): nld model for each nld
nld_model: List[Callable[..., Any]] = field(default_factory=list,
metadata='nld model') # noqa
Expand Down
6 changes: 4 additions & 2 deletions ompy/normalizer_nld.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@ def normalize(self, *, limit_low: Optional[Tuple[float, float]] = None,
# Use DE to get an inital guess before optimizing
args, guess = self.initial_guess(limit_low, limit_high)
# Optimize using multinest
popt, samples = self.optimize(num, args, guess)
popt, samples, evidence = self.optimize(num, args, guess)

transformed = nld.transform(popt['A'][0], popt['alpha'][0],
inplace=False)
Expand All @@ -223,6 +223,7 @@ def normalize(self, *, limit_low: Optional[Tuple[float, float]] = None,
self.res.nld = transformed
self.res.pars = popt
self.res.samples = samples
self.res.evidence = evidence
ext_model = lambda E: self.model(E, T=popt['T'][0],
Eshift=popt['Eshift'][0])
self.res.nld_model = ext_model
Expand Down Expand Up @@ -377,6 +378,7 @@ def loglike(cube, ndim, nparams):
outputfiles_basename=str(path))

stats = analyzer.get_stats()
evidence = (stats['global evidence'], stats['global evidence error'])

samples = analyzer.get_equal_weighted_posterior()[:, :-1]
samples = dict(zip(names, samples.T))
Expand All @@ -397,7 +399,7 @@ def loglike(cube, ndim, nparams):
self.LOG.info("Multinest results:\n%s", tt.to_string([vals],
header=['A', 'α [MeV⁻¹]', 'T [MeV]', 'Eshift [MeV]']))

return popt, samples
return popt, samples, evidence

def plot(self, *, ax: Any = None,
add_label: bool = True,
Expand Down
6 changes: 4 additions & 2 deletions ompy/normalizer_simultan.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,10 +155,11 @@ def normalize(self, *, num: int = 0,
# Use DE to get an inital guess before optimizing
args_nld, guess = self.initial_guess()
# Optimize using multinest
popt, samples = self.optimize(num, args_nld, guess)
popt, samples, evidence = self.optimize(num, args_nld, guess)

self.res.pars = popt
self.res.samples = samples
self.res.evidence = evidence

# reset
if self.std_fake_nld is True:
Expand Down Expand Up @@ -318,6 +319,7 @@ def loglike(cube, ndim, nparams):
outputfiles_basename=str(path))

stats = analyzer.get_stats()
evidence = (stats['global evidence'], stats['global evidence error'])

samples = analyzer.get_equal_weighted_posterior()[:, :-1]
samples = dict(zip(names, samples.T))
Expand All @@ -342,7 +344,7 @@ def loglike(cube, ndim, nparams):
# reset state
self.normalizer_gsf.norm_pars = norm_pars_org

return popt, samples
return popt, samples, evidence

def lnlike(self, x: Tuple[float, float, float, float, float],
args_nld: Iterable) -> float:
Expand Down
2 changes: 2 additions & 0 deletions release_note.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ Added:

Changed:
- Fixed a bug where the `std` attribute of `Vector` was not saved to file.
- Added `evidence` attribute to the `ResultsNormalized` class such that the global evidence of the Bayesian fit
is stored with the results.
- Reimplemented PPF for normal distribution and truncated normal distribution in C++ for improved performance (about 300% faster than the SciPy implementation!).

Deprecated:
Expand Down