Coverage for bilby/core/sampler/pymc.py: 10% of 430 statements (coverage.py v7.6.1, created at 2025-05-06 04:57 +0000)
import numpy as np

from ...gw.likelihood import BasicGravitationalWaveTransient, GravitationalWaveTransient
from ..likelihood import (
    ExponentialLikelihood,
    GaussianLikelihood,
    PoissonLikelihood,
    StudentTLikelihood,
)
from ..prior import Cosine, DeltaFunction, MultivariateGaussian, PowerLaw, Sine
from ..utils import derivatives, infer_args_from_method
from .base_sampler import MCMCSampler


class Pymc(MCMCSampler):
    """bilby wrapper of the PyMC sampler (https://www.pymc.io/)

    All keyword arguments (i.e., the kwargs) passed to `run_sampler` will be
    propagated to `pymc.sample` where appropriate, see the documentation for
    that class for further help. Below, we list commonly used kwargs and the
    bilby (or, where appropriate, PyMC) defaults.

    Parameters
    ==========
    draws: int, (500)
        The number of sample draws from the posterior per chain.
    chains: int, (2)
        The number of independent MCMC chains to run.
    cores: int, (1)
        The number of CPU cores to use.
    tune: int, (500)
        The number of tuning (or burn-in) samples per chain.
    discard_tuned_samples: bool, True
        Whether to automatically discard the tuning samples from the final
        chains.
    step: str, dict
        Provide a step method name, or a dictionary of step method names keyed
        to particular variable names (these are case insensitive). If passing
        a dictionary of methods, the values keyed on particular variables can
        be lists of methods to form compound steps. If no method is provided
        for any particular variable then PyMC will automatically decide upon a
        default, with the first option being the NUTS sampler. The currently
        allowed methods are 'NUTS', 'HamiltonianMC', 'Metropolis',
        'BinaryMetropolis', 'BinaryGibbsMetropolis', 'Slice', and
        'CategoricalGibbsMetropolis'. Note: you cannot provide a PyMC step
        method function itself here as it is outside of the model context
        manager.
    step_kwargs: dict
        Options for step methods other than NUTS. The dictionary is keyed on
        lowercase step method names with values being dictionaries of keywords
        for the given step method.
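
    Examples
    ========
    A minimal sketch of typical usage (assuming ``bilby`` is imported and a
    ``likelihood`` and ``priors`` have been defined in the usual bilby way;
    the parameter names ``m`` and ``c`` are purely illustrative)::

        result = bilby.run_sampler(
            likelihood=likelihood,
            priors=priors,
            sampler="pymc",
            draws=1000,
            tune=1000,
            step={"m": "Metropolis", "c": ["Metropolis", "Slice"]},
        )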
53 """
55 sampler_name = "pymc"
56 default_kwargs = dict(
57 draws=500,
58 step=None,
59 init="auto",
60 n_init=200000,
61 initvals=None,
62 trace=None,
63 chains=2,
64 cores=1,
65 tune=500,
66 progressbar=True,
67 model=None,
68 random_seed=None,
69 discard_tuned_samples=True,
70 compute_convergence_checks=True,
71 nuts_kwargs=None,
72 step_kwargs=None,
73 )
75 default_nuts_kwargs = dict(
76 target_accept=None,
77 max_treedepth=None,
78 step_scale=None,
79 Emax=None,
80 gamma=None,
81 k=None,
82 t0=None,
83 adapt_step_size=None,
84 early_max_treedepth=None,
85 scaling=None,
86 is_cov=None,
87 potential=None,
88 )
90 default_kwargs.update(default_nuts_kwargs)
92 sampling_seed_key = "random_seed"

    def __init__(
        self,
        likelihood,
        priors,
        outdir="outdir",
        label="label",
        use_ratio=False,
        plot=False,
        skip_import_verification=False,
        **kwargs,
    ):
        # add default step kwargs
        _, STEP_METHODS, _ = self._import_external_sampler()
        self.default_step_kwargs = {m.__name__.lower(): None for m in STEP_METHODS}
        self.default_kwargs.update(self.default_step_kwargs)

        super(Pymc, self).__init__(
            likelihood=likelihood,
            priors=priors,
            outdir=outdir,
            label=label,
            use_ratio=use_ratio,
            plot=plot,
            skip_import_verification=skip_import_verification,
            **kwargs,
        )
        self.draws = self._kwargs["draws"]
        self.chains = self._kwargs["chains"]

    @staticmethod
    def _import_external_sampler():
        import pymc
        from pymc import floatX
        from pymc.step_methods import STEP_METHODS

        return pymc, STEP_METHODS, floatX

    @staticmethod
    def _import_tensor():
        try:
            import pytensor as tensor  # noqa
            import pytensor.tensor as tt
            from pytensor.compile.ops import as_op  # noqa
        except ImportError:
            import aesara as tensor  # noqa
            import aesara.tensor as tt
            from aesara.compile.ops import as_op  # noqa

        return tensor, tt, as_op

    def _verify_parameters(self):
        """
        Override `_verify_parameters()` to just pass, i.e., don't try to
        evaluate the likelihood for PyMC.
        """
        pass

    def _verify_use_ratio(self):
        """
        Override `_verify_use_ratio()` to just pass.
        """
        pass

    def setup_prior_mapping(self):
        """
        Set the mapping between predefined bilby priors and the equivalent
        PyMC distributions.
        """

        prior_map = {}
        self.prior_map = prior_map

        # predefined PyMC distributions
        prior_map["Gaussian"] = {
            "pymc": "Normal",
            "argmap": {"mu": "mu", "sigma": "sigma"},
        }
        prior_map["TruncatedGaussian"] = {
            "pymc": "TruncatedNormal",
            "argmap": {
                "mu": "mu",
                "sigma": "sigma",
                "minimum": "lower",
                "maximum": "upper",
            },
        }
        prior_map["HalfGaussian"] = {"pymc": "HalfNormal", "argmap": {"sigma": "sigma"}}
        prior_map["Uniform"] = {
            "pymc": "Uniform",
            "argmap": {"minimum": "lower", "maximum": "upper"},
        }
        prior_map["LogNormal"] = {
            "pymc": "Lognormal",
            "argmap": {"mu": "mu", "sigma": "sigma"},
        }
        prior_map["Exponential"] = {
            "pymc": "Exponential",
            "argmap": {"mu": "lam"},
            "argtransform": {"mu": lambda mu: 1.0 / mu},
        }
        prior_map["StudentT"] = {
            "pymc": "StudentT",
            "argmap": {"df": "nu", "mu": "mu", "scale": "sigma"},
        }
        prior_map["Beta"] = {
            "pymc": "Beta",
            "argmap": {"alpha": "alpha", "beta": "beta"},
        }
        prior_map["Logistic"] = {
            "pymc": "Logistic",
            "argmap": {"mu": "mu", "scale": "s"},
        }
        prior_map["Cauchy"] = {
            "pymc": "Cauchy",
            "argmap": {"alpha": "alpha", "beta": "beta"},
        }
        prior_map["Gamma"] = {
            "pymc": "Gamma",
            "argmap": {"k": "alpha", "theta": "beta"},
            "argtransform": {"theta": lambda theta: 1.0 / theta},
        }
        prior_map["ChiSquared"] = {"pymc": "ChiSquared", "argmap": {"nu": "nu"}}
        prior_map["Interped"] = {
            "pymc": "Interpolated",
            "argmap": {"xx": "x_points", "yy": "pdf_points"},
        }
        prior_map["Normal"] = prior_map["Gaussian"]
        prior_map["TruncatedNormal"] = prior_map["TruncatedGaussian"]
        prior_map["HalfNormal"] = prior_map["HalfGaussian"]
        prior_map["LogGaussian"] = prior_map["LogNormal"]
        prior_map["Lorentzian"] = prior_map["Cauchy"]
        prior_map["FromFile"] = prior_map["Interped"]

        # GW specific priors
        prior_map["UniformComovingVolume"] = prior_map["Interped"]

        # internally defined mappings for bilby priors
        prior_map["DeltaFunction"] = {"internal": self._deltafunction_prior}
        prior_map["Sine"] = {"internal": self._sine_prior}
        prior_map["Cosine"] = {"internal": self._cosine_prior}
        prior_map["PowerLaw"] = {"internal": self._powerlaw_prior}
        prior_map["LogUniform"] = {"internal": self._powerlaw_prior}
        prior_map["MultivariateGaussian"] = {
            "internal": self._multivariate_normal_prior
        }
        prior_map["MultivariateNormal"] = {"internal": self._multivariate_normal_prior}

    def _deltafunction_prior(self, key, **kwargs):
        """
        Map the bilby delta function prior to a single value for PyMC.
        """

        # check prior is a DeltaFunction
        if isinstance(self.priors[key], DeltaFunction):
            return self.priors[key].peak
        else:
            raise ValueError(f"Prior for '{key}' is not a DeltaFunction")

    def _sine_prior(self, key):
        """
        Map the bilby Sine prior to a PyMC style function
        """

        # check prior is a Sine
        pymc, _, floatX = self._import_external_sampler()
        _, tt, _ = self._import_tensor()
        if isinstance(self.priors[key], Sine):

            class PymcSine(pymc.Continuous):
                def __init__(self, lower=0.0, upper=np.pi):
                    if lower >= upper:
                        raise ValueError("Lower bound is above upper bound!")

                    # set the bounds, normalisation and mean
                    self.lower = lower = tt.as_tensor_variable(floatX(lower))
                    self.upper = upper = tt.as_tensor_variable(floatX(upper))
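                    # normalisation constant: the integral of sin(x) over
                    # [lower, upper]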
                    self.norm = tt.cos(lower) - tt.cos(upper)
                    self.mean = (
                        tt.sin(upper)
                        + lower * tt.cos(lower)
                        - tt.sin(lower)
                        - upper * tt.cos(upper)
                    ) / self.norm

                    transform = pymc.distributions.transforms.interval(lower, upper)

                    super(PymcSine, self).__init__(transform=transform)

                def logp(self, value):
                    upper = self.upper
                    lower = self.lower
                    return pymc.distributions.dist_math.bound(
                        tt.log(tt.sin(value) / self.norm),
                        lower <= value,
                        value <= upper,
                    )

            return PymcSine(
                key, lower=self.priors[key].minimum, upper=self.priors[key].maximum
            )
        else:
            raise ValueError(f"Prior for '{key}' is not a Sine")

    def _cosine_prior(self, key):
        """
        Map the bilby Cosine prior to a PyMC style function
        """

        # check prior is a Cosine
        pymc, _, floatX = self._import_external_sampler()
        _, tt, _ = self._import_tensor()
        if isinstance(self.priors[key], Cosine):

            class PymcCosine(pymc.Continuous):
                def __init__(self, lower=-np.pi / 2.0, upper=np.pi / 2.0):
                    if lower >= upper:
                        raise ValueError("Lower bound is above upper bound!")

                    self.lower = lower = tt.as_tensor_variable(floatX(lower))
                    self.upper = upper = tt.as_tensor_variable(floatX(upper))
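                    # normalisation constant: the integral of cos(x) over
                    # [lower, upper]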
                    self.norm = tt.sin(upper) - tt.sin(lower)
                    self.mean = (
                        upper * tt.sin(upper)
                        + tt.cos(upper)
                        - lower * tt.sin(lower)
                        - tt.cos(lower)
                    ) / self.norm

                    transform = pymc.distributions.transforms.interval(lower, upper)

                    super(PymcCosine, self).__init__(transform=transform)

                def logp(self, value):
                    upper = self.upper
                    lower = self.lower
                    return pymc.distributions.dist_math.bound(
                        tt.log(tt.cos(value) / self.norm),
                        lower <= value,
                        value <= upper,
                    )

            return PymcCosine(
                key, lower=self.priors[key].minimum, upper=self.priors[key].maximum
            )
        else:
            raise ValueError(f"Prior for '{key}' is not a Cosine")

    def _powerlaw_prior(self, key):
        """
        Map the bilby PowerLaw prior to a PyMC style function
        """

        # check prior is a PowerLaw
        pymc, _, floatX = self._import_external_sampler()
        _, tt, _ = self._import_tensor()
        if isinstance(self.priors[key], PowerLaw):

            # check the power-law exponent is set
            if not hasattr(self.priors[key], "alpha"):
                raise AttributeError("No 'alpha' attribute set for PowerLaw prior")

            if self.priors[key].alpha < -1.0:
                # use Pareto distribution
                palpha = -(1.0 + self.priors[key].alpha)

                return pymc.Bound(pymc.Pareto, upper=self.priors[key].minimum)(
                    key, alpha=palpha, m=self.priors[key].maximum
                )
            else:

                class PymcPowerLaw(pymc.Continuous):
                    def __init__(self, lower, upper, alpha, testval=1):
                        falpha = alpha
                        self.lower = lower = tt.as_tensor_variable(floatX(lower))
                        self.upper = upper = tt.as_tensor_variable(floatX(upper))
                        self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))

                        if falpha == -1:
                            self.norm = 1.0 / (tt.log(self.upper / self.lower))
                        else:
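                            # normalisation: the integral of x**alpha over
                            # [lower, upper] is (upper**beta - lower**beta) / beta
                            # with beta = 1 + alpha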
                            beta = 1.0 + self.alpha
                            self.norm = 1.0 / (
                                beta
                                * (tt.pow(self.upper, beta) - tt.pow(self.lower, beta))
                            )

                        transform = pymc.distributions.transforms.interval(lower, upper)

                        super(PymcPowerLaw, self).__init__(
                            transform=transform, testval=testval
                        )

                    def logp(self, value):
                        upper = self.upper
                        lower = self.lower
                        alpha = self.alpha

                        return pymc.distributions.dist_math.bound(
                            alpha * tt.log(value) + tt.log(self.norm),
                            lower <= value,
                            value <= upper,
                        )

                return PymcPowerLaw(
                    key,
                    lower=self.priors[key].minimum,
                    upper=self.priors[key].maximum,
                    alpha=self.priors[key].alpha,
                )
        else:
            raise ValueError(f"Prior for '{key}' is not a Power Law")

    def _multivariate_normal_prior(self, key):
        """
        Map the bilby MultivariateNormal prior to a PyMC style function.
        """

        # check prior is a MultivariateGaussian
        pymc, _, _ = self._import_external_sampler()
        if isinstance(self.priors[key], MultivariateGaussian):
            # get names of multivariate Gaussian parameters
            mvpars = self.priors[key].mvg.names

            # set the prior on multiple parameters if not present yet
            if not np.all([p in self.multivariate_normal_sets for p in mvpars]):
                mvg = self.priors[key].mvg

                # get bounds
                lower = [bound[0] for bound in mvg.bounds.values()]
                upper = [bound[1] for bound in mvg.bounds.values()]

                # test values required for mixture
                testvals = []
                for bound in mvg.bounds.values():
                    if np.isinf(bound[0]) and np.isinf(bound[1]):
                        testvals.append(0.0)
                    elif np.isinf(bound[0]):
                        testvals.append(bound[1] - 1.0)
                    elif np.isinf(bound[1]):
                        testvals.append(bound[0] + 1.0)
                    else:
                        # half-way between the two bounds
                        testvals.append(bound[0] + (bound[1] - bound[0]) / 2.0)

                # if bounds are at +/-infinity set to 100 sigmas as infinities
                # cause problems for the Bound class
                maxmu = np.max(mvg.mus, axis=0)
                minmu = np.min(mvg.mus, axis=0)
                maxsigma = np.max(mvg.sigmas, axis=0)
                for i in range(len(mvpars)):
                    if np.isinf(lower[i]):
                        lower[i] = minmu[i] - 100.0 * maxsigma[i]
                    if np.isinf(upper[i]):
                        upper[i] = maxmu[i] + 100.0 * maxsigma[i]

                # create a bounded MultivariateNormal distribution
                BoundedMvN = pymc.Bound(pymc.MvNormal, lower=lower, upper=upper)

                comp_dists = []  # list of any component modes
                for i in range(mvg.nmodes):
                    comp_dists.append(
                        BoundedMvN(
                            f"comp{i}",
                            mu=mvg.mus[i],
                            cov=mvg.covs[i],
                            shape=len(mvpars),
                        ).distribution
                    )

                # create a Mixture model
                setname = f"mixture{self.multivariate_normal_num_sets}"
                mix = pymc.Mixture(
                    setname,
                    w=mvg.weights,
                    comp_dists=comp_dists,
                    shape=len(mvpars),
                    testval=testvals,
                )

                for i, p in enumerate(mvpars):
                    self.multivariate_normal_sets[p] = {}
                    self.multivariate_normal_sets[p]["prior"] = mix[i]
                    self.multivariate_normal_sets[p]["set"] = setname
                    self.multivariate_normal_sets[p]["index"] = i

                self.multivariate_normal_num_sets += 1

            # return required parameter
            return self.multivariate_normal_sets[key]["prior"]

        else:
            raise ValueError(f"Prior for '{key}' is not a MultivariateGaussian")

    def run_sampler(self):
        # set the step method
        pymc, STEP_METHODS, floatX = self._import_external_sampler()
        step_methods = {m.__name__.lower(): m.__name__ for m in STEP_METHODS}
        if "step" in self._kwargs:
            self.step_method = self._kwargs.pop("step")

            # 'step' could be a dictionary of methods for different parameters,
            # so check for this
            if self.step_method is None:
                pass
            elif isinstance(self.step_method, dict):
                for key in self.step_method:
                    if key not in self._search_parameter_keys:
                        raise ValueError(
                            f"Setting a step method for an unknown parameter '{key}'"
                        )
                    else:
                        # check if using a compound step (a list of step
                        # methods for a particular parameter)
                        if isinstance(self.step_method[key], list):
                            sms = self.step_method[key]
                        else:
                            sms = [self.step_method[key]]
                        for sm in sms:
                            if sm.lower() not in step_methods:
                                raise ValueError(
                                    f"Using invalid step method '{sm}'"
                                )
            else:
                # check if using a compound step (a list of step
                # methods for a particular parameter)
                if isinstance(self.step_method, list):
                    sms = self.step_method
                else:
                    sms = [self.step_method]

                for i in range(len(sms)):
                    if sms[i].lower() not in step_methods:
                        raise ValueError(f"Using invalid step method '{sms[i]}'")
        else:
            self.step_method = None
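
        # Illustrative examples of accepted forms for the "step" kwarg (the
        # parameter names "m" and "c" are hypothetical):
        #     step="Metropolis"
        #     step=["Metropolis", "Slice"]  # compound step
        #     step={"m": "NUTS", "c": ["Metropolis", "Slice"]}  # per-parameter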

        # initialise the PyMC model
        self.pymc_model = pymc.Model()

        # set the prior
        self.set_prior()

        # if a custom log_likelihood function requires a `sampler` argument
        # then use that log_likelihood function, with the assumption that it
        # takes in a Pymc Sampler, with a pymc_model attribute, and defines
        # the likelihood within that context manager
        likeargs = infer_args_from_method(self.likelihood.log_likelihood)
        if "sampler" in likeargs:
            self.likelihood.log_likelihood(sampler=self)
        else:
            # set the likelihood function from predefined functions
            self.set_likelihood()

        # get the step method keyword arguments
        step_kwargs = self.kwargs.pop("step_kwargs")
        if step_kwargs is not None:
            # remove all individual default step kwargs if passed together using
            # step_kwargs keywords
            for key in self.default_step_kwargs:
                self.kwargs.pop(key)
        else:
            # remove any None default step keywords and place others in step_kwargs
            step_kwargs = {}
            for key in self.default_step_kwargs:
                if self.kwargs[key] is None:
                    self.kwargs.pop(key)
                else:
                    step_kwargs[key] = self.kwargs.pop(key)

        nuts_kwargs = self.kwargs.pop("nuts_kwargs")
        if nuts_kwargs is not None:
            # remove all individual default nuts kwargs if passed together using
            # nuts_kwargs keywords
            for key in self.default_nuts_kwargs:
                self.kwargs.pop(key)
        else:
            # remove any None default nuts keywords and place others in nuts_kwargs
            nuts_kwargs = {}
            for key in self.default_nuts_kwargs:
                if self.kwargs[key] is None:
                    self.kwargs.pop(key)
                else:
                    nuts_kwargs[key] = self.kwargs.pop(key)
        methodslist = []

        # set the step method
        if isinstance(self.step_method, dict):
            # create list of step methods (any not given will default to NUTS)
            self.kwargs["step"] = []
            with self.pymc_model:
                for key in self.step_method:
                    # check for a compound step list
                    if isinstance(self.step_method[key], list):
                        for sms in self.step_method[key]:
                            curmethod = sms.lower()
                            methodslist.append(curmethod)
                            nuts_kwargs = self._create_nuts_kwargs(
                                curmethod,
                                key,
                                nuts_kwargs,
                                pymc,
                                step_kwargs,
                                step_methods,
                            )
                    else:
                        curmethod = self.step_method[key].lower()
                        methodslist.append(curmethod)
                        nuts_kwargs = self._create_nuts_kwargs(
                            curmethod,
                            key,
                            nuts_kwargs,
                            pymc,
                            step_kwargs,
                            step_methods,
                        )
        else:
            with self.pymc_model:
                # check for a compound step list
                if isinstance(self.step_method, list):
                    compound = []
                    for sms in self.step_method:
                        curmethod = sms.lower()
                        methodslist.append(curmethod)
                        args, nuts_kwargs = self._create_args_and_nuts_kwargs(
                            curmethod, nuts_kwargs, step_kwargs
                        )
                        compound.append(pymc.__dict__[step_methods[curmethod]](**args))
                        self.kwargs["step"] = compound
                else:
                    self.kwargs["step"] = None
                    if self.step_method is not None:
                        curmethod = self.step_method.lower()
                        methodslist.append(curmethod)
                        args, nuts_kwargs = self._create_args_and_nuts_kwargs(
                            curmethod, nuts_kwargs, step_kwargs
                        )
                        self.kwargs["step"] = pymc.__dict__[step_methods[curmethod]](
                            **args
                        )

        # check whether only the NUTS step method has been assigned
        if np.all([sm.lower() == "nuts" for sm in methodslist]):
            # in this case we can let PyMC autoinitialise NUTS, so remove the
            # step methods and re-add nuts_kwargs
            self.kwargs["step"] = None

            if len(nuts_kwargs) > 0:
                # add NUTS kwargs to standard kwargs
                self.kwargs.update(nuts_kwargs)

        with self.pymc_model:
            # perform the sampling and then convert to inference data
            trace = pymc.sample(**self.kwargs, return_inferencedata=False)
            ikwargs = dict(
                model=self.pymc_model,
                save_warmup=not self.kwargs["discard_tuned_samples"],
                log_likelihood=True,
            )
            trace = pymc.to_inference_data(trace, **ikwargs)

        posterior = trace.posterior.to_dataframe().reset_index()
        self.result.samples = posterior[self.search_parameter_keys]
        self.result.log_likelihood_evaluations = np.sum(
            trace.log_likelihood.likelihood.values, axis=-1
        ).flatten()
        self.result.sampler_output = np.nan
        self.calculate_autocorrelation(self.result.samples)
        self.result.log_evidence = np.nan
        self.result.log_evidence_err = np.nan
        self.calc_likelihood_count()
        return self.result

    def _create_args_and_nuts_kwargs(self, curmethod, nuts_kwargs, step_kwargs):
        if curmethod == "nuts":
            args, nuts_kwargs = self._get_nuts_args(nuts_kwargs, step_kwargs)
        else:
            args = step_kwargs.get(curmethod, {})
        return args, nuts_kwargs

    def _create_nuts_kwargs(
        self, curmethod, key, nuts_kwargs, pymc, step_kwargs, step_methods
    ):
        if curmethod == "nuts":
            args, nuts_kwargs = self._get_nuts_args(nuts_kwargs, step_kwargs)
        else:
            if step_kwargs is not None:
                args = step_kwargs.get(curmethod, {})
            else:
                args = {}
        self.kwargs["step"].append(
            pymc.__dict__[step_methods[curmethod]](vars=[self.pymc_priors[key]], **args)
        )
        return nuts_kwargs

    @staticmethod
    def _get_nuts_args(nuts_kwargs, step_kwargs):
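        """
        Resolve the NUTS arguments (a summary added for clarity): an explicit
        ``nuts_kwargs`` dict takes precedence; otherwise any
        ``step_kwargs["nuts"]`` entry is popped and promoted to
        ``nuts_kwargs``; failing both, empty args are returned.
        """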
        if nuts_kwargs is not None:
            args = nuts_kwargs
        elif step_kwargs is not None:
            args = step_kwargs.pop("nuts", {})
            # add values into nuts_kwargs
            nuts_kwargs = args
        else:
            args = {}
        return args, nuts_kwargs

    def set_prior(self):
        """
        Set the PyMC prior distributions.
        """

        self.setup_prior_mapping()

        self.pymc_priors = dict()
        pymc, _, _ = self._import_external_sampler()

        # initialise a dictionary of multivariate Gaussian parameters
        self.multivariate_normal_sets = {}
        self.multivariate_normal_num_sets = 0
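
        # After the loop below, self.pymc_priors maps each parameter name to a
        # PyMC random variable (or a plain float for DeltaFunction priors),
        # e.g. (illustrative only): {"m": <Normal RV>, "c": <Uniform RV>}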
        # set the parameter prior distributions (in the model context manager)
        with self.pymc_model:
            for key in self.priors:
                # if the prior contains an ln_prob method that takes a
                # 'sampler' argument then try using that
                lnprobargs = infer_args_from_method(self.priors[key].ln_prob)
                if "sampler" in lnprobargs:
                    try:
                        self.pymc_priors[key] = self.priors[key].ln_prob(sampler=self)
                    except RuntimeError:
                        raise RuntimeError(f"Problem setting PyMC prior for '{key}'")
                else:
                    # use Prior distribution name
                    distname = self.priors[key].__class__.__name__

                    if distname in self.prior_map:
                        # check if we have a predefined PyMC distribution
                        if (
                            "pymc" in self.prior_map[distname]
                            and "argmap" in self.prior_map[distname]
                        ):
                            # check the required arguments for the PyMC distribution
                            pymcdistname = self.prior_map[distname]["pymc"]

                            if pymcdistname not in pymc.__dict__:
                                raise ValueError(
                                    f"Prior '{pymcdistname}' is not a known PyMC distribution."
                                )

                            reqargs = infer_args_from_method(
                                pymc.__dict__[pymcdistname].dist
                            )

                            # set keyword arguments
                            priorkwargs = {}
                            for (targ, parg) in self.prior_map[distname][
                                "argmap"
                            ].items():
                                if hasattr(self.priors[key], targ):
                                    if parg in reqargs:
                                        if "argtransform" in self.prior_map[distname]:
                                            if (
                                                targ
                                                in self.prior_map[distname][
                                                    "argtransform"
                                                ]
                                            ):
                                                tfunc = self.prior_map[distname][
                                                    "argtransform"
                                                ][targ]
                                            else:

                                                def tfunc(x):
                                                    return x

                                        else:

                                            def tfunc(x):
                                                return x

                                        priorkwargs[parg] = tfunc(
                                            getattr(self.priors[key], targ)
                                        )
                                    else:
                                        raise ValueError(f"Unknown argument {parg}")
                                else:
                                    if parg in reqargs:
                                        priorkwargs[parg] = None
                            self.pymc_priors[key] = pymc.__dict__[pymcdistname](
                                key, **priorkwargs
                            )
                        elif "internal" in self.prior_map[distname]:
                            self.pymc_priors[key] = self.prior_map[distname][
                                "internal"
                            ](key)
                        else:
                            raise ValueError(
                                f"Prior '{distname}' is not a known distribution."
                            )
                    else:
                        raise ValueError(
                            f"Prior '{distname}' is not a known distribution."
                        )

    def set_likelihood(self):
        """
        Convert any bilby likelihoods to PyMC distributions.
        """

        # create Op for the log likelihood if not using a predefined model
        pymc, _, _ = self._import_external_sampler()
        _, tt, _ = self._import_tensor()

        class LogLike(tt.Op):

            itypes = [tt.dvector]
            otypes = [tt.dscalar]

            def __init__(self, parameters, loglike, priors):
                self.parameters = parameters
                self.likelihood = loglike
                self.priors = priors

                # set the fixed parameters
                for key in self.priors.keys():
                    if isinstance(self.priors[key], float):
                        self.likelihood.parameters[key] = self.priors[key]

                self.logpgrad = LogLikeGrad(
                    self.parameters, self.likelihood, self.priors
                )

            def perform(self, node, inputs, outputs):
                (theta,) = inputs
                for i, key in enumerate(self.parameters):
                    self.likelihood.parameters[key] = theta[i]

                outputs[0][0] = np.array(self.likelihood.log_likelihood())

            def grad(self, inputs, g):
                (theta,) = inputs
                return [g[0] * self.logpgrad(theta)]
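
        # Illustration (assumed usage, mirroring the GravitationalWaveTransient
        # branch below): wrapping the bilby likelihood as
        #     logl = LogLike(parameter_keys, likelihood, priors)
        # gives a tensor Op whose perform() evaluates the black-box bilby
        # log-likelihood and whose grad() is supplied numerically by
        # LogLikeGrad.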

        # create Op for calculating the gradient of the log likelihood
        class LogLikeGrad(tt.Op):

            itypes = [tt.dvector]
            otypes = [tt.dvector]

            def __init__(self, parameters, loglike, priors):
                self.parameters = parameters
                self.Nparams = len(parameters)
                self.likelihood = loglike
                self.priors = priors

                # set the fixed parameters
                for key in self.priors.keys():
                    if isinstance(self.priors[key], float):
                        self.likelihood.parameters[key] = self.priors[key]

            def perform(self, node, inputs, outputs):
                (theta,) = inputs

                # define version of likelihood function to pass to derivative function
                def lnlike(values):
                    for i, key in enumerate(self.parameters):
                        self.likelihood.parameters[key] = values[i]
                    return self.likelihood.log_likelihood()

                # calculate gradients
                grads = derivatives(
                    theta, lnlike, abseps=1e-5, mineps=1e-12, reltol=1e-2
                )

                outputs[0][0] = grads

        with self.pymc_model:
            # check if it is a predefined likelihood function
            if isinstance(self.likelihood, GaussianLikelihood):
                # check required attributes exist
                if (
                    not hasattr(self.likelihood, "sigma")
                    or not hasattr(self.likelihood, "x")
                    or not hasattr(self.likelihood, "y")
                ):
                    raise ValueError(
                        "Gaussian Likelihood does not have all the correct attributes!"
                    )

                if "sigma" in self.pymc_priors:
                    # if sigma is supplied use that value
                    if self.likelihood.sigma is None:
                        self.likelihood.sigma = self.pymc_priors.pop("sigma")
                    else:
                        del self.pymc_priors["sigma"]

                for key in self.pymc_priors:
                    if key not in self.likelihood.function_keys:
                        raise ValueError(f"Prior key '{key}' is not a function key!")

                model = self.likelihood.func(self.likelihood.x, **self.pymc_priors)

                # set the distribution
                pymc.Normal(
                    "likelihood",
                    mu=model,
                    sigma=self.likelihood.sigma,
                    observed=self.likelihood.y,
                )
            elif isinstance(self.likelihood, PoissonLikelihood):
                # check required attributes exist
                if not hasattr(self.likelihood, "x") or not hasattr(
                    self.likelihood, "y"
                ):
                    raise ValueError(
                        "Poisson Likelihood does not have all the correct attributes!"
                    )

                for key in self.pymc_priors:
                    if key not in self.likelihood.function_keys:
                        raise ValueError(f"Prior key '{key}' is not a function key!")

                # get rate function
                model = self.likelihood.func(self.likelihood.x, **self.pymc_priors)

                # set the distribution
                pymc.Poisson("likelihood", mu=model, observed=self.likelihood.y)
            elif isinstance(self.likelihood, ExponentialLikelihood):
                # check required attributes exist
                if not hasattr(self.likelihood, "x") or not hasattr(
                    self.likelihood, "y"
                ):
                    raise ValueError(
                        "Exponential Likelihood does not have all the correct attributes!"
                    )

                for key in self.pymc_priors:
                    if key not in self.likelihood.function_keys:
                        raise ValueError(f"Prior key '{key}' is not a function key!")

                # get mean function
                model = self.likelihood.func(self.likelihood.x, **self.pymc_priors)

                # set the distribution
                pymc.Exponential(
                    "likelihood", lam=1.0 / model, observed=self.likelihood.y
                )
            elif isinstance(self.likelihood, StudentTLikelihood):
                # check required attributes exist
                if (
                    not hasattr(self.likelihood, "x")
                    or not hasattr(self.likelihood, "y")
                    or not hasattr(self.likelihood, "nu")
                    or not hasattr(self.likelihood, "sigma")
                ):
                    raise ValueError(
                        "StudentT Likelihood does not have all the correct attributes!"
                    )

                if "nu" in self.pymc_priors:
                    # if nu is supplied use that value
                    if self.likelihood.nu is None:
                        self.likelihood.nu = self.pymc_priors.pop("nu")
                    else:
                        del self.pymc_priors["nu"]

                for key in self.pymc_priors:
                    if key not in self.likelihood.function_keys:
                        raise ValueError(f"Prior key '{key}' is not a function key!")

                model = self.likelihood.func(self.likelihood.x, **self.pymc_priors)

                # set the distribution
                pymc.StudentT(
                    "likelihood",
                    nu=self.likelihood.nu,
                    mu=model,
                    sigma=self.likelihood.sigma,
                    observed=self.likelihood.y,
                )
            elif isinstance(
                self.likelihood,
                (GravitationalWaveTransient, BasicGravitationalWaveTransient),
            ):
                # set the tensor Op - pass _search_parameter_keys, which only
                # contains non-fixed variables
                logl = LogLike(
                    self._search_parameter_keys, self.likelihood, self.pymc_priors
                )

                parameters = dict()
                for key in self._search_parameter_keys:
                    try:
                        parameters[key] = self.pymc_priors[key]
                    except KeyError:
                        raise KeyError(
                            f"Unknown key '{key}' when setting GravitationalWaveTransient likelihood"
                        )

                # convert to tensor variable
                values = tt.as_tensor_variable(list(parameters.values()))

                pymc.DensityDist(
                    "likelihood", lambda v: logl(v), observed={"v": values}
                )
            else:
                raise ValueError("Unknown likelihood has been provided")