{
"cells": [
{
"cell_type": "code",
"execution_count": 32,
"metadata": {
"ExecuteTime": {
"end_time": "2020-02-02T08:22:19.865285Z",
"start_time": "2020-02-02T08:22:19.862283Z"
}
},
"outputs": [],
"source": [
"# imports\n",
"import scipy.stats as sts\n",
"import random\n",
"from math import *"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {
"ExecuteTime": {
"end_time": "2020-02-02T08:22:20.047577Z",
"start_time": "2020-02-02T08:22:20.041596Z"
}
},
"outputs": [],
"source": [
"# Prior and log likelihood\n",
"prior = sts.uniform(-4, 8)\n",
"logL = lambda x : log(0.3*sts.norm(-1.5, 0.2).pdf(x) + 0.7*sts.norm(2, 0.2).pdf(x))"
]
},
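{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Reference value (added sketch): with a Uniform(-4, 4) prior the evidence is\n",
"# Z = (1/8) * integral of the mixture likelihood over (-4, 4). Assuming both\n",
"# Gaussian components sit well inside (-4, 4), Z is roughly 1/8, i.e.\n",
"# log Z is roughly -2.08. Quadrature gives a reference for the nested-sampling\n",
"# estimate computed further below.\n",
"from math import exp, log\n",
"from scipy.integrate import quad\n",
"\n",
"Z_ref, _ = quad(lambda x: exp(logL(x)) * prior.pdf(x), -4, 4, points=[-1.5, 2])\n",
"print(\"Reference log Z:\", log(Z_ref))"
]
},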
{
"cell_type": "code",
"execution_count": 39,
"metadata": {
"ExecuteTime": {
"end_time": "2020-02-02T08:23:26.995388Z",
"start_time": "2020-02-02T08:23:26.990399Z"
}
},
"outputs": [],
"source": [
"# Sample from prior and store log likelihood\n",
"class Parameter:\n",
" def __init__(self) :\n",
" self.value = prior.rvs() # uniform in (-4,4)\n",
" self.logL = logL(self.value)\n",
" \n",
" def __repr__(self) :\n",
" return \"{:.5f}\".format(self.value)\n",
"\n",
"def sample_from_prior():\n",
" theta = Parameter() \n",
" return theta"
]
},
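{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Quick illustration (added sketch): every call to sample_from_prior() returns a\n",
"# fresh Parameter holding a prior draw and its cached log likelihood.\n",
"draws = [sample_from_prior() for _ in range(3)]\n",
"[(theta.value, theta.logL) for theta in draws]"
]
},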
{
"cell_type": "code",
"execution_count": 35,
"metadata": {
"ExecuteTime": {
"end_time": "2020-02-02T08:22:20.803494Z",
"start_time": "2020-02-02T08:22:20.798508Z"
}
},
"outputs": [],
"source": [
"# Explore parameter space by sampling randomly from the prior\n",
"def explore_at_random(theta, logLstar):\n",
" new_theta = Parameter()\n",
" while new_theta.logL <= logLstar:\n",
" new_theta = Parameter()\n",
" return new_theta"
]
},
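{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Check (added sketch): explore_at_random is plain rejection sampling from the\n",
"# prior (the current point theta is not used), so any point it returns must\n",
"# satisfy the likelihood constraint logL > logLstar.\n",
"theta0 = sample_from_prior()\n",
"theta1 = explore_at_random(theta0, theta0.logL)\n",
"assert theta1.logL > theta0.logL\n",
"theta0, theta1"
]
},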
{
"cell_type": "code",
"execution_count": 40,
"metadata": {
"ExecuteTime": {
"end_time": "2020-02-02T08:23:29.880614Z",
"start_time": "2020-02-02T08:23:29.863631Z"
}
},
"outputs": [],
"source": [
"# NESTED SAMPLING MAIN PROGRAM\n",
"# (GNU General Public License software, (C) Sivia and Skilling 2006)\n",
"# This file was translated to Python by Issac Trotts in 2007.\n",
"\n",
"# or so\n",
"DBL_MAX = 1e300\n",
"\n",
"# ~U[0,1)\n",
"uniform = random.random\n",
"\n",
"# logarithmic addition log(exp(x)+exp(y))\n",
"def plus(x,y):\n",
" if x>y:\n",
" return x+log(1+exp(y-x)) \n",
" else:\n",
" return y+log(1+exp(x-y))\n",
"\n",
"# n = number of objects to evolve\n",
"def nested_sampling(n, max_iter, sample_from_prior, explore):\n",
" \"\"\"\n",
" This is an implementation of John Skilling's Nested Sampling algorithm\n",
" for computing the normalizing constant of a probability distribution\n",
" (usually the posterior in Bayesian inference).\n",
"\n",
" The return value is a dictionary with the following entries:\n",
" \"samples\"\n",
" \"num_iterations\"\n",
" \"logZ\"\n",
" \"logZ_sdev\"\n",
" \"info_nats\"\n",
" \"info_sdev\"\n",
"\n",
" More information is available here:\n",
" http://www.inference.phy.cam.ac.uk/bayesys/\n",
" \"\"\"\n",
" # FIXME: Add a simple example to the doc string.\n",
"\n",
" Obj = [] # Collection of n objects\n",
" Samples = [] # Objects stored for posterior results\n",
" logwidth = None # ln(width in prior mass)\n",
" logLstar = None # ln(Likelihood constraint)\n",
" H = 0.0 # Information, initially 0\n",
" logZ =-DBL_MAX # ln(Evidence Z, initially 0)\n",
" logZnew = None # Updated logZ\n",
" copy = None # Duplicated object\n",
" worst = None # Worst object\n",
" nest = None # Nested sampling iteration count\n",
"\n",
" # Set prior objects\n",
" for i in range(n):\n",
" Obj.append(sample_from_prior())\n",
"\n",
" # Outermost interval of prior mass\n",
" logwidth = log(1.0 - exp(-1.0 / n));\n",
"\n",
" # NESTED SAMPLING LOOP ___________________________________________\n",
" for nest in range(max_iter):\n",
"\n",
" # Worst object in collection, with Weight = width * Likelihood\n",
" worst = 0;\n",
" for i in range(1,n):\n",
" if Obj[i].logL < Obj[worst].logL:\n",
" worst = i\n",
"\n",
" Obj[worst].logWt = logwidth + Obj[worst].logL;\n",
"\n",
" # Update Evidence Z and Information H\n",
" logZnew = plus(logZ, Obj[worst].logWt)\n",
" H = exp(Obj[worst].logWt - logZnew) * Obj[worst].logL + \\\n",
" exp(logZ - logZnew) * (H + logZ) - logZnew;\n",
" logZ = logZnew;\n",
"\n",
" # Posterior Samples (optional)\n",
" Samples.append(Obj[worst])\n",
"\n",
" # Kill worst object in favour of copy of different survivor\n",
" if n>1: # don't kill if n is only 1\n",
" while True:\n",
" copy = int(n * uniform()) % n # force 0 <= copy < n\n",
" if copy != worst:\n",
" break\n",
"\n",
" logLstar = Obj[worst].logL; # new likelihood constraint\n",
" Obj[worst] = Obj[copy]; # overwrite worst object\n",
"\n",
" # Evolve copied object within constraint\n",
" updated = explore(Obj[worst], logLstar);\n",
" assert(updated != None) # Make sure explore didn't update in-place\n",
" Obj[worst] = updated\n",
"\n",
" # Shrink interval\n",
" logwidth -= 1.0 / n;\n",
"\n",
" # Exit with evidence Z, information H, and optional posterior Samples\n",
" sdev_H = H/log(2.)\n",
" sdev_logZ = sqrt(H/n)\n",
" return {\"samples\":Samples, \n",
" \"num_iterations\":(nest+1), \n",
" \"logZ\":logZ,\n",
" \"logZ_sdev\":sdev_logZ,\n",
" \"info_nats\":H,\n",
" \"info_sdev\":sdev_H}\n"
]
},
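{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Check (added sketch): plus(x, y) is a numerically stable two-term log-sum-exp.\n",
"# For moderate arguments it should match the direct formula log(exp(x) + exp(y))\n",
"# to floating-point precision.\n",
"from math import exp, isclose, log\n",
"\n",
"x, y = -2.0, -5.0\n",
"assert isclose(plus(x, y), log(exp(x) + exp(y)))\n",
"plus(x, y)"
]
},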
{
"cell_type": "code",
"execution_count": 57,
"metadata": {
"ExecuteTime": {
"end_time": "2020-02-04T03:56:48.731024Z",
"start_time": "2020-02-04T03:56:48.431778Z"
}
},
"outputs": [
{
"data": {
"text/plain": [
"{'samples': [-3.82923,\n",
" -3.38026,\n",
" -3.33579,\n",
" -3.32836,\n",
" 0.13662,\n",
" 3.62494,\n",
" 3.62127,\n",
" 0.05282,\n",
" 0.43399,\n",
" -2.97526,\n",
" 3.35354,\n",
" -0.17938,\n",
" 0.71085,\n",
" -0.24504,\n",
" -2.74856,\n",
" 3.25189,\n",
" 3.21143,\n",
" -0.38452,\n",
" 0.98393,\n",
" 1.10373,\n",
" 2.88524,\n",
" -0.67277,\n",
" -0.71255,\n",
" 2.82793,\n",
" 1.20702,\n",
" 2.75609,\n",
" -0.79421,\n",
" 2.75008,\n",
" -0.79954,\n",
" 1.25418,\n",
" 1.25534,\n",
" 2.74305,\n",
" -0.81762,\n",
" 1.29807,\n",
" -0.85965,\n",
" 1.31375,\n",
" -0.91287,\n",
" 1.35825,\n",
" 2.61223,\n",
" 1.41231,\n",
" -0.99891,\n",
" -1.01506,\n",
" 1.47125,\n",
" -1.06025,\n",
" 1.49288,\n",
" -1.08578,\n",
" -1.89698,\n",
" 1.54318,\n",
" -1.14346,\n",
" -1.15345],\n",
" 'num_iterations': 50,\n",
" 'logZ': -5.480975494774487,\n",
" 'logZ_sdev': 0.35633013517884493,\n",
" 'info_nats': 2.539423304731478,\n",
" 'info_sdev': 3.6636134084539664}"
]
},
"execution_count": 57,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"results = nested_sampling(n=5, max_iter = 20, sample_from_prior = sample_from_prior, explore = explore_at_random)\n",
"results"
]
},
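{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Posterior weights (added sketch): each stored sample carries logWt, and logZ is\n",
"# the log-sum of those logWt values, so exp(logWt - logZ) gives normalized\n",
"# posterior weights that sum to 1 up to floating-point error.\n",
"from math import exp\n",
"\n",
"weights = [exp(s.logWt - results[\"logZ\"]) for s in results[\"samples\"]]\n",
"sum(weights)"
]
},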
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.4"
},
"toc": {
"base_numbering": 1,
"nav_menu": {},
"number_sections": true,
"sideBar": true,
"skip_h1_title": false,
"title_cell": "Table of Contents",
"title_sidebar": "Contents",
"toc_cell": false,
"toc_position": {},
"toc_section_display": true,
"toc_window_display": false
},
"varInspector": {
"cols": {
"lenName": 16,
"lenType": 16,
"lenVar": 40
},
"kernels_config": {
"python": {
"delete_cmd_postfix": "",
"delete_cmd_prefix": "del ",
"library": "var_list.py",
"varRefreshCmd": "print(var_dic_list())"
},
"r": {
"delete_cmd_postfix": ") ",
"delete_cmd_prefix": "rm(",
"library": "var_list.r",
"varRefreshCmd": "cat(var_dic_list()) "
}
},
"types_to_exclude": [
"module",
"function",
"builtin_function_or_method",
"instance",
"_Feature"
],
"window_display": false
}
},
"nbformat": 4,
"nbformat_minor": 2
}