@danyaljj
Created November 18, 2024 04:27
import numpy as np
import matplotlib.pyplot as plt
# Parameters
T = 35 # Number of trials
beta = 0.5 # Probability parameter for z
alpha_1 = 0.2 # Probability parameter for x if z=0
alpha_2 = 0.6 # Probability parameter for x if z=1
# Simulate z
z = np.random.binomial(1, beta) # z ~ Bernoulli(beta)
# Simulate x based on z
if z == 0:
    x = np.random.binomial(1, alpha_1, T)  # x ~ Bernoulli(alpha_1) for T trials
else:
    x = np.random.binomial(1, alpha_2, T)  # x ~ Bernoulli(alpha_2) for T trials
# Display simulated data
print(f"Sampled value of z: {z}")
print(f"Sampled values of x: {x}")
# Create plots for T trials
t_values = np.arange(1, T + 1)
posterior_values = []
entropy_values = []
variance_values = []
for t in t_values:  # loop over trial counts t = 1, ..., T
    # Posterior via Bayes' rule with Bernoulli likelihoods:
    # P(z=0 | x_1..t) is proportional to alpha_1^S (1-alpha_1)^(t-S) (1-beta),
    # and analogously for z=1 with alpha_2 and beta.
    S = np.sum(x[:t])  # total successes in the first t trials
    P_z0_given_x = (alpha_1 ** S * (1 - alpha_1) ** (t - S) * (1 - beta)) / (
        alpha_1 ** S * (1 - alpha_1) ** (t - S) * (1 - beta) +
        alpha_2 ** S * (1 - alpha_2) ** (t - S) * beta
    )
    P_z1_given_x = 1 - P_z0_given_x
    posterior_values.append(P_z1_given_x)
    variance_values.append(P_z0_given_x * P_z1_given_x)  # Bernoulli variance p(1-p)
    entropy_values.append(
        -P_z0_given_x * np.log(P_z0_given_x + 1e-10)
        - P_z1_given_x * np.log(P_z1_given_x + 1e-10)
    )  # small constant guards against log(0)
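# (Added sketch, not in the original gist) Sanity checks on the collected
# quantities, assuming the loop above ran over all T trials: the posterior
# variance of a Bernoulli variable equals p * (1 - p), and the posterior
# probability itself stays within [0, 1].
posterior_arr = np.array(posterior_values)
assert np.allclose(np.array(variance_values), posterior_arr * (1 - posterior_arr))
assert np.all((posterior_arr >= 0) & (posterior_arr <= 1))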
# Posterior probability plot
plt.figure(figsize=(12, 4))
plt.subplot(1, 3, 1)
plt.scatter(t_values, posterior_values, color='blue', label=r'$P(z=1|\mathbf{x})$', alpha=0.6)
plt.title(r'Posterior Probability $P(z=1|\mathbf{x})$')
plt.xlabel('Trial')
plt.ylabel('Probability')
plt.legend()
plt.grid()
# Posterior variance plot
plt.subplot(1, 3, 2)
plt.scatter(t_values, variance_values, color='green', label=r'Var$(z|\mathbf{x})$', alpha=0.6)
plt.title('Posterior Variance')
plt.xlabel('Trial')
plt.ylabel('Variance')
plt.legend()
plt.grid()
# Posterior entropy plot
plt.subplot(1, 3, 3)
plt.scatter(t_values, entropy_values, color='red', label=r'$H(z|\mathbf{x})$', alpha=0.6)
plt.title(r'Posterior Entropy $H(z|\mathbf{x})$')
plt.xlabel('Trial')
plt.ylabel('Entropy')
plt.legend()
plt.grid()
plt.tight_layout()
plt.show()
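A possible extension, not part of the original gist: for large T the products alpha**S * (1 - alpha)**(t - S) underflow toward zero, so the same posterior is usually computed in log space. The sketch below, with a function name of my own choosing, shows a log-sum-exp version that should agree closely with the direct computation used in the loop above.

def posterior_z1_log_space(x_prefix, alpha_1, alpha_2, beta):
    """P(z=1 | x_prefix) computed from log-likelihoods for numerical stability."""
    t = len(x_prefix)
    S = np.sum(x_prefix)  # successes in the prefix
    log_w0 = S * np.log(alpha_1) + (t - S) * np.log(1 - alpha_1) + np.log(1 - beta)
    log_w1 = S * np.log(alpha_2) + (t - S) * np.log(1 - alpha_2) + np.log(beta)
    m = max(log_w0, log_w1)  # log-sum-exp trick for the normalizer
    log_norm = m + np.log(np.exp(log_w0 - m) + np.exp(log_w1 - m))
    return np.exp(log_w1 - log_norm)

# Should closely match the last posterior value computed in the loop above:
print(posterior_z1_log_space(x, alpha_1, alpha_2, beta), posterior_values[-1])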