##### Introduction to Bayes II #####

##### Experiment 1 #
#
# Estimate pi, the probability of heads for a coin.
# The geometric setup ("flip a coin until a heads comes up; that will be
# the value of x") is used in Bayes 2 below; Bayes 1 uses binomial data
# (x heads out of n flips), which is what its Beta(3,9) posterior matches.

## Bayes 1: Uninformative Prior (binomial likelihood)
xx  = seq(0, 1, length = 10000)
pi1 = dunif(xx)                 ## Prior density of pi: Uniform(0,1) = Beta(1,1)
x = 2; n = 10                   ## Observed data: x heads in n flips
pi2 = dbeta(xx, 3, 9)           ## Posterior: Beta(1+x, 1+n-x) = Beta(3,9)
plot(xx, pi1, type = "l", col = "gold4", ylim = c(0, 4),
     xlab = expression(pi), ylab = "density")
lines(xx, pi2, col = "blue")
abline(v = x/n)                 ## MLE of pi

### Bayes 2: Informative Prior (geometric likelihood)
xx  = seq(0, 1, length = 10000)
pi1 = dbeta(xx, 5, 5)           ## Prior density of pi: Beta(5,5)
x = 6                           ## Observed data
# NOTE(review): the update below treats x as the number of tails before
# the first head (posterior Beta(a0+1, b0+x)) -- confirm against the
# intended parameterization of the geometric model.
pi2 = dbeta(xx, 5 + 1, 5 + x)   ## Posterior: Beta(6,11)
plot(xx, pi1, type = "l", col = "gold4", ylim = c(0, 4),
     xlab = expression(pi), ylab = "density")
lines(xx, pi2, col = "blue")
abline(v = 1/x)                 ## MLE of pi under the geometric model

# 95% equal-tailed credible interval for the Beta(6,11) posterior.
# (Fixed: the original used qbeta(..., 7, 13), which matches neither
#  posterior computed above; the Monte-Carlo draws below use Beta(6,11).)
qbeta(c(0.025, 0.975), 6, 11)

# Monte-Carlo summaries of the Beta(6,11) posterior
pd = rbeta(1e6, 6, 11)
mean(pd)    ## Estimated posterior mean   (squared error loss)
median(pd)  ## Estimated posterior median (absolute error loss)

##### Experiment 2 #
#
# Estimate lambda, the average number of fleas on a tick.
# The likelihood of X is Poisson; the conjugate Gamma(a0, b0) prior
# gives posterior Gamma(a0 + x, b0 + 1) for a single observation x.

## Run one Gamma-Poisson conjugate update and report the results.
##   a0, b0 : Gamma(shape, rate) hyperparameters of the prior
##   x      : observed Poisson count (a single observation)
## Plots the prior (gold) vs. posterior (blue) densities, prints the 95%
## equal-tailed credible interval, Monte-Carlo posterior mean/median, and
## the analytic prior/posterior moments. Returns the posterior
## c(shape, rate) invisibly.
gamma_poisson_demo = function(a0, b0, x) {
  xx    = seq(0, 8, length = 10000)
  prior = dgamma(xx, a0, b0)            ## Prior density of lambda
  post  = dgamma(xx, a0 + x, b0 + 1)    ## Posterior density of lambda
  # (Fixed: axis label was expression(pi); the parameter here is lambda.)
  plot(xx, prior, type = "l", col = "gold4", ylim = c(0, 3),
       xlab = expression(lambda), ylab = "density")
  lines(xx, post, col = "blue")
  abline(v = x)                         ## MLE of lambda (single observation)

  # 95% equal-tailed credible interval
  print(qgamma(c(0.025, 0.975), a0 + x, b0 + 1))

  # Monte-Carlo summaries of the posterior
  pd = rgamma(1e6, a0 + x, b0 + 1)
  print(mean(pd))    ## Estimated posterior mean   (squared error loss)
  print(median(pd))  ## Estimated posterior median (absolute error loss)

  # Analytic moments (Gamma shape/rate: mean = a/b, var = a/b^2)
  print(a0 / b0)                 ## Prior mean
  print((a0 + x) / (b0 + 1))     ## Posterior mean
  print(a0 / b0^2)               ## Prior variance
  print((a0 + x) / (b0 + 1)^2)   ## Posterior variance

  invisible(c(shape = a0 + x, rate = b0 + 1))
}

# Bayes 1: Low-Information Prior -- mean 0.7, var 70 (high variance!)
gamma_poisson_demo(a0 = 0.007, b0 = 0.01, x = 6)

# Bayes 2: Higher-Informative Prior -- mean 0.7, var 0.07 (low variance!)
gamma_poisson_demo(a0 = 7, b0 = 10, x = 6)

# Bayes 3: High-Informative Prior -- mean 0.7, var 0.0007 (low variance!)
gamma_poisson_demo(a0 = 700, b0 = 1000, x = 6)

#####
# Side question, from these last three priors:
#
# What is the effect of prior variance on the posterior estimates?