# Plot of the predictive density under
#  1) Plug-in
#  2) Pseudo-Bayes using logit(p)
#  3) Bootstrap predictive
#  4) Fully Bayesian: Jeffreys prior
#  5) Fully Bayesian: U(0,1) prior

n=1000000   # number of simulation draws
ylim=25     # largest y value kept for plotting

# Plug-in: fix p at the point estimate 0.1
p1=0.1
y1=rbinom(n,100,p1)

# Pseudo-Bayes: normal approximation on the logit scale,
# mean logit(0.1)=log(1/9), sd=1/sqrt(100*0.1*0.9)
logitp=rnorm(n,log(1/9),1/sqrt(100*0.1*0.9))
p2=exp(logitp)/(1+exp(logitp))
y2=rbinom(n,100,p2)

# Bootstrap predictive: resample p-hat, then simulate y
p3=rbinom(n,100,p1)/100
y3=rbinom(n,100,p3)

# Fully Bayesian with Jeffreys prior:
# Beta(1/2,1/2) prior, 10 successes in 100 trials -> Beta(10.5,90.5) posterior
p4=rbeta(n,10.5,90.5)
y4=rbinom(n,100,p4)

# Fully Bayesian with Uniform prior:
# Beta(1,1) prior -> Beta(11,91) posterior
p5=rbeta(n,11,91)
y5=rbinom(n,100,p5)

# Side by side plot of probability masses
# First, omit extreme y values to improve plot
trim=function(y,ymax=25) y[y<=ymax]
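
# The plotting step below is a minimal sketch of the announced side-by-side
# plot; the original script is truncated at trim(), so the tabulate_pm()
# helper, the labels, and the barplot() layout are assumptions, not the
# author's code. It converts each trimmed sample into an estimated
# probability mass on 0:ylim and draws the five masses side by side.
tabulate_pm=function(y) table(factor(trim(y,ylim),levels=0:ylim))/n
pm=rbind(tabulate_pm(y1),tabulate_pm(y2),tabulate_pm(y3),
         tabulate_pm(y4),tabulate_pm(y5))
barplot(pm,beside=TRUE,xlab="y",ylab="Predictive probability",
        legend.text=c("Plug-in","Pseudo-Bayes","Bootstrap","Jeffreys","Uniform"),
        args.legend=list(bty="n"))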