#### All algorithms for monotone functions:
# DGL-tMVN with Matern kernel; DGL-tMVN with squared-exponential kernel; IGL-tMVN;
# tMVN with global shrinkage: with improper prior on \tau^2 and with half Cauchy prior on \tau
# tMVN with no shrinkage (fix \tau=1): with hyperparameter updates and without hyperparameter updates
library(MASS); library(FastGP); library(tmg);

##################### Model with prior DGL (Matern kernel) #################
mon.shrink.hyp=function(y,x,N,eta,mcmc,brn,thin,delta_tau,nu.in,l.in,tau.in,sig.in,xi0.in,xi.in,lam.in,
                        sseed,verbose,return.plot,return.traplot){
  # y: response variable; x: vector to form design matrix \Psi (n X N+1)
  # N: (N+1) is the number of knots
  # eta: parameter of the approximation function of the indicator functions
  # mcmc, brn, thin: mcmc samples, burn-in and thinning for MCMC
  # delta_tau: hyperparameter associated with the half Cauchy prior on \tau
  #   Note: default value is 1; the larger the value of delta_tau, the heavier the tail of the prior
  # nu.in, l.in, tau.in, sig.in, xi0.in, xi.in, lam.in: initial values (supplied by user)
  # sseed: seed value to reproduce results (supplied by user)
  # verbose: logical; if TRUE, prints current status; default is TRUE
  # return.plot: logical; if TRUE a plot of estimate with 95% CI is returned; default is TRUE
  # return.traplot: logical; if TRUE traceplots are returned; default is TRUE
  if(length(y)!=length(x)) stop("y and x should be of same length!")
  n=length(y)
  if(missing(N)) N = ceiling(n/8) - 1
  int_length=1/N
  my_knots=seq(0,1,by=int_length)
  X=des.mat1(x,my_knots,int_length)
  if(missing(return.plot)) return.plot=TRUE
  if(missing(return.traplot)) return.traplot=TRUE
  if(!missing(sseed)) set.seed(sseed)
  if(missing(verbose)) verbose=TRUE
  if(missing(eta)) eta=50
  if(missing(mcmc)) mcmc=1000
  if(missing(brn)) brn=1000
  if(missing(thin)) thin=1
  em=mcmc+brn
  ef=mcmc/thin
  if(missing(tau.in)) tau.in=1
  if(missing(sig.in)) sig.in=1
  if(missing(xi0.in)) xi0.in=0
  if(missing(xi.in)) xi.in=rep(1,N+1)
  if(missing(lam.in)) lam.in=rep(1,N+1)
  if(missing(nu.in)) nu.in=0.75
  if(missing(l.in)) l.in=l_est(nu.in,c(0,1),0.05)
  if(missing(delta_tau)){
    delta = 1
  }else{
    delta = delta_tau
  }
  tau=tau.in
  sig=sig.in
  xi0=xi0.in
  xi_in=xi.in
  lam_in=lam.in
  nu_in=nu.in
  l_in=l.in
  gam_in=rep(1,N+1)
  l0=rep(0,ncol(X))
  l1=rep(1,ncol(X))
  # storage for posterior draws:
  xi_sam=matrix(0,N+1,ef)
  lam_sam=matrix(0,N+1,ef)
  gam_sam=matrix(0,N+1,ef)
  xi0_sam=rep(0,ef)
  tau_sam=rep(0,ef)
  sig_sam=rep(0,ef)
  nu_sam=rep(0,ef)
  ell_sam=rep(0,ef)
  fhat_sam=matrix(0,n,ef)
  if(verbose) print("MCMC sample draws:")
  cnt=0
  ptm=proc.time()
  for(i in 1:em){
    # sampling from \nu and \ell:
    MH.out = nu.MH2(nu_in,l_in,tau,xi_in,my_knots,seed=i)
    nu_out = MH.out$nu
    l_out = MH.out$l
    cnt = cnt + MH.out$count
    L_inv = MH.out$L_inv
    # renewing the initial values:
    nu_in = nu_out
    l_in = l_out
    # sampling Xi:
    y_tilde = y - xi0
    nu.xi = as.vector(samp.WC(my_knots,nu_out,l_out,tau,sseed=i))
    xi_out = ESS(xi_in,nu.xi,y_tilde,t(t(X)*lam_in),sig,eta,seeds=i)
    xi_out = sapply(xi_out,function(z) return(max(0,z)))
    # renewing the initial value:
    xi_in = xi_out
    set.seed(2*i)
    # sampling lambda:
    nu.lam = rnorm(l1,l0,sd=1/sqrt(gam_in))
    lam_out = ESS(lam_in,nu.lam,y_tilde,t(t(X)*xi_in),sig,eta,seeds=i)
    lam_out = sapply(lam_out,function(z) return(max(0,z)))
    # renewing the initial value:
    lam_in = lam_out
    # sampling xi_0:
    Xxi = X%*%(xi_out*lam_in)
    y_star = y - Xxi
    xi0 = rnorm(1,mean(y_star),sqrt(sig/n))
    # sampling gamma_j's:
    gam_out = rexp(l1,rate = 0.5*(1+lam_in^2))
    gam_in=gam_out
    # sampling \sigma^2:
    y0 = y_star - xi0
    sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2)
    # sampling \tau^2 via slice sampler (horseshoe-style update):
    tempt = (sum((t(L_inv)%*%xi_out)^2))/2
    et = delta/tau
    utau = runif(1,0,1/(1+et))
    ubt = min(1,(1-utau)/utau)
    Fubt = pgamma(ubt,((N+1)+1)/2,scale=1/tempt)
    Fubt = max(Fubt,1e-10) # for numerical stability
    ut = runif(1,0,Fubt)
    et = qgamma(ut,((N+1)+1)/2,scale=1/tempt)
    tau = delta/et
    # storing MCMC samples (after burn-in, every thin-th draw):
    if(i > brn && (i-brn)%%thin == 0){
      xi_sam[,(i-brn)/thin]=xi_out
      lam_sam[,(i-brn)/thin]=lam_out
      gam_sam[,(i-brn)/thin]=gam_out
      xi0_sam[(i-brn)/thin]=xi0
      sig_sam[(i-brn)/thin]=sig
      tau_sam[(i-brn)/thin]=tau
      nu_sam[(i-brn)/thin]=nu_out
      ell_sam[(i-brn)/thin]=l_out
      fhat_sam[,(i-brn)/thin]=xi0 + Xxi
    }
    if(i%%100==0 && verbose){
      print(i)
    }
  }
  tm=proc.time()-ptm
  if(return.traplot){
    library(mcmcplots)
    mx=min(N+1,25) # cap the number of traceplot panels
    xi_mat=matrix(xi_sam[1:mx,],nrow = mx,
                  dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL))
    traplot(t(xi_mat))
    lam_mat=matrix(lam_sam[1:mx,],nrow = mx,
                   dimnames = list(c(paste("lambda[",1:mx,"]",sep = "")),NULL))
    traplot(t(lam_mat))
    par(mfrow=c(1,1))
    traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0])))
    traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2)))
    traplot(as.mcmc(tau_sam),main=expression(paste(tau^2)))
    traplot(as.mcmc(nu_sam),main=expression(paste(nu)))
    traplot(as.mcmc(ell_sam),main="\u2113")
    par(mfrow=c(1,1))
  }
  # pointwise posterior mean and 95% credible band of f:
  fmean=rowMeans(fhat_sam)
  fsort=fhat_sam
  for(i in 1:nrow(fhat_sam)){
    fsort[i,]=sort(fhat_sam[i,])
  }
  f_low=fsort[,ef*0.025]
  f_upp=fsort[,ef*0.975]
  if(return.plot){
    par(mfrow=c(1,1))
    ub=max(y,f_low,f_upp,fmean)
    lb=min(y,f_low,f_upp,fmean)
    plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub),
         main="Black: Estimate, Blue: 95% CI, Green: Data")
    lines(x,fmean,lwd=2,lty=1,col="black")
    lines(x,f_low,lwd=2,lty=2,col="blue")
    lines(x,f_upp,lwd=2,lty=2,col="blue")
  }
  return(list("time"=tm,"rej.prop"=round(cnt/em,3),"nu_sam"=nu_sam,"ell_sam"=ell_sam,
              "xi_sam"=xi_sam,"lam_sam"=lam_sam,"xi0_sam"=xi0_sam,"sig_sam"=sig_sam,
              "tau_sam"=tau_sam,"fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp))
}

############### Constrained model with prior DGL (Squared-exponential kernel) ###################
sek.shrink.hyp=function(y,x,N,eta,mcmc,brn,thin,delta_tau,l.in,tau.in,sig.in,xi0.in,xi.in,lam.in,
                        sseed,verbose,return.plot,return.traplot){
  # y: response variable; x: vector to form design matrix \Psi (n X N+1)
  # N: (N+1) is the number of knots
  # eta: parameter of the approximation function of the indicator functions
  # mcmc, brn, thin: mcmc samples, burn-in and thinning for MCMC
  # delta_tau: hyperparameter associated with the half Cauchy prior on \tau^2
  #   Note: default value is 1. Smaller value delta_tau (<1) indicates stronger global shrinkage
  # l.in, tau.in, sig.in, xi0.in, xi.in, lam.in: initial values (supplied by user)
  # sseed: seed value to reproduce results (supplied by user)
  # verbose: logical; if TRUE, prints current status; default is TRUE
  # return.plot: logical; if TRUE a plot of estimate with 95% CI is returned; default is TRUE
  # return.traplot: logical; if TRUE traceplots are returned; default is TRUE
  if(length(y)!=length(x)) stop("y and x should be of same length!")
  n=length(y)
  if(missing(N)) N = ceiling(n/8) - 1
  int_length=1/N
  my_knots=seq(0,1,by=int_length)
  X=des.mat1(x,my_knots,int_length)
  if(missing(return.plot)) return.plot=TRUE
  if(missing(return.traplot)) return.traplot=TRUE
  if(!missing(sseed)) set.seed(sseed)
  if(missing(verbose)) verbose=TRUE
  if(missing(eta)) eta=50
  if(missing(mcmc)) mcmc=1000
  if(missing(brn)) brn=1000
  if(missing(thin)) thin=1
  em=mcmc+brn
  ef=mcmc/thin
  if(missing(tau.in)) tau.in=1
  if(missing(sig.in)) sig.in=1
  if(missing(xi0.in)) xi0.in=0
  if(missing(xi.in)) xi.in=rep(1,N+1)
  if(missing(lam.in)) lam.in=rep(1,N+1)
  if(missing(l.in)) l.in=l_est2(c(0,1),0.05)
  if(missing(delta_tau)){
    delta = 1
  }else{
    delta = delta_tau
  }
  tau=tau.in
  sig=sig.in
  xi0=xi0.in
  xi_in=xi.in
  lam_in=lam.in
  l_in=l.in
  gam_in=rep(1,N+1)
  l0=rep(0,ncol(X))
  l1=rep(1,ncol(X))
  # storage for posterior draws:
  xi_sam=matrix(0,N+1,ef)
  lam_sam=matrix(0,N+1,ef)
  gam_sam=matrix(0,N+1,ef)
  xi0_sam=rep(0,ef)
  tau_sam=rep(0,ef)
  sig_sam=rep(0,ef)
  ell_sam=rep(0,ef)
  fhat_sam=matrix(0,n,ef)
  if(verbose) print("MCMC sample draws:")
  cnt=0
  ptm=proc.time()
  for(i in 1:em){
    # sampling from \ell:
    MH.out = l.MH(l_in,tau,xi_in,my_knots,seed = i)
    l_out = MH.out$l
    cnt = cnt + MH.out$count
    L_inv = MH.out$L_inv
    # renewing the initial value:
    l_in = l_out
    # sampling Xi:
    y_tilde = y - xi0
    nu.xi = as.vector(samp.WC(my_knots,nu=NULL,l_out,tau,sseed=i,mk=FALSE))
    xi_out = ESS(xi_in,nu.xi,y_tilde,t(t(X)*lam_in),sig,eta,seeds=i)
    xi_out = sapply(xi_out,function(z) return(max(0,z)))
    # renewing the initial value:
    xi_in = xi_out
    set.seed(2*i)
    # sampling lambda:
    nu.lam = rnorm(l1,l0,sd=1/sqrt(gam_in))
    lam_out = ESS(lam_in,nu.lam,y_tilde,t(t(X)*xi_in),sig,eta,seeds=i)
    lam_out = sapply(lam_out,function(z) return(max(0,z)))
    # renewing the initial value:
    lam_in = lam_out
    # sampling xi_0:
    Xxi = X%*%(xi_out*lam_in)
    y_star = y - Xxi
    xi0 = rnorm(1,mean(y_star),sqrt(sig/n))
    # sampling gamma_j's:
    gam_out = rexp(l1,rate = 0.5*(1+lam_in^2))
    gam_in=gam_out
    # sampling \sigma^2:
    y0 = y_star - xi0
    sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2)
    # sampling \tau^2 via slice sampler (horseshoe-style update):
    tempt = (sum((t(L_inv)%*%xi_out)^2))/2
    et = delta/tau
    utau = runif(1,0,1/(1+et))
    ubt = min(1,(1-utau)/utau)
    Fubt = pgamma(ubt,((N+1)+1)/2,scale=1/tempt)
    Fubt = max(Fubt,1e-10) # for numerical stability
    ut = runif(1,0,Fubt)
    et = qgamma(ut,((N+1)+1)/2,scale=1/tempt)
    tau = delta/et
    # storing MCMC samples (after burn-in, every thin-th draw):
    if(i > brn && (i-brn)%%thin == 0){
      xi_sam[,(i-brn)/thin]=xi_out
      lam_sam[,(i-brn)/thin]=lam_out
      gam_sam[,(i-brn)/thin]=gam_out
      xi0_sam[(i-brn)/thin]=xi0
      sig_sam[(i-brn)/thin]=sig
      tau_sam[(i-brn)/thin]=tau
      ell_sam[(i-brn)/thin]=l_out
      fhat_sam[,(i-brn)/thin]=xi0 + Xxi
    }
    if(i%%100==0 && verbose){
      print(i)
    }
  }
  tm=proc.time()-ptm
  if(return.traplot){
    library(mcmcplots)
    mx=min(N+1,25)
    xi_mat=matrix(xi_sam[1:mx,],nrow = mx,
                  dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL))
    traplot(t(xi_mat))
    lam_mat=matrix(lam_sam[1:mx,],nrow = mx,
                   dimnames = list(c(paste("lambda[",1:mx,"]",sep = "")),NULL))
    traplot(t(lam_mat))
    par(mfrow=c(1,1))
    traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0])))
    traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2)))
    traplot(as.mcmc(tau_sam),main=expression(paste(tau^2)))
    traplot(as.mcmc(ell_sam),main="\u2113")
    par(mfrow=c(1,1))
  }
  # pointwise posterior mean and 95% credible band of f:
  fmean=rowMeans(fhat_sam)
  fsort=fhat_sam
  for(i in 1:nrow(fhat_sam)){
    fsort[i,]=sort(fhat_sam[i,])
  }
  f_low=fsort[,ef*0.025]
  f_upp=fsort[,ef*0.975]
  if(return.plot){
    par(mfrow=c(1,1))
    ub=max(y,f_low,f_upp,fmean)
    lb=min(y,f_low,f_upp,fmean)
    plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub),
         main="Black: Estimate, Blue: 95% CI, Green: Data")
    lines(x,fmean,lwd=2,lty=1,col="black")
    lines(x,f_low,lwd=2,lty=2,col="blue")
    lines(x,f_upp,lwd=2,lty=2,col="blue")
  }
  return(list("time"=tm,"rej.prop"=round(cnt/em,3),"ell_sam"=ell_sam,
              "xi_sam"=xi_sam,"lam_sam"=lam_sam,"xi0_sam"=xi0_sam,"sig_sam"=sig_sam,
              "tau_sam"=tau_sam,"fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp))
}

########### (Non-increasing) Constrained model with prior DGL (Matern kernel) ##############
dec.shrink.hyp=function(y,x,N,eta,mcmc,brn,thin,delta_tau,nu.in,l.in,tau.in,sig.in,xi0.in,xi.in,lam.in,
                        sseed,verbose,return.plot,return.traplot){
  # y: response variable; x: vector to form design matrix \Psi (n X N+1)
  # N: (N+1) is the number of knots
  # eta: parameter of the approximation function of the indicator functions
  # mcmc, brn, thin: mcmc samples, burn-in and thinning for MCMC
  # delta_tau: hyperparameter associated with the half Cauchy prior on \tau^2
  #   Note: default value is 1. Smaller value delta_tau (<1) indicates stronger global shrinkage
  # nu.in, l.in, tau.in, sig.in, xi0.in, xi.in, lam.in: initial values (supplied by user)
  # sseed: seed value to reproduce results (supplied by user)
  # verbose: logical; if TRUE, prints current status; default is TRUE
  # return.plot: logical; if TRUE a plot of estimate with 95% CI is returned; default is TRUE
  # return.traplot: logical; if TRUE traceplots are returned; default is TRUE
  if(length(y)!=length(x)) stop("y and x should be of same length!")
  n=length(y)
  if(missing(N)) N = ceiling(n/8) - 1
  int_length=1/N
  my_knots=seq(0,1,by=int_length)
  X=des.mat1(x,my_knots,int_length)
  if(missing(return.plot)) return.plot=TRUE
  if(missing(return.traplot)) return.traplot=TRUE
  if(!missing(sseed)) set.seed(sseed)
  if(missing(verbose)) verbose=TRUE
  if(missing(eta)) eta=50
  # NOTE(review): default mcmc is 10000 here while the other samplers use 1000 — kept as in original.
  if(missing(mcmc)) mcmc=10000
  if(missing(brn)) brn=1000
  if(missing(thin)) thin=1
  em=mcmc+brn
  ef=mcmc/thin
  if(missing(delta_tau)){
    delta = 1
  }else{
    delta = delta_tau
  }
  if(missing(tau.in)) tau.in=1
  if(missing(sig.in)) sig.in=1
  if(missing(xi0.in)) xi0.in=0
  if(missing(xi.in)) xi.in=rep(-1,N+1) # non-positive start for the non-increasing constraint
  if(missing(lam.in)) lam.in=rep(1,N+1)
  if(missing(nu.in)) nu.in=0.75
  if(missing(l.in)) l.in=l_est(nu.in,c(0,1),0.05)
  tau=tau.in
  sig=sig.in
  xi0=xi0.in
  xi_in=xi.in
  lam_in=lam.in
  nu_in=nu.in
  l_in=l.in
  gam_in=rep(1,N+1)
  l0=rep(0,ncol(X))
  l1=rep(1,ncol(X))
  # storage for posterior draws:
  xi_sam=matrix(0,N+1,ef)
  lam_sam=matrix(0,N+1,ef)
  gam_sam=matrix(0,N+1,ef)
  xi0_sam=rep(0,ef)
  tau_sam=rep(0,ef)
  sig_sam=rep(0,ef)
  nu_sam=rep(0,ef)
  ell_sam=rep(0,ef)
  fhat_sam=matrix(0,n,ef)
  if(verbose) print("MCMC sample draws:")
  cnt=0
  ptm=proc.time()
  for(i in 1:em){
    # sampling from \nu and \ell:
    MH.out = nu.MH2(nu_in,l_in,tau,xi_in,my_knots,seed=i)
    nu_out = MH.out$nu
    l_out = MH.out$l
    cnt = cnt + MH.out$count
    L_inv = MH.out$L_inv
    # renewing the initial values:
    nu_in = nu_out
    l_in = l_out
    # sampling Xi (truncated to the non-positive orthant):
    y_tilde = y - xi0
    nu.xi = as.vector(samp.WC(my_knots,nu_out,l_out,tau,sseed=i))
    xi_out = ESS.dec(xi_in,nu.xi,y_tilde,t(t(X)*lam_in),sig,eta,seeds=i)
    xi_out = sapply(xi_out,function(z) return(min(0,z)))
    # renewing the initial value:
    xi_in = xi_out
    set.seed(2*i)
    # sampling lambda:
    nu.lam = rnorm(l1,l0,sd=1/sqrt(gam_in))
    lam_out = ESS(lam_in,nu.lam,y_tilde,t(t(X)*xi_in),sig,eta,seeds=i)
    lam_out = sapply(lam_out,function(z) return(max(0,z)))
    # renewing the initial value:
    lam_in = lam_out
    # sampling xi_0:
    Xxi = X%*%(xi_out*lam_in)
    y_star = y - Xxi
    xi0 = rnorm(1,mean(y_star),sqrt(sig/n))
    # sampling gamma_j's:
    gam_out = rexp(l1,rate = 0.5*(1+lam_in^2))
    gam_in=gam_out
    # sampling \sigma^2:
    y0 = y_star - xi0
    sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2)
    # sampling \tau^2 via slice sampler (horseshoe-style update):
    tempt = (sum((t(L_inv)%*%xi_out)^2))/2
    et = delta/tau
    utau = runif(1,0,1/(1+et))
    ubt = min(1,(1-utau)/utau)
    Fubt = pgamma(ubt,((N+1)+1)/2,scale=1/tempt)
    Fubt = max(Fubt,1e-10) # for numerical stability
    ut = runif(1,0,Fubt)
    et = qgamma(ut,((N+1)+1)/2,scale=1/tempt)
    tau = delta/et
    # storing MCMC samples (after burn-in, every thin-th draw):
    if(i > brn && (i-brn)%%thin == 0){
      xi_sam[,(i-brn)/thin]=xi_out
      lam_sam[,(i-brn)/thin]=lam_out
      gam_sam[,(i-brn)/thin]=gam_out
      xi0_sam[(i-brn)/thin]=xi0
      sig_sam[(i-brn)/thin]=sig
      tau_sam[(i-brn)/thin]=tau
      nu_sam[(i-brn)/thin]=nu_out
      ell_sam[(i-brn)/thin]=l_out
      fhat_sam[,(i-brn)/thin]=xi0 + Xxi
    }
    if(i%%100==0 && verbose){
      print(i)
    }
  }
  tm=proc.time()-ptm
  if(return.traplot){
    library(mcmcplots)
    mx=min(N+1,25)
    xi_mat=matrix(xi_sam[1:mx,],nrow = mx,
                  dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL))
    traplot(t(xi_mat))
    lam_mat=matrix(lam_sam[1:mx,],nrow = mx,
                   dimnames = list(c(paste("lambda[",1:mx,"]",sep = "")),NULL))
    traplot(t(lam_mat))
    par(mfrow=c(1,1))
    traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0])))
    traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2)))
    traplot(as.mcmc(tau_sam),main=expression(paste(tau^2)))
    traplot(as.mcmc(nu_sam),main=expression(paste(nu)))
    traplot(as.mcmc(ell_sam),main="\u2113")
    par(mfrow=c(1,1))
  }
  # pointwise posterior mean and 95% credible band of f:
  fmean=rowMeans(fhat_sam)
  fsort=fhat_sam
  for(i in 1:nrow(fhat_sam)){
    fsort[i,]=sort(fhat_sam[i,])
  }
  f_low=fsort[,ef*0.025]
  f_upp=fsort[,ef*0.975]
  if(return.plot){
    par(mfrow=c(1,1))
    ub=max(y,f_low,f_upp,fmean)
    lb=min(y,f_low,f_upp,fmean)
    plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub),
         main="Black: Estimate, Blue: 95% CI, Green: Data")
    lines(x,fmean,lwd=2,lty=1,col="black")
    lines(x,f_low,lwd=2,lty=2,col="blue")
    lines(x,f_upp,lwd=2,lty=2,col="blue")
  }
  return(list("time"=tm,"rej.prop"=round(cnt/em,3),"nu_sam"=nu_sam,"ell_sam"=ell_sam,
              "xi_sam"=xi_sam,"lam_sam"=lam_sam,"xi0_sam"=xi0_sam,"sig_sam"=sig_sam,
              "tau_sam"=tau_sam,"fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp))
}

############ Constrained model with prior IGL #####################
mon.horseshoe.hyp=function(y,x,N,eta,mcmc,brn,delta_tau,thin,tau.in,sig.in,xi0.in,xi.in,sseed,
                           verbose,return.plot,return.traplot){
  # y: response variable; x: vector to form design matrix \Psi (n X N+1)
  # N: (N+1) is the number of knots
  # eta: parameter of the approximation function of the indicator functions
  # mcmc, brn, thin: mcmc samples, burn-in and thinning for MCMC
  # delta_tau: hyperparameter associated with the half Cauchy prior on \tau^2
  #   Note: the default is 1. Smaller value delta_tau (<1) indicates stronger global shrinkage
  # tau.in, sig.in, xi0.in, xi.in: initial values (supplied by user)
  # sseed: seed value to reproduce results (supplied by user)
  # verbose: logical; if TRUE, prints current status; default is TRUE
  # return.plot: logical; if TRUE a plot of estimate with 95% CI is returned; default is TRUE
  # return.traplot: logical; if TRUE traceplots are returned; default is TRUE
  if(length(y)!=length(x)) stop("y and x should be of same length!")
  n=length(y)
  if(missing(N)) N = ceiling(n/8) - 1
  int_length=1/N
  my_knots=seq(0,1,by=int_length)
  X=des.mat1(x,my_knots,int_length)
  XTX=t(X)%*%X
  if(missing(return.plot)) return.plot=TRUE
  if(missing(return.traplot)) return.traplot=TRUE
  if(!missing(sseed)) set.seed(sseed)
  if(missing(verbose)) verbose=TRUE
  if(missing(eta)) eta=50
  if(missing(mcmc)) mcmc=1000
  if(missing(brn)) brn=1000
  if(missing(thin)) thin=1
  em=mcmc+brn
  ef=mcmc/thin
  if(missing(tau.in)) tau.in=1
  if(missing(sig.in)) sig.in=1
  if(missing(xi0.in)) xi0.in=0
  if(missing(xi.in)) xi.in=rep(1,N+1)
  if(missing(delta_tau)){
    delta = 1
  }else{
    delta = delta_tau
  }
  # constraint Fx+g >= 0 with F = identity, g = 0, i.e. xi >= 0 (monotone increasing):
  f=diag(N+1)
  g=rep(0,N+1)
  lam=rep(1,N+1)
  tau=tau.in
  sig=sig.in
  xi0=xi0.in
  xi_in=xi.in
  # storage for posterior draws:
  xi_sam=matrix(0,N+1,ef)
  xi0_sam=rep(0,ef)
  tau_sam=rep(0,ef)
  sig_sam=rep(0,ef)
  lam_sam=matrix(0,N+1,ef)
  fhat_sam=matrix(0,n,ef)
  if(verbose) print("MCMC sample draws:")
  ptm=proc.time()
  for(i in 1:em){
    set.seed(i)
    # sampling Xi from the truncated MVN via exact HMC (tmg::rtmg):
    y_tilde = y - xi0
    K_inv = diag(1/lam,N+1)
    M = XTX/sig + K_inv/tau
    r = as.vector(t(X)%*%y_tilde)/sig
    xi_out = as.vector(rtmg(1,M,r,xi_in,f,g,burn.in=0))
    # renewing the initial value:
    xi_in = xi_out
    # sampling xi_0:
    Xxi = X%*%xi_out
    y_star = y - Xxi
    # FIX: original used undefined `ntr`; the sample size in this function is `n`.
    xi0 = rnorm(1,mean(y_star),sqrt(sig/n))
    # sampling \sigma^2:
    y0 = y_star - xi0
    # FIX: original used undefined `ntr` for the shape; should be n/2.
    sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2)
    # sampling \lambda_j^2: updating \lambda_j's in a block using slice sampling
    etj = 1/lam
    upsi = runif(N+1,0,1/(1+etj))
    tempps = xi_out^2/(2*tau)
    ub = (1-upsi)/upsi
    # sample eta_j from Exp(tempps) truncated between 0 and (1-upsi)/upsi:
    Fub = 1 - exp(-tempps*ub) # exp cdf at ub
    Fub[Fub < (1e-4)] = 1e-4  # for numerical stability
    up = runif(N+1,0,Fub)
    etj = -log(1-up)/tempps
    lam = 1/etj
    # sampling \tau^2: updating \tau using slice sampling
    tempt = sum(xi_out^2/lam)/2
    et = delta/tau
    utau = runif(1,0,1/(1+et))
    ubt = min(1,(1-utau)/utau)
    Fubt = pgamma(ubt,((N+1)+1)/2,scale=1/tempt)
    Fubt = max(Fubt,1e-10) # for numerical stability
    ut = runif(1,0,Fubt)
    et = qgamma(ut,((N+1)+1)/2,scale=1/tempt)
    tau = delta/et
    # storing MCMC samples (after burn-in, every thin-th draw):
    # FIX: was `i%%thin` — with brn not divisible by thin that produced fractional
    # storage indices; `(i-brn)%%thin` matches the other samplers in this file.
    if(i > brn && (i-brn)%%thin == 0){
      xi_sam[,(i-brn)/thin]=xi_out
      lam_sam[,(i-brn)/thin]=lam
      xi0_sam[(i-brn)/thin]=xi0
      sig_sam[(i-brn)/thin]=sig
      tau_sam[(i-brn)/thin]=tau
      fhat_sam[,(i-brn)/thin]=xi0 + Xxi
    }
    if(i%%100==0 && verbose){
      print(i)
    }
  }
  tm=proc.time()-ptm
  if(return.traplot){
    library(mcmcplots)
    mx=min(N+1,25)
    xi_mat=matrix(xi_sam[1:mx,],nrow = mx,
                  dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL))
    traplot(t(xi_mat))
    lam_mat=matrix(lam_sam[1:mx,],nrow = mx,
                   dimnames = list(c(paste("lambda[",1:mx,"]",sep = "")),NULL))
    traplot(t(lam_mat))
    par(mfrow=c(1,1))
    traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0])))
    traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2)))
    traplot(as.mcmc(tau_sam),main=expression(paste(tau^2)))
  }
  # pointwise posterior mean and 95% credible band of f:
  fmean=rowMeans(fhat_sam)
  fsort=fhat_sam
  for(i in 1:nrow(fhat_sam)){
    fsort[i,]=sort(fhat_sam[i,])
  }
  f_low=fsort[,ef*0.025]
  f_upp=fsort[,ef*0.975]
  if(return.plot){
    par(mfrow=c(1,1))
    ub=max(y,f_low,f_upp,fmean)
    lb=min(y,f_low,f_upp,fmean)
    plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub),
         main="Black: Estimate, Blue: 95% CI, Green:data")
    lines(x,fmean,lwd=2,lty=1,col="black")
    lines(x,f_low,lwd=2,lty=2,col="blue")
    lines(x,f_upp,lwd=2,lty=2,col="blue")
  }
  return(list("time"=tm,"lam_sam"=lam_sam,"xi_sam"=xi_sam,"xi0_sam"=xi0_sam,"sig_sam"=sig_sam,
              "tau_sam"=tau_sam,"fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp))
}

########### Constrained model with tMVN and improper prior on \tau^2
###############
mon.ESS.hyp=function(y,x,N,eta,mcmc,brn,thin,nu.in,l.in,tau.in,sig.in,xi0.in,xi.in,sseed,
                     verbose,return.plot,return.traplot){
  # y: response variable; x: vector to form design matrix \Psi (n X N+1)
  # N: (N+1) is the number of knots
  # eta: parameter of the approximation function of the indicator functions
  # mcmc, brn, thin: mcmc samples, burn-in and thinning for MCMC
  # nu.in, l.in, tau.in, sig.in, xi0.in, xi.in: initial values (supplied by user)
  # sseed: seed value to reproduce results (supplied by user)
  # verbose: logical; if TRUE, prints current status; default is TRUE
  # return.plot: logical; if TRUE a plot of estimate with 95% CI is returned; default is TRUE
  # return.traplot: logical; if TRUE traceplots are returned; default is TRUE
  if(length(y)!=length(x)) stop("y and x should be of same length!")
  n=length(y)
  if(missing(N)) N = ceiling(n/8) - 1
  int_length=1/N
  my_knots=seq(0,1,by=int_length)
  X=des.mat1(x,my_knots,int_length)
  if(missing(return.plot)) return.plot=TRUE
  if(missing(return.traplot)) return.traplot=TRUE
  if(!missing(sseed)) set.seed(sseed)
  if(missing(verbose)) verbose=TRUE
  if(missing(eta)) eta=50
  if(missing(mcmc)) mcmc=1000
  if(missing(brn)) brn=1000
  if(missing(thin)) thin=1
  em=mcmc+brn
  ef=mcmc/thin
  if(missing(tau.in)) tau.in=1
  if(missing(sig.in)) sig.in=1
  if(missing(xi0.in)) xi0.in=0
  if(missing(xi.in)) xi.in=rep(1,N+1)
  if(missing(nu.in)) nu.in=0.75
  if(missing(l.in)) l.in=l_est(nu.in,c(0,1),0.05)
  tau=tau.in
  sig=sig.in
  xi0=xi0.in
  xi_in=xi.in
  nu_in=nu.in
  l_in=l.in
  # storage for posterior draws:
  xi_sam=matrix(0,N+1,ef)
  xi0_sam=rep(0,ef)
  tau_sam=rep(0,ef)
  sig_sam=rep(0,ef)
  nu_sam=rep(0,ef)
  ell_sam=rep(0,ef)
  fhat_sam=matrix(0,n,ef)
  if(verbose) print("MCMC sample draws:")
  cnt=0
  ptm=proc.time()
  for(i in 1:em){
    # sampling from \nu and \ell:
    MH.out = nu.MH2(nu_in,l_in,tau,xi_in,my_knots,seed = i)
    nu_out = MH.out$nu
    l_out = MH.out$l
    cnt = cnt + MH.out$count
    L_inv = MH.out$L_inv
    # sampling Xi:
    y_tilde = y - xi0
    nu.ess = as.vector(samp.WC(my_knots,nu_out,l_out,tau,sseed = i))
    xi_out = ESS(xi_in,nu.ess,y_tilde,X,sig,eta)
    xi_out = sapply(xi_out,function(z) return(max(0,z)))
    set.seed(2*i)
    # sampling xi_0:
    Xxi = as.vector(X %*% xi_out)
    y_star = y - Xxi
    xi0 = rnorm(1,mean(y_star),sqrt(sig/n))
    # sampling \sigma^2:
    y0 = y_star - xi0
    sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2)
    # sampling \tau^2 (with improper prior on \tau^2):
    tau = 1/rgamma(1, shape = (N+1)/2, rate = (sum((t(L_inv)%*%xi_out)^2))/2)
    # storing MCMC samples (after burn-in, every thin-th draw):
    # FIX: was `i%%thin` — with brn not divisible by thin that produced fractional
    # storage indices; `(i-brn)%%thin` matches mon.shrink.hyp/sek.shrink.hyp.
    if(i > brn && (i-brn)%%thin == 0){
      xi_sam[,(i-brn)/thin]=xi_out
      xi0_sam[(i-brn)/thin]=xi0
      sig_sam[(i-brn)/thin]=sig
      tau_sam[(i-brn)/thin]=tau
      nu_sam[(i-brn)/thin]=nu_out
      ell_sam[(i-brn)/thin]=l_out
      fhat_sam[,(i-brn)/thin]=xi0 + Xxi
    }
    if(i%%100==0 && verbose){
      print(i)
    }
    # renewing the initial values:
    xi_in = xi_out
    nu_in = nu_out
    l_in = l_out
  }
  tm=proc.time()-ptm
  if(return.traplot){
    library(mcmcplots)
    mx=min(N+1,25)
    xi_mat=matrix(xi_sam[1:mx,],nrow = mx,
                  dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL))
    traplot(t(xi_mat))
    par(mfrow=c(1,1))
    traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0])))
    traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2)))
    traplot(as.mcmc(tau_sam),main=expression(paste(tau^2)))
    traplot(as.mcmc(nu_sam),main=expression(paste(nu)))
    traplot(as.mcmc(ell_sam),main="\u2113")
  }
  # pointwise posterior mean and 95% credible band of f:
  fmean=rowMeans(fhat_sam)
  fsort=fhat_sam
  for(i in 1:nrow(fhat_sam)){
    fsort[i,]=sort(fhat_sam[i,])
  }
  f_low=fsort[,ef*0.025]
  f_upp=fsort[,ef*0.975]
  if(return.plot){
    par(mfrow=c(1,1))
    ub=max(y,f_low,f_upp,fmean)
    lb=min(y,f_low,f_upp,fmean)
    plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub),
         main="Black: Estimate, Blue: 95% CI, Green:data")
    lines(x,fmean,lwd=2,lty=1,col="black")
    lines(x,f_low,lwd=2,lty=2,col="blue")
    lines(x,f_upp,lwd=2,lty=2,col="blue")
  }
  return(list("time"=tm,"rej.prop"=round(cnt/em,3),"nu_sam"=nu_sam,"ell_sam"=ell_sam,"xi_sam"=xi_sam,"xi0_sam"=xi0_sam,
              "sig_sam"=sig_sam,"tau_sam"=tau_sam,"fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp))
}

########### (Non-increasing) Constrained model with tMVN
# and improper prior on \tau^2 ###############
mon.dec.ESS.hyp=function(y,x,N,eta,mcmc,brn,thin,nu.in,l.in,tau.in,sig.in,xi0.in,xi.in,
                         sseed,verbose,return.plot,return.traplot){
  # y: response variable; x: vector to form design matrix \Psi (n X N+1)
  # N: (N+1) is the number of knots
  # eta: parameter of the approximation function of the indicator functions
  # mcmc, brn, thin: mcmc samples, burn-in and thinning for MCMC
  # nu.in, l.in, tau.in, sig.in, xi0.in, xi.in: initial values (supplied by user)
  # sseed: seed value to reproduce results (supplied by user)
  # verbose: logical; if TRUE, prints current status; default is TRUE
  # return.plot: logical; if TRUE a plot of estimate with 95% CI is returned; default is TRUE
  # return.traplot: logical; if TRUE traceplots are returned; default is TRUE
  if(length(y)!=length(x)) stop("y and x should be of same length!")
  n=length(y)
  if(missing(N)) N = ceiling(n/8) - 1
  int_length=1/N
  my_knots=seq(0,1,by=int_length)
  X=des.mat1(x,my_knots,int_length)
  if(missing(return.plot)) return.plot=TRUE
  if(missing(return.traplot)) return.traplot=TRUE
  if(!missing(sseed)) set.seed(sseed)
  if(missing(verbose)) verbose=TRUE
  if(missing(eta)) eta=50
  if(missing(mcmc)) mcmc=1000
  if(missing(brn)) brn=1000
  if(missing(thin)) thin=1
  em=mcmc+brn
  ef=mcmc/thin
  if(missing(tau.in)) tau.in=1
  if(missing(sig.in)) sig.in=1
  if(missing(xi0.in)) xi0.in=0
  if(missing(xi.in)) xi.in=rep(-1,N+1) # non-positive start for the non-increasing constraint
  if(missing(nu.in)) nu.in=0.75
  if(missing(l.in)) l.in=l_est(nu.in,c(0,1),0.05)
  tau=tau.in
  sig=sig.in
  xi0=xi0.in
  xi_in=xi.in
  nu_in=nu.in
  l_in=l.in
  # storage for posterior draws:
  xi_sam=matrix(0,N+1,ef)
  xi0_sam=rep(0,ef)
  tau_sam=rep(0,ef)
  sig_sam=rep(0,ef)
  nu_sam=rep(0,ef)
  ell_sam=rep(0,ef)
  fhat_sam=matrix(0,n,ef)
  if(verbose) print("MCMC sample draws:")
  cnt=0
  ptm=proc.time()
  for(i in 1:em){
    # sampling from \nu and \ell:
    MH.out = nu.MH2(nu_in,l_in,tau,xi_in,my_knots,seed = i)
    nu_out = MH.out$nu
    l_out = MH.out$l
    cnt = cnt + MH.out$count
    L_inv = MH.out$L_inv
    # sampling Xi (truncated to the non-positive orthant):
    y_tilde = y - xi0
    nu.ess = as.vector(samp.WC(my_knots,nu_out,l_out,tau,sseed = i))
    xi_out = ESS.dec(xi_in,nu.ess,y_tilde,X,sig,eta)
    xi_out = sapply(xi_out,function(z) return(min(0,z)))
    set.seed(2*i)
    # sampling xi_0:
    Xxi = as.vector(X %*% xi_out)
    y_star = y - Xxi
    xi0 = rnorm(1,mean(y_star),sqrt(sig/n))
    # sampling \sigma^2:
    y0 = y_star - xi0
    sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2)
    # sampling \tau^2 (with improper prior on \tau^2):
    tau = 1/rgamma(1, shape = (N+1)/2, rate = (sum((t(L_inv)%*%xi_out)^2))/2)
    # storing MCMC samples (after burn-in, every thin-th draw):
    # FIX: was `i%%thin` — with brn not divisible by thin that produced fractional
    # storage indices; `(i-brn)%%thin` matches mon.shrink.hyp/sek.shrink.hyp.
    if(i > brn && (i-brn)%%thin == 0){
      xi_sam[,(i-brn)/thin]=xi_out
      xi0_sam[(i-brn)/thin]=xi0
      sig_sam[(i-brn)/thin]=sig
      tau_sam[(i-brn)/thin]=tau
      nu_sam[(i-brn)/thin]=nu_out
      ell_sam[(i-brn)/thin]=l_out
      fhat_sam[,(i-brn)/thin]=xi0 + Xxi
    }
    if(i%%100==0 && verbose){
      print(i)
    }
    # renewing the initial values:
    xi_in = xi_out
    nu_in = nu_out
    l_in = l_out
  }
  tm=proc.time()-ptm
  if(return.traplot){
    library(mcmcplots)
    mx=min(N+1,25)
    xi_mat=matrix(xi_sam[1:mx,],nrow = mx,
                  dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL))
    traplot(t(xi_mat))
    par(mfrow=c(1,1))
    traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0])))
    traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2)))
    traplot(as.mcmc(tau_sam),main=expression(paste(tau^2)))
    traplot(as.mcmc(nu_sam),main=expression(paste(nu)))
    traplot(as.mcmc(ell_sam),main="\u2113")
  }
  # pointwise posterior mean and 95% credible band of f:
  fmean=rowMeans(fhat_sam)
  fsort=fhat_sam
  for(i in 1:nrow(fhat_sam)){
    fsort[i,]=sort(fhat_sam[i,])
  }
  f_low=fsort[,ef*0.025]
  f_upp=fsort[,ef*0.975]
  # (removed dead ub/lb computation that was immediately recomputed inside the plot branch)
  if(return.plot){
    par(mfrow=c(1,1))
    ub=max(y,f_low,f_upp,fmean)
    lb=min(y,f_low,f_upp,fmean)
    plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub),
         main="Black: Estimate, Blue: 95% CI, Green:data")
    lines(x,fmean,lwd=2,lty=1,col="black")
    lines(x,f_low,lwd=2,lty=2,col="blue")
    lines(x,f_upp,lwd=2,lty=2,col="blue")
  }
  return(list("time"=tm,"rej.prop"=round(cnt/em,3),"nu_sam"=nu_sam,"ell_sam"=ell_sam,"xi_sam"=xi_sam,"xi0_sam"=xi0_sam,
              "sig_sam"=sig_sam,"tau_sam"=tau_sam,"fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp))
}

########### Constrained model with tMVN and half-Cauchy prior on \tau ###############
glb.ESS.hyp=function(y,x,N,eta,mcmc,brn,thin,delta_tau,nu.in,l.in,tau.in,sig.in,xi0.in,xi.in,sseed,
                     verbose,return.plot,return.traplot){
  # y: response variable; x: vector to form design matrix \Psi (n X N+1)
  # N: (N+1) is the number of knots
  # eta: parameter of the approximation function of the indicator functions
  # mcmc, brn, thin: mcmc samples, burn-in and thinning for MCMC
  # delta_tau: hyperparameter associated with the half Cauchy prior on \tau^2
  #   Note: default is 1. Smaller value delta_tau (<1) indicates stronger global shrinkage
  # nu.in, l.in, tau.in, sig.in, xi0.in, xi.in: initial values (supplied by user)
  # sseed: seed value to reproduce results (supplied by user)
  # verbose: logical; if TRUE, prints current status; default is TRUE
  # return.plot: logical; if TRUE a plot of estimate with 95% CI is returned; default is TRUE
  # return.traplot: logical; if TRUE traceplots are returned; default is TRUE
  if(length(y)!=length(x)) stop("y and x should be of same length!")
  n=length(y)
  if(missing(N)) N = ceiling(n/8) - 1
  int_length=1/N
  my_knots=seq(0,1,by=int_length)
  X=des.mat1(x,my_knots,int_length)
  if(missing(return.plot)) return.plot=TRUE
  if(missing(return.traplot)) return.traplot=TRUE
  if(!missing(sseed)) set.seed(sseed)
  if(missing(verbose)) verbose=TRUE
  if(missing(eta)) eta=50
  if(missing(mcmc)) mcmc=1000
  if(missing(brn)) brn=1000
  if(missing(thin)) thin=1
  em=mcmc+brn
  ef=mcmc/thin
  if(missing(tau.in)) tau.in=1
  if(missing(sig.in)) sig.in=1
  if(missing(xi0.in)) xi0.in=0
  if(missing(xi.in)) xi.in=rep(1,N+1)
  if(missing(nu.in)) nu.in=0.75
  if(missing(l.in)) l.in=l_est(nu.in,c(0,1),0.05)
  if(missing(delta_tau)){
    delta = 1
  }else{
    delta = delta_tau
  }
  tau=tau.in
  sig=sig.in
  xi0=xi0.in
  xi_in=xi.in
  nu_in=nu.in
  l_in=l.in
  # storage for posterior draws:
  xi_sam=matrix(0,N+1,ef)
  xi0_sam=rep(0,ef)
  tau_sam=rep(0,ef)
  sig_sam=rep(0,ef)
  nu_sam=rep(0,ef)
  ell_sam=rep(0,ef)
  fhat_sam=matrix(0,n,ef)
  if(verbose) print("MCMC sample draws:")
  cnt=0
  ptm=proc.time()
  for(i in 1:em){
    # sampling from \nu and \ell:
    MH.out = nu.MH2(nu_in,l_in,tau,xi_in,my_knots,seed = i)
    nu_out = MH.out$nu
    l_out = MH.out$l
    cnt = cnt + MH.out$count
    L_inv = MH.out$L_inv
    # sampling Xi:
    y_tilde = y - xi0
    nu.ess = as.vector(samp.WC(my_knots,nu_out,l_out,tau,sseed = i))
    xi_out = ESS(xi_in,nu.ess,y_tilde,X,sig,eta)
    xi_out = sapply(xi_out,function(z) return(max(0,z)))
    set.seed(2*i)
    # sampling xi_0:
    Xxi = as.vector(X %*% xi_out)
    y_star = y - Xxi
    xi0 = rnorm(1,mean(y_star),sqrt(sig/n))
    # sampling \sigma^2:
    y0 = y_star - xi0
    sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2)
    # sampling \tau^2 via slice sampler (half-Cauchy prior on \tau):
    tempt = (sum((t(L_inv)%*%xi_out)^2))/2
    et = delta/tau
    utau = runif(1,0,1/(1+et))
    ubt = min(1,(1-utau)/utau)
    Fubt = pgamma(ubt,((N+1)+1)/2,scale=1/tempt)
    Fubt = max(Fubt,1e-10) # for numerical stability
    ut = runif(1,0,Fubt)
    et = qgamma(ut,((N+1)+1)/2,scale=1/tempt)
    tau = delta/et
    # storing MCMC samples (after burn-in, every thin-th draw):
    # FIX: was `i%%thin` — with brn not divisible by thin that produced fractional
    # storage indices; `(i-brn)%%thin` matches mon.shrink.hyp/sek.shrink.hyp.
    if(i > brn && (i-brn)%%thin == 0){
      xi_sam[,(i-brn)/thin]=xi_out
      xi0_sam[(i-brn)/thin]=xi0
      sig_sam[(i-brn)/thin]=sig
      tau_sam[(i-brn)/thin]=tau
      nu_sam[(i-brn)/thin]=nu_out
      ell_sam[(i-brn)/thin]=l_out
      fhat_sam[,(i-brn)/thin]=xi0 + Xxi
    }
    if(i%%100==0 && verbose){
      print(i)
    }
    # renewing the initial values:
    xi_in = xi_out
    nu_in = nu_out
    l_in = l_out
  }
  tm=proc.time()-ptm
  if(return.traplot){
    library(mcmcplots)
    mx=min(N+1,25)
    xi_mat=matrix(xi_sam[1:mx,],nrow = mx,
                  dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL))
    traplot(t(xi_mat))
    par(mfrow=c(1,1))
    traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0])))
    traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2)))
    traplot(as.mcmc(tau_sam),main=expression(paste(tau^2)))
    traplot(as.mcmc(nu_sam),main=expression(paste(nu)))
    traplot(as.mcmc(ell_sam),main="\u2113")
  }
  # pointwise posterior mean and 95% credible band of f:
  fmean=rowMeans(fhat_sam)
  fsort=fhat_sam
  for(i in 1:nrow(fhat_sam)){
    fsort[i,]=sort(fhat_sam[i,])
  }
  f_low=fsort[,ef*0.025]
  f_upp=fsort[,ef*0.975]
  if(return.plot){
    par(mfrow=c(1,1))
    ub=max(y,f_low,f_upp,fmean)
    lb=min(y,f_low,f_upp,fmean)
    plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub),
         main="Black: Estimate, Blue: 95% CI, Green:data")
    lines(x,fmean,lwd=2,lty=1,col="black")
    lines(x,f_low,lwd=2,lty=2,col="blue")
    lines(x,f_upp,lwd=2,lty=2,col="blue")
  }
  return(list("time"=tm,"rej.prop"=round(cnt/em,3),"nu_sam"=nu_sam,"ell_sam"=ell_sam,"xi_sam"=xi_sam,"xi0_sam"=xi0_sam,
              "sig_sam"=sig_sam,"tau_sam"=tau_sam,"fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp))
}

############ Constrained model with tMVN prior and no shrinkage (i.e., \tau^2=1) with hyperparameter updates ##########
# NOTE(review): this function is truncated in the visible source (the closing of the
# plotting branch and the return value lie beyond the end of the file chunk).
fix.ESS.hyp=function(y,x,N,eta,mcmc,brn,thin,nu.in,l.in,sig.in,xi0.in,xi.in,
                     sseed,verbose,return.plot,return.traplot){
  # y: response variable; x: vector to form design matrix \Psi (n X N+1)
  # N: (N+1) is the number of knots
  # eta: parameter of the approximation function of the indicator functions
  # mcmc, brn, thin: mcmc samples, burn-in and thinning for MCMC
  # nu.in, l.in, sig.in, xi0.in, xi.in: initial values (supplied by user)
  # sseed: seed value to reproduce results (supplied by user)
  # verbose: logical; if TRUE, prints current status; default is TRUE
  # return.plot: logical; if TRUE a plot of estimate with 95% CI is returned; default is TRUE
  # return.traplot: logical; if TRUE traceplots are returned; default is TRUE
  if(length(y)!=length(x)) stop("y and x should be of same length!")
  n=length(y)
  if(missing(N)) N = ceiling(n/8) - 1
  int_length=1/N
  my_knots=seq(0,1,by=int_length)
  X=des.mat1(x,my_knots,int_length)
  XTX=t(X)%*%X
  if(missing(return.plot)) return.plot=TRUE
  if(missing(return.traplot)) return.traplot=TRUE
  if(!missing(sseed)) set.seed(sseed)
  if(missing(verbose)) verbose=TRUE
  if(missing(eta)) eta=50
  if(missing(mcmc)) mcmc=1000
  if(missing(brn)) brn=1000
  if(missing(thin)) thin=1
  em=mcmc+brn
  ef=mcmc/thin
  if(missing(sig.in)) sig.in=1
  if(missing(xi0.in)) xi0.in=0
  if(missing(xi.in)) xi.in=rep(1,N+1)
  if(missing(nu.in)) nu.in=0.75
  if(missing(l.in)) l.in=l_est(nu.in,c(0,1),0.05)
  tau=1 # no shrinkage: \tau^2 fixed at 1
  sig=sig.in
  xi0=xi0.in
  # FIX: original hard-coded xi_in=rep(1,N+1), silently ignoring a user-supplied
  # xi.in; honoring it is behaviorally identical under the default.
  xi_in=xi.in
  nu_in = nu.in
  l_in = l.in
  # constraint Fx+g >= 0 with F = identity, g = 0, i.e. xi >= 0:
  f=diag(N+1)
  g=rep(0,N+1)
  # storage for posterior draws:
  xi_sam=matrix(0,N+1,ef)
  xi0_sam=rep(0,ef)
  sig_sam=rep(0,ef)
  nu_sam=rep(0,ef)
  l_sam=rep(0,ef)
  fhat_sam=matrix(0,n,ef)
  if(verbose) print("MCMC sample draws:")
  ptm=proc.time()
  for(i in 1:em){
    # sampling from \nu and \ell (with \tau fixed at 1):
    MH.out = nu.MH2(nu_in,l_in,tau=1,xi_in,my_knots,seed = i)
    nu_out = MH.out$nu
    l_out = MH.out$l
    K = covmat(my_knots,nu_out,l_out)
    K_inv = tinv(K)
    set.seed(2*i)
    # sampling Xi from the truncated MVN via exact HMC (tmg::rtmg):
    y_tilde = y - xi0
    M=XTX/sig + K_inv
    r=as.vector(t(X)%*%y_tilde)/sig
    xi_out=as.vector(rtmg(1,M,r,xi_in,f,g,burn.in = 0))
    # sampling xi_0:
    Xxi = as.vector(X %*% xi_out)
    y_star = y - Xxi
    xi0 = rnorm(1,mean(y_star),sqrt(sig/n))
    # sampling \sigma^2:
    y0 = y_star - xi0
    sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2)
    # storing MCMC samples (after burn-in, every thin-th draw):
    # FIX: was `i%%thin` — `(i-brn)%%thin` matches the other samplers in this file.
    if(i > brn && (i-brn)%%thin == 0){
      xi_sam[,(i-brn)/thin]=xi_out
      xi0_sam[(i-brn)/thin]=xi0
      sig_sam[(i-brn)/thin]=sig
      nu_sam[(i-brn)/thin]=nu_out
      l_sam[(i-brn)/thin]=l_out
      fhat_sam[,(i-brn)/thin]=xi0 + Xxi
    }
    # renewing the initial values:
    xi_in = xi_out; nu_in = nu_out; l_in = l_out
    if(i%%100==0 && verbose){
      print(i)
    }
  }
  tm=proc.time()-ptm
  if(return.traplot){
    library(mcmcplots)
    mx=min(N+1,25)
    xi_mat=matrix(xi_sam[1:mx,],nrow = mx,
                  dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL))
    traplot(t(xi_mat))
    par(mfrow=c(1,1))
    traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0])))
    traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2)))
  }
  # pointwise posterior mean and 95% credible band of f:
  fmean=rowMeans(fhat_sam)
  fsort=fhat_sam
  for(i in 1:nrow(fhat_sam)){
    fsort[i,]=sort(fhat_sam[i,])
  }
  f_low=fsort[,ef*0.025]
  f_upp=fsort[,ef*0.975]
  if(return.plot){
    par(mfrow=c(1,1))
    ub=max(y,f_low,f_upp,fmean)
    lb=min(y,f_low,f_upp,fmean)
    plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub),
         main="Black: Estimate, Blue: 95% CI, Green:data")
    lines(x,fmean,lwd=2,lty=1,col="black")
    lines(x,f_low,lwd=2,lty=2,col="blue")
lines(x,f_upp,lwd=2,lty=2,col="blue") } return(list("time"=tm,"xi_sam"=xi_sam,"xi0_sam"=xi0_sam,"sig_sam"=sig_sam, "fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp)) } ############ Constrained model with tMVN prior and no shrinkage (with \tau^2=1) and no hyperparameter updates ########## fix.ESS=function(y,x,N,eta,mcmc,brn,thin,nu.fix,l.fix,sig.in,xi0.in,xi.in, sseed,verbose,return.plot,return.traplot){ # y:Response variable; x: vector to form design matrix \Psi (n X N+1) # N: (N+1) is the number of knots # eta:parameter of the approximation function of the indicator functions # mcmc, brn, thin : mcmc samples, burning and thinning for MCMC # nu.fix, l.fix : fixed values supplied by user # tau.in, sig.in, xi0.in, xi.in : initial values (supplied by user) # sseed : seed value to reproduce results (supplied by user) # verbose : logical; if TRUE, prints current status; default is TRUE # return.plot : logical; if true a plot of estimate with 95% CI is returned; default is TRUE # return.traplot : logical; if true traceplots are returned; default is TRUE if(length(y)!=length(x)) stop("y and x should be of same length!") n=length(y) if(missing(N)) N = ceiling(n/8) - 1 int_length=1/N my_knots=seq(0,1,by=int_length) X=des.mat1(x,my_knots,int_length) XTX=t(X)%*%X if(missing(return.plot)) return.plot=TRUE if(missing(return.traplot)) return.traplot=TRUE if(!missing(sseed)) set.seed(sseed) if(missing(verbose)) verbose=TRUE if(missing(eta)) eta=50 if(missing(mcmc)) mcmc=1000 if(missing(brn)) brn=1000 if(missing(thin)) thin=1 em=mcmc+brn ef=mcmc/thin if(missing(sig.in)) sig.in=1 if(missing(xi0.in)) xi0.in=0 if(missing(xi.in)) xi.in=rep(1,N+1) if(missing(nu.fix)) nu.fix=0.75 if(missing(l.fix)) l.fix=l_est(nu.fix,c(0,1),0.05) tau=1 sig=sig.in xi0=xi0.in xi_in=xi.in nu=nu.fix l=l.fix f=diag(N+1) g=rep(0,N+1) # prior covariance matrix K = covmat(my_knots,nu,l) ## MK # prior precision: K_inv = solve(K) # Store variables xi_sam=matrix(0,N+1,ef) xi0_sam=rep(0,ef) 
sig_sam=rep(0,ef) fhat_sam=matrix(0,n,ef) if(verbose) print("MCMC sample draws:") ptm=proc.time(); for(i in 1:em){ set.seed(i) # sampling Xi: y_tilde = y - xi0 M=XTX/sig + K_inv/tau r=as.vector(t(X)%*%y_tilde)/sig xi_out=as.vector(rtmg(1,M,r,xi_in,f,g,burn.in = 0)) # sampling xi_0: Xxi = as.vector(X %*% xi_out) y_star = y - Xxi xi0 = rnorm(1,mean(y_star),sqrt(sig/n)) # sampling \sigma^2: y0 = y_star - xi0 sig = 1/rgamma(1,shape = n/2,rate = sum(y0^2)/2) # storing MCMC samples: if(i > brn && i%%thin == 0){ xi_sam[,(i-brn)/thin]=xi_out xi0_sam[(i-brn)/thin]=xi0 sig_sam[(i-brn)/thin]=sig fhat_sam[,(i-brn)/thin]=xi0 + Xxi } # renewing the initial value: xi_in = xi_out if(i%%100==0){ print(i) } }; tm=proc.time()-ptm if(return.traplot){ library(mcmcplots) mx=min(N+1,25) xi_mat=matrix(xi_sam[1:mx,],nrow = mx, dimnames = list(c(paste("xi[",1:mx,"]",sep = "")),NULL)) traplot(t(xi_mat)) par(mfrow=c(1,1)) traplot(as.mcmc(xi0_sam),main=expression(paste(xi[0]))) traplot(as.mcmc(sig_sam),main=expression(paste(sigma^2))) } fmean=rowMeans(fhat_sam) fsort=fhat_sam for(i in 1:nrow(fhat_sam)){ fsort[i,]=sort(fhat_sam[i,]) } f_low=fsort[,ef*0.025] f_upp=fsort[,ef*0.975] if(return.plot){ par(mfrow=c(1,1)) ub=max(y,f_low,f_upp,fmean) lb=min(y,f_low,f_upp,fmean) plot(x,y,type="p",pch=20,col="green3",ylim=c(lb,ub), main="Black: Estimate, Blue: 95% CI, Green:data") lines(x,fmean,lwd=2,lty=1,col="black") lines(x,f_low,lwd=2,lty=2,col="blue") lines(x,f_upp,lwd=2,lty=2,col="blue") } return(list("time"=tm,"xi_sam"=xi_sam,"xi0_sam"=xi0_sam, "sig_sam"=sig_sam,"fhat_sam"=fhat_sam,"fmean"=fmean,"f_low"=f_low,"f_upp"=f_upp)) } # END