#knots function
default.knots=function(x,num.knots){
  if (missing(num.knots)) num.knots=max(5,min(floor(length(unique(x))/4),35))
  return(quantile(unique(x),seq(0,1,length=(num.knots+2))[-c(1,(num.knots+2))]))
}

#variance smoother: one Newton-Raphson update of the log-variance spline
#(the update steps and the return value of this function are truncated in the
# source; eta_f below expects a list with components $eta and $lambda)
newtonr=function(lambda2, eta, W2, R, tol=10^(-2), maxiter=25){
  dif=1; k=0
  while(dif>tol & k<maxiter){
    # [truncated in source]
  }
}

#variance smoother: iterate newtonr until the smoothing parameter converges
#(the function header is truncated in the source and is reconstructed here from
# the call eta_f(lambda2, eta, W2, R) in the main procedure)
eta_f=function(lambda2, eta, W2, R, tol=10^(-2), maxiter=25){
  dif=1; k=0
  while(dif>tol && k<maxiter){
    k=k+1
    temp=newtonr(lambda2, eta, W2, R)
    nlambda2=temp$lambda
    dif=abs(nlambda2-lambda2)
    lambda2=nlambda2
  }
  return(list(eta=temp$eta, lambda2=lambda2))
}

#EM algorithm
#(the E and M steps are truncated in the source; the call in the main
# procedure expects a list with components $D, $nbeta.hat and $b.hat)
EM_f=function(beta.hat, sigma, lambda1, lambda3, lambdanu, D, tol=10^(-2), maxiter=25){
  Dnu=solve(solve(D)+lambdanu*D3)
  dif=1; k=0
  while(dif>tol & k<maxiter){
    # [truncated in source]
  }
}

#choice of smoothing parameter for the mean and varying-coefficient curves
#(the header and REML grid search of this function are truncated in the source;
# only the fragments below survive, ending with the same tail as lambdanuf)
#    while(dif>.01 & k< ...
#    if(REML[j]>10^5) REML[j]=10^5
#  }
#  vec=cbind(REML, (1:nlambda))[order(REML),]
#  return(1.5^(-1/2*nlambda+vec[1,2]))
#}

#choice of smoothing parameter for subject specific curves
lambdanuf=function(nlambda=25, sigma, lambda1){
  REML=rep(0,nlambda)
  for(j in 1:nlambda){
    lambdanu=1.5^(-1/2*nlambda+j)
    r=Y-M%*%nbeta.hat
    Dnu=solve(solve(D)+lambdanu*D3)
    temp1=0
    for(i in 1:n){
      varm=diag(sigma[ID==i])
      covm=varm+Znu[ID==i,]%*%Dnu%*%t(Znu[ID==i,])
      invcovm=solve(covm)
      REML[j]=REML[j]+log(det(covm))+t(r[ID==i])%*%invcovm%*%r[ID==i]
      temp1=temp1+t(M[ID==i,])%*%invcovm%*%M[ID==i,]
    }
    REML[j]=REML[j]+log(det(temp1))
    if(is.na(REML[j])) REML[j]=10^5
  }
  vec=cbind(REML,(1:nlambda))[order(REML),]
  return(1.5^(-1/2*nlambda+vec[1,2]))
}

### Main procedure #########

#mean function: quadratic truncated-power spline basis
knots1=default.knots(T, K1)
X1=cbind(rep(1,N1), T, T^2)
Z1=outer(T, knots1, "-")
Z1=(Z1*(Z1>0))^2
W1=cbind(X1,Z1)
D1=diag(c(rep(0,p),rep(1,K1)))

#varying coefficient (treatment effect)
GX1=X1*trt
GZ1=Z1*trt
G=cbind(X1,Z1)*trt
D12=diag(c(rep(0,p),rep(1,K1),rep(0,p),rep(1,K1)))
M1=cbind(X1,GX1)
M=cbind(W1,G)

#random subject specific curves
knotsnu=default.knots(T, Knu)
Xnu=cbind(rep(1,N1), T, T^2)
Znu=outer(T, knotsnu, "-")
Znu=(Znu*(Znu>0))^2
Znu=cbind(Xnu,Znu)
D3=diag(c(rep(0,pnu),rep(1,Knu)))

#variance function: linear truncated-power spline basis
knots2=default.knots(T, K2)
X2=cbind(rep(1,N1), T)
Z2=outer(T, knots2, "-")
Z2=Z2*(Z2>0)
W2=cbind(X2,Z2)
D2=diag(c(rep(0,p2),rep(1,K2)))

#initial fit of the mean and varying-coefficient curves as a mixed model
library(nlme)
group=rep(1,N1)
fit=lme(Y~-1+X1+GX1, random=list(group=pdIdent(~-1+Z1+GZ1)))
beta.hat=c(fit$coef$fixed[1:p], unlist(fit$coef$random)[1:K1],
           fit$coef$fixed[(p+1):(2*p)], unlist(fit$coef$random)[(1+K1):(2*K1)])
lambda1=as.numeric(VarCorr(fit)[(2*K1+1),1])/as.numeric(VarCorr(fit)[K1,1])
lambdanu=1; lambda3=lambda1
D=diag(rep(1,(pnu+Knu)))
sigma.hat=rep(as.numeric(VarCorr(fit)[(2*K1+1),1]), N1)

#subject specific curves via the EM algorithm
temp=EM_f(beta.hat, sigma.hat, lambda1, lambda3, lambdanu, D)
D=temp$D
nbeta.hat=temp$nbeta.hat
b.hat=temp$b.hat

#residuals and initial fit of the variance function
for(i in 1:n){
  R[ID==i]=Y[ID==i]-M[ID==i,]%*%beta.hat-Znu[ID==i,]%*%b.hat[i,]
}
fit2=lme(log(R^2)~-1+X2, random=list(group=pdIdent(~-1+Z2)))
eta=c(fit2$coef$fixed, unlist(fit2$coef$random))
lambda2=as.numeric(VarCorr(fit2)[(K2+1),1])/as.numeric(VarCorr(fit2)[K2,1])
variance=eta_f(lambda2, eta, W2, R)
eta1=variance$eta
lambda2=variance$lambda2
sigma.hat=exp(W2%*%eta1)

#final iteration between the curve estimates and the variance estimate
#(only its first line survives; the loop body is truncated in the source)
count=0; maxiter=5; dif=1
while(dif>.01 && count<maxiter){
  # [truncated in source]
}
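#----------------------------------------------------------------------------
#Example inputs (illustrative, not part of the original code).  The procedure
#above assumes the data objects Y, T, ID, trt, R and the constants n, N1, p,
#p2, pnu, K1, K2, Knu already exist in the workspace.  The block below is a
#minimal simulated setup so the design matrices and the initial lme() fits can
#be exercised; the sample sizes, knot numbers, seed and data-generating model
#are all assumptions and should be replaced by the real data.
set.seed(1)
n=30                                       #number of subjects (assumed)
m=15                                       #observations per subject (assumed)
N1=n*m                                     #total number of observations
ID=rep(1:n, each=m)                        #subject identifiers
T=as.vector(replicate(n, sort(runif(m))))  #observation times on [0,1]
trt=rep(rbinom(n,1,0.5), each=m)           #treatment indicator, constant within subject
Y=sin(2*pi*T)+trt*T+rnorm(N1, sd=0.3*exp(T/2))  #simulated heteroscedastic responses
R=rep(0,N1)                                #residual vector filled in by the main procedure
p=3; pnu=3; p2=2                           #numbers of columns of X1, Xnu and X2
K1=10; Knu=5; K2=5                         #numbers of spline knots (assumed)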
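#----------------------------------------------------------------------------
#Hedged sketch of a penalized Newton-Raphson update for the log-variance
#spline, offered because the body of newtonr above is truncated.  It is NOT
#the original update: it assumes the working model R^2 ~ sigma^2*chi^2_1 with
#log(sigma^2)=W2%*%eta and penalty (lambda2/2)*t(eta)%*%D2%*%eta, and it leaves
#lambda2 unchanged, whereas the original newtonr also returns an updated
#smoothing parameter.  The function name is hypothetical.
newton_step_sketch=function(lambda2, eta, W2, R, D2){
  mu=as.vector(exp(W2%*%eta))                      #current fitted variances
  score=0.5*t(W2)%*%(R^2/mu-1)-lambda2*D2%*%eta    #penalized score
  hess=-0.5*t(W2)%*%(W2*(R^2/mu))-lambda2*D2       #penalized Hessian
  neweta=as.vector(eta-solve(hess,score))          #one Newton-Raphson step
  return(list(eta=neweta, lambda=lambda2))
}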
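#----------------------------------------------------------------------------
#Hedged sketch of one EM-type iteration for the subject specific curves,
#offered because the body of EM_f above is truncated.  It is NOT the original
#algorithm: it assumes a penalized generalized-least-squares update of the
#mean/varying-coefficient vector (with penalties lambda1 and lambda3 on the
#spline parts of M), BLUPs for the subject curves, and an update of D from the
#conditional second moments.  The function name and the exact penalty form are
#assumptions; the beta.hat argument is kept only to mirror EM_f's signature.
EM_step_sketch=function(beta.hat, sigma, lambda1, lambda3, lambdanu, D){
  Dnu=solve(solve(D)+lambdanu*D3)                  #penalized covariance of the curve coefficients
  P=diag(c(rep(0,p),rep(lambda1,K1),rep(0,p),rep(lambda3,K1)))  #assumed roughness penalty on beta
  A=matrix(0,ncol(M),ncol(M)); a=rep(0,ncol(M))
  for(i in 1:n){
    Vinv=solve(Znu[ID==i,]%*%Dnu%*%t(Znu[ID==i,])+diag(sigma[ID==i]))
    A=A+t(M[ID==i,])%*%Vinv%*%M[ID==i,]
    a=a+t(M[ID==i,])%*%Vinv%*%Y[ID==i]
  }
  nbeta.hat=as.vector(solve(A+P,a))                #penalized GLS update of the fixed curves
  q=ncol(Znu)
  b.hat=matrix(0,n,q); Dnew=matrix(0,q,q)
  for(i in 1:n){
    Vinv=solve(Znu[ID==i,]%*%Dnu%*%t(Znu[ID==i,])+diag(sigma[ID==i]))
    ri=Y[ID==i]-M[ID==i,]%*%nbeta.hat
    b.hat[i,]=Dnu%*%t(Znu[ID==i,])%*%Vinv%*%ri     #BLUP of the subject curve (E-step)
    Dnew=Dnew+b.hat[i,]%*%t(b.hat[i,])+
      Dnu-Dnu%*%t(Znu[ID==i,])%*%Vinv%*%Znu[ID==i,]%*%Dnu  #conditional second moment
  }
  return(list(D=Dnew/n, nbeta.hat=nbeta.hat, b.hat=b.hat))  #M-step update of D
}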
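#----------------------------------------------------------------------------
#Hedged sketch of the final loop, whose body is truncated above.  The structure
#below is an assumption, not the original code: it alternates between
#re-estimating the curves given the current variance function (EM_f) and
#re-estimating the variance function from the new residuals (eta_f), with the
#smoothing parameter for the subject curves re-selected by lambdanuf.  It
#relies on the truncated EM_f and eta_f above being completed.
count=0; maxiter=5; dif=1
while(dif>.01 && count<maxiter){
  count=count+1
  old=nbeta.hat
  lambdanu=lambdanuf(25, sigma.hat, lambda1)       #re-select curve smoothing parameter (assumed step)
  temp=EM_f(nbeta.hat, sigma.hat, lambda1, lambda3, lambdanu, D)
  D=temp$D; nbeta.hat=temp$nbeta.hat; b.hat=temp$b.hat
  for(i in 1:n){
    R[ID==i]=Y[ID==i]-M[ID==i,]%*%nbeta.hat-Znu[ID==i,]%*%b.hat[i,]
  }
  variance=eta_f(lambda2, eta1, W2, R)
  eta1=variance$eta
  lambda2=variance$lambda2
  sigma.hat=exp(W2%*%eta1)
  dif=max(abs(nbeta.hat-old))                      #convergence of the curve estimates
}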