{
# Banner announcing the start of the regression test section.  Returns the
# literal TRUE (not the reassignable shortcut T) so the block evaluates to a
# passing result like every other test block in this file.
cat("--------------- Regression ---------------\n")
TRUE
}
{
# Function: lm
# Data: Zar, Example 16.1 p. 319
# Reference: Zar, J.H. Biostatistical Analysis, 3rd edition. 1996.
#      Prentice-Hall Inc.
# Description: simple linear regression of y on x; verify that the fitted
#      intercept and slope agree with the published values to within tol.
tol <- 1e-6
x <- c(3, 4, 5, 6, 8, 9, 10, 11, 12, 14, 15, 16, 17)
y <- c(1.4, 1.5, 2.2, 2.4, 3.1, 3.2, 3.2, 3.9, 4.1, 4.7, 4.5, 5.2, 5.0)
lm.xy <- lm(y ~ x)
# Published values: intercept 0.7130945, slope 0.270229.
all(abs(lm.xy$coef[1:2] - c(0.7130945, 0.270229)) < tol)
}
{
# Function(s): lm, anova.lm
# Data: Zar, Example 16.1 p. 319
# Description: analysis-of-variance table for the simple linear regression
#      fitted in the previous test (lm.xy); verify degrees of freedom, sums
#      of squares, mean squares and the p-value against the published
#      values; the F statistic is only published to one decimal place, so
#      it gets a looser tolerance (tol2).
tol1 <- 1e-6
tol2 <- 0.15
lm.test <- anova(lm.xy)
all(c(lm.test$Df == c(1, 11),
      abs(lm.test$"Sum of Sq" - c(19.132214, 0.524709)) < tol1,
      abs(lm.test$"Mean Sq" - c(19.132214, 0.047701)) < tol1,
      abs(lm.test$"F Value"[1] - 401.1) < tol2,
      abs(lm.test$"Pr(F)"[1] - 0.00000000053) < tol1))
}
{
# Function: lm
# Data: Longley economic data: the six regressors are major economic
#      indicators; the regressand is total employment
# Reference(s): Longley, J.W. 1967. An Appraisal of Least Squares Programs
#      for the Electronic Computer from the Point of View of the User.
#      American Statistical Association Journal.
# Description: multiple regression with 6 regressors on a famously
#      ill-conditioned design; check the six regressor coefficients, and
#      the intercept with a looser tolerance (its magnitude is ~3.5e6).
tol1 <- 1e-8
tol2 <- 1e-2
# One column per regressor, 16 yearly observations (1947-1962) each.
l.x <- cbind(c(83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2,
               104.6, 108.4, 110.8, 112.6, 114.2, 115.7, 116.9),
             c(234289, 259426, 258054, 284599, 328975, 346999, 365385,
               363112, 397469, 419180, 442769, 444546, 482704, 502601,
               518173, 554894),
             c(2356, 2325, 3682, 3351, 2099, 1932, 1870, 3578, 2904, 2822,
               2936, 4681, 3813, 3931, 4806, 4007),
             c(1590, 1456, 1616, 1650, 3099, 3594, 3547, 3350, 3048, 2857,
               2798, 2637, 2552, 2514, 2572, 2827),
             c(107608, 108632, 109773, 110929, 112075, 113270, 115094,
               116219, 117388, 118734, 120445, 121950, 123366, 125368,
               127852, 130081),
             c(1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1955, 1956,
               1957, 1958, 1959, 1960, 1961, 1962))
l.y <- c(60323, 61122, 60171, 61187, 63221, 63639, 64989, 63761, 66019,
         67857, 68169, 66513, 68655, 69564, 69331, 70551)
lm.test <- lm(l.y ~ l.x[,1] + l.x[,2] + l.x[,3] + l.x[,4] + l.x[,5] + l.x[,6])
all(c(abs(lm.test$coef[2:7] -
          c(15.06187227, -0.03581917, -2.02022980, -1.03322686,
            -0.05110410, 1829.15146461)) < tol1,
      abs(lm.test$coef[1] + 3482258.6330) < tol2))
}
{
# Function(s): lm, poly, poly.transform
# Data: Zar, Example 20.1 p. 449
# Reference: Zar, J.H. Biostatistical Analysis, 3rd edition. 1996.
#      Prentice-Hall Inc.
# Description: quartic polynomial regression fitted on an orthogonal
#      polynomial basis; poly.transform() maps the fit back onto powers of
#      x, whose coefficients are compared against the published values.
tol <- 1e-3
x <- c(1.22, 1.34, 1.51, 1.66, 1.72, 1.93, 2.14, 2.39, 2.51, 2.78, 2.97,
       3.17, 3.32, 3.50, 3.53, 3.85, 3.95, 4.11, 4.18)
y <- c(40.9, 41.8, 42.4, 43.0, 43.4, 43.9, 44.3, 44.7, 45.0, 45.1, 45.4,
       46.2, 47.0, 48.6, 49.0, 49.7, 50.0, 50.8, 51.1)
lm.test <- lm(y ~ poly(x, 4))
lm.coef <- poly.transform(poly(x, 4), lm.test$coef)
# Published coefficients of 1, x, x^2, x^3, x^4.
all(abs(lm.coef - c(6.9265, 55.835, -31.487, 7.7625, -0.67507)) < tol)
}
{
# Function: lm
# Data: Brennan and Carroll data as reproduced in Uyar and Erdem, Table 1
#      p. 296; pu80 is the response (presumably a 1980 state-level
#      percentage measure -- confirm against the cited table), with four
#      region dummies (x.ne, x.s, x.mw, x.w) and a right-to-work dummy
#      (x.rtw).  NOTE(review): the earlier comment here, "regressand is
#      total employment", was a copy-paste leftover from the Longley test.
# References: Uyar, B. and O. Erdem. 1990. Regression Procedures in SAS:
#      Problems? The American Statistician, 44(4), 296-301.
# 
#      Brennan, M.J. and T.M. Thomas. 1987. Preface to Quantitative
#      Economics and Econometrics. Cincinnati, OH: South-Western.
# Description: multiple regression with 5 regressors, no intercept; 
#      check coefficients of regressors, Root MSE, and R-square; 
#      NOTE: coefficient published in Uyar and Erdem for x.s 
#      (equation 5) has the 1 and 7 transposed
tol <- 5e-6
pu80 <- c(21.8,33.6,15.8,16.0,27.0,18.1,22.9,25.1,11.7,15.0,27.9,18.5,30.6,
          30.4,22.0,15.5,24.0,16.4,24.1,22.6,24.9,37.4,26.2,16.3,27.6,29.2,
          18.2,23.8,15.8,25.6,18.9,38.7, 9.6,17.1,31.5,15.3,26.0,34.6,28.4,
           7.8,14.7,19.1,11.4,17.8,18.0,14.7,34.4,34.4,28.6,18.6)
# Region indicators (0/1, one value per observation):
x.ne <- c(0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,0,1,
          0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0)
x.s <- c(1,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
         0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,0)
x.mw <- c(0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,
          1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0)
x.w <- c(0,1,1,0,1,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,
         0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0,1)
# Right-to-work indicator:
x.rtw <- c(1,0,1,1,0,0,0,0,1,1,0,0,0,0,1,1,0,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,
           1,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1)
# "-1" drops the intercept so all four region dummies can enter the model.
lm.test <- lm(pu80 ~ -1 + x.ne + x.s + x.mw + x.w + x.rtw)
all(c(abs(lm.test$coefficients - 
          c(25.51818,24.52713,29.14545,26.88894,-9.98907)) < tol,
      abs(summary(lm.test)$r.squared - 0.9538) < tol,
      abs(summary(lm.test)$sigma - 5.363056) < tol))
}
{
# Functions: lm, anova.lm
# Data: Brennan and Carroll data as reproduced in Uyar and Erdem, Table 1
#      p. 296 (same data and no-intercept fit, lm.test, as the previous
#      test)
# References: Uyar, B. and O. Erdem. 1990. Regression Procedures in SAS:
#      Problems? The American Statistician, 44(4), 296-301.
# 
#      Brennan, M.J. and T.M. Thomas. 1987. Preface to Quantitative
#      Economics and Econometrics. Cincinnati, OH: South-Western.
# Description: multiple regression with 5 regressors and no intercept, continued
#      from previous test; the published ANOVA reports only pooled model
#      and error components, so the five 1-df terms are summed before
#      comparison; check df's, sums of squares and mean squares; check the
#      F value and p-value using looser tolerances
tol1 <- 1e-5
tol2 <- 1e-3
lm.anova <- anova(lm.test)
all(c(abs(sum(lm.anova$"Sum of Sq"[1:5]) - 26723.19322) < tol1,    # model SS
      abs(lm.anova$"Sum of Sq"[6] - 1294.30678) < tol1,            # error SS
      abs((sum(lm.anova$"Sum of Sq"[1:5])/5) - 5344.63864) < tol1, # model MS
      abs(lm.anova$"Mean Sq"[6] - 28.76237281) < tol1,             # error MS
      sum(lm.anova$"Df"[1:5]) == 5,
      lm.anova$"Df"[6] == 45,
      abs(((sum(lm.anova$"Sum of Sq"[1:5])/5)/lm.anova$"Mean Sq"[6]) -
            185.821) < tol2,
      # NOTE(review): the statistic below sums the five 1-df mean squares
      # (equal to the model SS), so the ratio is 5x the F value checked
      # above; the comparison still passes because 1 - pf(...) is ~0 either
      # way and the published p-value is 0.0001 -- confirm whether a "/5"
      # was intended here.
      abs(1 - pf((sum(lm.anova$"Mean Sq"[1:5])/lm.anova$"Mean Sq"[6]),5,45)
            - 0.0001) < tol2))
}
{
# Function: glm
# Data: Fisher and Van Belle, Table 13.1 p. 632
# Reference: Fisher, L. and G. Van Belle. 1993. Biostatistics: A Methodology
#      for the Health Sciences
# Description: logistic regression (family=binomial) of y on x1, x3, x4 and
#      x5; verify the five fitted coefficients against the published
#      values.  x2 is entered into the data frame but not used in this
#      model; x4c (x4 coded by decade) is prepared here for the follow-up
#      test.
tol <- 1e-3
y <- c(0,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,1,
       0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,
       1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,
       0,1,0,1,1,0,0,0,0,0)
x1 <- c(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,1,0,0,0,
        1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
        0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,
        0,0,0,0,0,0,0,1,0,0)
x2 <- c(0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,
        0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,
        1,1,0,1,1,0,0,1,0,0,0,0,0,1,0,1,0,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,
        0,0,0,0,1,1,0,1,0,1)
x3 <- c(0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
        0,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
        1,1,1,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,1,0,0,0,0,1,
        0,0,0,1,1,1,0,0,1,1)
x4 <- c(56,80,61,26,53,87,21,69,57,76,66,48,18,46,22,33,38,27,60,31,59,29,
        60,63,80,23,71,87,70,22,17,49,50,51,37,76,60,78,60,57,28,94,43,70,
        70,26,19,80,66,55,36,28,59,50,20,74,54,68,25,27,77,54,43,27,66,47,
        37,36,76,33,40,90,45,75,70,36,57,22,33,75,22,80,85,90,71,51,67,77,
        20,52,60,29,30,20,36,54,65,47,22,69,68,49,25,44,56,42)
x5 <- c(0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,
        0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,
        1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,
        0,0,0,0,0,0,0,0,0,0)
x4c <- trunc(x4/10)   # x4 coded by decade, used in the next test
y.df <- data.frame(y,x1,x2,x3,x4,x5,x4c)
glm.test <- glm(y ~ x1 + x3 + x4 + x5, family=binomial, data=y.df)
# Published coefficients: intercept, x1, x3, x4, x5.
all(abs(glm.test$coefficients -
        c(-8.895, 3.701, 3.186, 0.08983, 2.386)) < tol)
}
{
# Functions: glm, anova.glm
# Data: Fisher and Van Belle, Table 13.1 p. 632
# Reference: Fisher, L. and G. Van Belle. 1993. Biostatistics: A Methodology
#      for the Health Sciences
# Description: chi-square analysis of deviance for the logistic regression
#      of the previous test, refitted with the decade-coded x4c in place of
#      x4 (the published results use x4c); check the per-term df's,
#      deviance changes and residual deviances.
tol <- 1e-3
glm2.test <- glm(y ~ x1 + x3 + x4c + x5, family=binomial, data=y.df)
glm.anova <- anova(glm2.test,test="Chi")
all(c(glm.anova$Df[-1] == rep(1, 4),
      abs(glm.anova$Deviance[-1] - c(13.552, 15.006, 11.289, 5.698)) < tol,
      abs(glm.anova$"Resid. Dev"[-3] - c(105.528, 91.976, 65.682, 59.984)) < tol))
}
{
# Function: glm
# Data: Fisher and Van Belle, Example 7.7 p. 268
# Reference: Fisher, L. and G. Van Belle. 1993. Biostatistics: A Methodology
#      for the Health Sciences
# Description: log-linear regression (family=poisson) for a 2x2x3 table
#      (test x waves x vessels) with two interaction terms; check
#      coefficients and fitted values; NOTE: Since we are using a glm, the
#      response variable used is counts/sum(counts) in order to get the
#      coefficients published by Fisher and Van Belle; likewise, the glm
#      fitted values are multiplied by the total count (594 = sum of
#      e.counts) in order to get the published fitted values
tol1 <- 7.2e-4
tol2 <- 6.2e-3
e.test <- factor(c(rep(1,6),rep(2,6)))
e.waves <- factor(c(rep(c(1,2),6)))
e.vessels <- factor(c(rep(1,2),rep(2,2),rep(3,2),rep(1,2),rep(2,2),rep(3,2)))
e.counts <- c(30,17,64,22,147,80,118,14,46,7,38,11)
e.df <- data.frame(e.test,e.waves,e.vessels,e.counts)
# Temporarily switch to sum-to-zero contrasts so the coefficients match the
# published parameterization.  NOTE(review): TEMPORARY=T appears to be an
# S-PLUS-specific options() argument (it has no such meaning in R) --
# confirm the target dialect before porting.
old.op <- options(contrasts=c("contr.sum","contr.poly"),
	TEMPORARY=T)
glm.test <- glm(e.counts/sum(e.counts) ~ e.test + e.waves + e.vessels +
     e.test:e.waves + e.test:e.vessels, family=poisson(link=log),
     data=e.df) 
options(old.op, TEMPORARY=T)
all(c(abs(glm.test$coefficients - c(-2.885,0.321,0.637,-0.046,-0.200,-0.284,
                                    -0.680,0.078)) < tol1,
      abs((glm.test$fitted.values*594) - c(31.46,15.54,57.57,28.43,151.97,
                                           75.04,113.95,18.05,45.75,7.25,
                                           42.30,6.70)) < tol2))
}
{
# Function: glm
# Data: Baker and Nelder, Appendix D.3 p. 8
# Reference: Baker, R.J. and J.A. Nelder. 1978. The GLIM System, Release 3.
#      Generalised Linear Interactive Modelling Manual. Royal Statistical
#      Society.
# Description: log-linear regression (family=poisson) using treatment
#      contrasts (first level as baseline) to match the GLIM
#      parameterization; check the nine coefficients; check fitted values
#      using a looser tolerance (they are published to one decimal place)
tol1 <- 1e-3
tol2 <- .04
y <- c(454,408,523,364,249,322,268,283,349,312,411,266,249,347,285,290)
y.cow <- factor(c(1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4))   # cow, 4 levels
y.site <- factor(c(1,2,3,4,1,2,3,4,1,2,3,4,1,2,3,4))  # site within cow, 4 levels
y.A <- factor(c(1,1,2,2,1,1,2,2,2,2,1,1,2,2,1,1))     # treatment factor A
y.B <- factor(c(1,2,1,2,2,1,2,1,1,2,1,2,2,1,2,1))     # treatment factor B
y.df <- data.frame(y,y.cow,y.site,y.A,y.B)
# Temporarily switch to treatment contrasts.  NOTE(review): TEMPORARY=T
# appears to be an S-PLUS-specific options() argument (no such meaning in
# R) -- confirm the target dialect before porting.
old.op <- options(contrasts=c("contr.treatment","contr.poly"),
	TEMPORARY=T)
glm.test <- glm(y ~ y.cow + y.site + y.B + y.A, family=poisson(link=log),
     data=y.df)
options(old.op, TEMPORARY=T)
pub.fitted.values <- c(458.7,409.4,525.5,355.4,239.7,325.3,274.6,282.4,
                       351.8,314.0,401.0,271.2,250.8,340.4,285.9,294.0)
all(c(abs(glm.test$coefficients[1] - 6.128) < tol1,    # intercept
      abs(glm.test$coefficients[2] + 0.4395) < tol1,   # cow effects (2:4)
      abs(glm.test$coefficients[3] + 0.2679) < tol1,
      abs(glm.test$coefficients[4] + 0.3968) < tol1,
      abs(glm.test$coefficients[5] - 0.09579) < tol1,  # site effects (5:7)
      abs(glm.test$coefficients[6] - 0.1334) < tol1,
      abs(glm.test$coefficients[7] + 0.04814) < tol1,
      abs(glm.test$coefficients[8] + 0.2095) < tol1,   # B effect
      abs(glm.test$coefficients[9] - 0.002638) < tol1, # A effect
      abs(glm.test$fitted.values - pub.fitted.values) < tol2))
}
{
# Function: glm, anova.glm
# Data: Baker and Nelder, Appendix D.3 p. 8
# Reference: Baker, R.J. and J.A. Nelder. 1978. The GLIM System, Release 3.
#      Generalised Linear Interactive Modelling Manual. Royal Statistical
#      Society.
# Description: chi-square analysis of deviance for the Poisson log-linear
#      fit of the previous test (glm.test); verify the residual df's and
#      residual deviances against the published values.
tol <- 4e-3
glm.anova <- anova(glm.test,test="Chi")
all(c(glm.anova$"Resid. Df"[-2] == c(15, 9, 8, 7),
      abs(glm.anova$"Resid. Dev"[-2] - c(265.3, 58.84, 1.413, 1.404)) < tol))
}
{
# Function: glm, summary.glm
# Data: Fisher and Van Belle, Table 11.2 p. 499, Example 11.1, p. 503
# Reference: Fisher, L. and G. Van Belle. 1993. Biostatistics: A Methodology
#      for the Health Sciences
# Description: linear model fitted through glm with the default gaussian
#      family; verify the three coefficients, the residual deviance (the
#      residual sum of squares for a gaussian fit), and the residual df.
tol <- 5e-3
y <- c(36.7,51.3,40.8,58.3,42.2,34.6,77.8,17.2,-38.4,1.0,53.7,14.3,65.0,5.6,
       4.4,1.6,6.2,12.2,29.9,76.1,11.5,19.8,64.9,47.8,35.0,1.7,51.5,20.2,-9.3,
       13.9,-19.0,-2.3,41.6,18.4,9.9)
x1 <- c(4.0,6.0,1.5,4.0,2.5,3.0,3.0,2.5,3.0,3.0,2.0,8.0,5.0,2.0,2.5,2.0,1.5,
        1.0,3.0,4.0,3.0,3.0,7.0,6.0,2.0,4.0,2.0,1.0,1.0,2.0,1.0,3.0,4.0,8.0,
        2.0)
x2 <- c(3,3,2,2,2,2,2,2,3,3,3,3,4,2,2,2,2,1,3,3,3,3,4,4,2,2,2,1,1,1,1,1,3,4,2)
glm.test <- glm(y ~ x1 + x2)
all(abs(glm.test$coefficients - c(-2.55, 1.105, 10.376)) < tol) &&
  summary(glm.test)$df[2] == 32 &&
  abs(summary(glm.test)$deviance - 21070.09) < tol
}
{
# Function: nls
# Data: Bates and Watts, Table A1.12 p. 279 
# Reference: Bates, D.M. and D.G. Watts. 1988. Nonlinear Regression Analysis 
#      and Its Applications
# Description: utilization of nitrite as a function of light intensity over 
#      2 days; fit a Michaelis-Menten model for day 1 with 
#      incremental parameters (p1, p2) for day 2; check parameter estimates,
#      standard errors of estimates, and correlation matrix 
#      against published results and SAS implementation
tol1 <- 4e-4   # relative tolerance, used for t1/p1 estimates and their SEs
tol2 <- 4.5e-3 # absolute tolerance, used for t2/p2 and the correlations
# 8 light levels x 3 replicates, repeated for each of the 2 days.
light <- c(rep(rep(c(2.2,5.5,9.6,17.5,27.0,46.0,94.0,170.0),
     c(3,3,3,3,3,3,3,3)),2))
day <- c(rep(c(0,1),c(24,24)))  # day indicator: 0 = day 1, 1 = day 2
nitrite <- c(256,685,1537,2148,2583,3376,3634,4960,3814,6986,6903,7636,
             9884,11597,10221,17319,16539,15047,19250,20282,18357,19638,
             19043,17475,549,1550,1882,1888,3372,2362,4561,4939,4356,
             7548,7471,7642,9684,8988,8385,13505,15324,15430,17842,18185,
             17331,18202,18315,15605)
nitrite.df <- data.frame(light,day,nitrite)
# S-PLUS idiom: attach the starting values to the data frame via
# parameters<-; nls() picks them up from there.  NOTE(review): in R this
# would be the `start` argument of nls() instead -- confirm target dialect.
parameters(nitrite.df) <- list(t1=25000,p1=-3000,t2=34,p2=1)
# Michaelis-Menten: asymptote t1 (+ p1 on day 2), half-saturation constant
# t2 (+ p2 on day 2).
nls.out <- nls(nitrite ~ ((t1 + (p1 * day)) * light)/
     ((t2 + (p2 * day)) + light), data=nitrite.df) 
nls.se <- summary(nls.out)$parameters[,2]  # column 2 = standard errors
nls.cor <- summary(nls.out)$correlation    # parameter correlation matrix
all(c(abs((nls.out$parameters[1] - 24743)/nls.out$parameters[1]) < tol1,
      abs((nls.out$parameters[2] - -2329)/nls.out$parameters[2]) < tol1,
      abs(nls.out$parameters[3:4] - c(35.27,-2.174)) < tol2,
      abs((nls.se[1] - 1241)/nls.se[1]) < tol1,
      abs((nls.se[2] - 1720)/nls.se[2]) < tol1,
      abs(nls.se[3:4] - c(4.66,6.63)) < tol2,
      abs(nls.cor[2,1] + 0.72) < tol2,
      abs(nls.cor[3,1] - 0.88) < tol2,
      abs(nls.cor[4,1] + 0.62) < tol2,
      abs(nls.cor[3,2] + 0.64) < tol2,
      abs(nls.cor[4,2] - 0.88) < tol2,
      abs(nls.cor[4,3] + 0.70) < tol2))
}
{
# Function: lme
# Data: Zerbe, plasma inorganic phosphate measured at 0, .5, 1, 1.5, 2, 3,
#      4, and 5 hours.
#      NOTE(review): the original header said "13 control patients and 20
#      obese patients" followed by a stray line "8, 10, 12, and 14", but
#      the data below is 20 subjects x 8 time points (160 observations) --
#      presumably a single group; confirm against the references.
# References: Zerbe, G.O. 1979. Randomization Analysis of the Completely
#      Randomized Design Extended to Growth and Response Curves.
#      Journal of the American Statistical Association. 74(365):215-221.
# 
#      Chi, E.M. and G.C. Reinsel. 1989. Models for Longitudinal
#      Data with Random Effects and AR(1) Errors. Journal of the 
#      American Statistical Association. 84(406):452-459.  
# Description: linear mixed effects model; Chi and Reinsel model (5.1) p. 456,
#      random intercept; check loglikelihood, variance of 
#      random intercept, residual variance, and fixed effects 
#      coefficients; compare results with results from a SAS run 
#      using proc MIXED
tol1 <- 3e-4    # tolerance for the loglikelihood
tol2 <- 2.2e-6  # tolerance for variance components and fixed effects
Subject <- as.numeric(c(rep(1:20,8)))
# Piecewise-linear time basis with a knot at 2 hours: Time1 is hours capped
# at 2; Time2 is hours beyond 2 (0 up to hour 2).
Time1 <- c(rep(c(0,.5,1,1.5,2,2,2,2),c(20,20,20,20,20,20,20,20)))
Time2 <- c(rep(c(0,0,0,0,0,1,2,3),c(20,20,20,20,20,20,20,20)))
Phosph <- c(4.3,5.0,4.6,4.3,3.1,4.8,3.7,5.4,3.0,4.9,4.8,4.4,4.9,5.1,4.8,4.2,
            6.6,3.6,4.5,4.6,3.3,4.9,4.4,3.9,3.1,5.0,3.1,4.7,2.5,5.0,4.3,4.2,
            4.3,4.1,4.6,3.5,6.1,3.4,4.0,4.4,3.0,4.1,3.9,3.1,3.3,2.9,3.3,3.9,
            2.3,4.1,4.7,4.2,4.0,4.6,4.6,3.8,5.2,3.1,3.7,3.8,2.6,3.7,3.9,3.1,
            2.6,2.8,2.8,4.1,2.2,3.7,4.6,3.4,4.0,4.1,4.4,3.6,4.1,2.8,3.3,3.8,
            2.2,3.7,3.7,3.1,2.6,2.2,2.9,2.8,2.1,3.7,4.7,3.5,3.3,3.4,4.1,3.3,
            4.3,2.1,2.4,3.8,2.5,4.1,4.2,3.1,1.9,3.1,3.6,3.7,2.6,4.1,3.7,3.4,
            4.1,4.2,4.0,3.1,3.8,2.4,2.3,3.6,2.4,4.7,4.8,3.6,2.3,3.5,4.3,3.5,
            3.2,4.7,3.6,3.9,4.2,4.4,3.8,3.5,4.2,2.5,3.1,3.8,3.4,4.9,5.0,4.0,
            2.7,3.6,4.4,3.7,3.5,4.9,3.9,4.0,4.3,4.9,3.8,3.9,4.8,3.5,3.3,3.8)


phosph.df <- data.frame(Subject,Time1,Time2,Phosph)

# Random-intercept model, fitted by maximum likelihood.
ph.fit1 <- lme(fixed=Phosph~ Time1+Time2,random=~1 | Subject,
               method="ML", data=phosph.df)

all(c(
   abs(ph.fit1$logLik + 111.406) < tol1,
   # VarCorr column 1 holds the variances: random intercept, then residual.
   abs(as.numeric(VarCorr(ph.fit1)[, 1]) - c(0.35507238, 0.16383033)) < tol2,
   abs(fixed.effects(ph.fit1) - c(4.49797727, -0.67995455, 0.28511364)) < tol2
))

}
{
# Function: lme
# Data: Zerbe plasma inorganic phosphate data from the previous test
#      (phosph.df: 20 subjects x 8 time points; see the NOTE there about
#      the original "13 control / 20 obese" header)
# References: Zerbe, G.O. 1979. Randomization Analysis of the Completely
#      Randomized Design Extended to Growth and Response Curves.
#      Journal of the American Statistical Association. 74(365):215-221.
# 
#      Chi, E.M. and G.C. Reinsel. 1989. Models for Longitudinal
#      Data with Random Effects and AR(1) Errors. Journal of the
#      American Statistical Association. 84(406):452-459.
# Description: linear mixed effects model using phosphate data from previous
#      test; Chi and Reinsel model (5.2) p. 457, random intercept and
#      AR(1) errors; check loglikelihood, variance of random intercept,
#      residual variance, serial correlation parameter, and fixed
#      effects coefficients; compare results with results from a SAS
#      run using proc MIXED
tol1 <- 1.4e-5
tol2 <- 8.2e-6

# Random intercept with AR(1) within-subject errors, fitted by ML.
ph.fit2 <- lme(fixed = Phosph ~ Time1 + Time2, random =  ~ 1 | Subject, method
         = "ML", cor = corAR1(), data = phosph.df)

all(c(
   abs(ph.fit2$logLik + 87.6371) < tol1, 
   # VarCorr column 1: random-intercept variance, then residual variance.
   abs(as.numeric(VarCorr(ph.fit2)[, 1]) - c(0.26449096, 0.25107215)) < tol2,
   # coef(corStruct, FALSE) extracts the AR(1) correlation parameter.
   abs(coef(ph.fit2$modelStruct$corStruct, FALSE) - 0.66827302) < tol1,
   abs(fixed.effects(ph.fit2) - c(4.50616342, -0.67420943, 0.27873604)) < tol2
   ))

}
{
# Function: lme
# Data: Zerbe plasma inorganic phosphate data from the previous tests
#      (phosph.df: 20 subjects x 8 time points; see the NOTE in the first
#      lme test about the original "13 control / 20 obese" header)
# References: Zerbe, G.O. 1979. Randomization Analysis of the Completely
#      Randomized Design Extended to Growth and Response Curves.
#      Journal of the American Statistical Association. 74(365):215-221.
# 
#      Chi, E.M. and G.C. Reinsel. 1989. Models for Longitudinal
#      Data with Random Effects and AR(1) Errors. Journal of the
#      American Statistical Association. 84(406):452-459.
# Description: linear mixed effects model using phosphate data from previous
#      2 tests; Chi and Reinsel model (5.3) p. 457, random intercept,
#      Time1 and Time2; check loglikelihood, variances and covariances of
#      the random effects, residual variance, and fixed effects
#      coefficients; compare results with results from a SAS run using
#      proc MIXED
tol1 <- 3.1e-5
tol2 <- 5e-7

ph.fit3 <- lme(fixed = Phosph ~ Time1 + Time2, random =  ~ Time1 + Time2 | 
        Subject, method = "ML", data = phosph.df)

VC.fit3 <- VarCorr(ph.fit3, rdig=6)  # Calculate Variances and Correlations
# Rows 1:3 of VC.fit3 are the random effects (intercept, Time1, Time2):
# column 1 = variances, column 2 = std deviations, columns 3-4 = the
# lower-triangle correlations.
Corrs <- as.numeric(c(VC.fit3[2:3,3],VC.fit3[3,4])) # Correlations
StDvs <- as.numeric (VC.fit3[,2])   # Std Deviations
# Reconstruct covariances as corr * sd_i * sd_j for the pairs
# (intercept,Time1), (intercept,Time2), (Time1,Time2).
Cov.fit3 <- Corrs * c(prod(StDvs[1:2]),prod(StDvs[c(1,3)]),prod(StDvs[2:3]))

all(c(
   abs(ph.fit3$logLik + 87.2143) < tol1,
   # Variances
   abs(as.numeric(VC.fit3[1:3,1])-c(0.58360416,0.09153937,0.03838265)) < tol2, 
   # Covariances
   abs(Cov.fit3 - c(-.12378559,-0.02389955,-0.01724908)) < tol2,      
   abs(ph.fit3$sigma^2  - 0.07300864) < tol2,
   abs(fixed.effects(ph.fit3) - c(4.49797727,-0.67995455,0.28511364)) < tol2
))

}
{
# Function: lme
# Data: Zerbe plasma inorganic phosphate data from the previous tests
#      (phosph.df: 20 subjects x 8 time points; see the NOTE in the first
#      lme test about the original "13 control / 20 obese" header)
# References: Zerbe, G.O. 1979. Randomization Analysis of the Completely
#      Randomized Design Extended to Growth and Response Curves.
#      Journal of the American Statistical Association. 74(365):215-221.
# 
#      Chi, E.M. and G.C. Reinsel. 1989. Models for Longitudinal
#      Data with Random Effects and AR(1) Errors. Journal of the
#      American Statistical Association. 84(406):452-459.
# Description: linear mixed effects model using phosphate data from previous
#      3 tests; Chi and Reinsel model (5.4) p. 457, random intercept,
#      Time1 and Time2 with AR(1) errors; check loglikelihood, variances
#      and covariances of the random effects, residual variance, serial
#      correlation parameter, and fixed effects coefficients; compare
#      results with results from a SAS run using proc MIXED
tol1 <- 1.5e-4
tol2 <- 7.9e-5

ph.fit4 <- lme(fixed=Phosph~ Time1+Time2,random=~Time1+Time2 | Subject,
      method ="ML", cor = corAR1(), data = phosph.df)

VC.fit4 <- VarCorr(ph.fit4, rdig=6)  # Calculate Variances and Correlations
# Rows 1:3 are the random effects (intercept, Time1, Time2): column 1 =
# variances, column 2 = std deviations, columns 3-4 = correlations.
Corrs <- as.numeric(c(VC.fit4[2:3,3],VC.fit4[3,4])) # Correlations
StDvs <- as.numeric (VC.fit4[,2])   # Std Deviations
# Covariances reconstructed as corr * sd_i * sd_j for the pairs
# (intercept,Time1), (intercept,Time2), (Time1,Time2).
Cov.fit4 <- Corrs * c(prod(StDvs[1:2]),prod(StDvs[c(1,3)]),prod(StDvs[2:3]))

all(c(
   abs(ph.fit4$logLik + 86.2251) < tol1,
   # Variances
   abs(as.numeric(VC.fit4[1:3,1])-c(0.53820947,0.06857939,0.02677853)) < tol2,
   # Covariances
   abs(Cov.fit4 - c(-0.09739809,-0.02546095,-0.01133221)) < tol2,
   abs(ph.fit4$sigma^2 - 0.10179602) < tol2,
   # AR(1) serial correlation parameter
   abs(coef(ph.fit4$modelStruct$corStruct, FALSE) - 0.29314985) < tol1,
   abs(fixed.effects(ph.fit4) - c(4.50130332,-0.68002515,0.28424439)) < tol2
   ))

}
{
# Clean up after the regression tests: remove every object created by the
# blocks above so the workspace is left as it was found, then return the
# literal TRUE (not the reassignable shortcut T) as the block's result.
rm(tol, tol1, x, y, lm.xy, lm.test, l.x, l.y, lm.coef)
rm(day, light, nitrite, nitrite.df, nls.cor, nls.out, nls.se)
rm(tol2, pu80, x.ne, x.s, x.mw, x.w, x.rtw, lm.anova, x1, x2, x3, x4, x5)
rm(x4c, y.df, glm.test, glm2.test, glm.anova, e.test, e.waves, e.vessels)
rm(e.counts, e.df, old.op, y.cow, y.site, y.A, y.B, pub.fitted.values)
rm(Subject, Time1, Time2, Phosph, phosph.df, ph.fit1, ph.fit2, ph.fit3, ph.fit4)
rm(VC.fit3, VC.fit4, Corrs, StDvs, Cov.fit3, Cov.fit4)
TRUE
}
