Rich Jones, Doug Tommet
July 9, 2022
These slides recreate the examples using R (lavaan, MplusAutomation, and lme4).
These are the R packages used in these examples:
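The exact list is not shown here; based on the namespace calls used throughout these slides, a minimal setup would be roughly:

# Packages inferred from the functions called in these slides
library(tidyverse)        # dplyr / tidyr / ggplot2: data handling, pivot_wider(), plots
library(gtsummary)        # tbl_summary() descriptive tables
library(lavaan)           # structural equation and latent growth models
library(MplusAutomation)  # build, run, and read the parallel Mplus models
library(lme4)             # lmer() mixed-effects comparisons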
| Characteristic | N = 2,442¹ |
|---|---|
| sad | |
| 0 | 1,606 (66%) |
| 1 | 656 (27%) |
| 2 | 87 (3.6%) |
| 3 | 93 (3.8%) |
| blues | |
| 0 | 1,906 (78%) |
| 1 | 355 (15%) |
| 2 | 75 (3.1%) |
| 3 | 106 (4.3%) |
| depress | |
| 0 | 1,573 (64%) |
| 1 | 635 (26%) |
| 2 | 92 (3.8%) |
| 3 | 142 (5.8%) |
| happy | |
| 0 | 1,638 (67%) |
| 1 | 247 (10%) |
| 2 | 380 (16%) |
| 3 | 177 (7.2%) |
| enjoy | |
| 0 | 1,855 (76%) |
| 1 | 184 (7.5%) |
| 2 | 258 (11%) |
| 3 | 145 (5.9%) |
| hopeful | |
| 0 | 1,409 (58%) |
| 1 | 169 (6.9%) |
| 2 | 386 (16%) |
| 3 | 478 (20%) |
| age | |
| 67 | 696 (29%) |
| 72 | 702 (29%) |
| 77 | 466 (19%) |
| 82 | 373 (15%) |
| 87 | 205 (8.4%) |
| male | 1,036 (42%) |
| age_high | 1,044 (43%) |

¹ n (%)
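The lavaan code that produced the one-factor CFA output below is not shown; a sketch consistent with that output (six indicators loading on a single factor f, ML estimation; the object names ex0101mod and ex0101fit are assumed) would be:

ex0101mod <- 'f =~ sad + blues + depress + happy + enjoy + hopeful'
ex0101fit <- lavaan::sem(ex0101mod, data = epese, estimator = "ML")
summary(ex0101fit, fit.measures = TRUE, standardized = TRUE)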
## lavaan 0.6-12 ended normally after 26 iterations
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 12
##
## Number of observations 2442
##
## Model Test User Model:
##
## Test statistic 483.785
## Degrees of freedom 9
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 4064.686
## Degrees of freedom 15
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.883
## Tucker-Lewis Index (TLI) 0.805
##
## Loglikelihood and Information Criteria:
##
## Loglikelihood user model (H0) -17278.438
## Loglikelihood unrestricted model (H1) -17036.545
##
## Akaike (AIC) 34580.876
## Bayesian (BIC) 34650.483
## Sample-size adjusted Bayesian (BIC) 34612.356
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.147
## 90 Percent confidence interval - lower 0.136
## 90 Percent confidence interval - upper 0.158
## P-value RMSEA <= 0.05 0.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.065
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Structured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f =~
## sad 1.000 0.510 0.689
## blues 0.963 0.035 27.318 0.000 0.491 0.665
## depress 1.193 0.040 29.522 0.000 0.608 0.741
## happy 1.230 0.047 26.269 0.000 0.627 0.634
## enjoy 1.075 0.042 25.308 0.000 0.548 0.606
## hopeful 0.883 0.056 15.913 0.000 0.450 0.366
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .sad 0.288 0.010 27.415 0.000 0.288 0.526
## .blues 0.304 0.011 28.353 0.000 0.304 0.558
## .depress 0.304 0.012 24.797 0.000 0.304 0.451
## .happy 0.586 0.020 29.385 0.000 0.586 0.598
## .enjoy 0.516 0.017 30.144 0.000 0.516 0.632
## .hopeful 1.315 0.039 33.688 0.000 1.315 0.866
## f 0.260 0.015 17.510 0.000 1.000 1.000
ex0101mod.mplus <- MplusAutomation::mplusObject(
  MODEL = "f by sad-hopeful; ",  # one factor measured by sad through hopeful
  OUTPUT = "STDYX; tech1;",      # standardized solution plus TECH1 parameter specification
  usevariables = c("sad", "blues", "depress", "happy", "enjoy", "hopeful"),
  rdata = epese
)
ex0101fit.mplus <- MplusAutomation::mplusModeler(ex0101mod.mplus,
dataout = "ex0101.dat",
modelout = "ex0101.inp",
run = TRUE)
##
## Running model: ex0101.inp
## System command: cd "/Users/douglastommet/Dropbox/work/ShortCourse/POSTED/SLIDES/LDA/lavaan" && "/Applications/Mplus/mplus" "ex0101.inp"
## Reading model: ex0101.out
## [1] TRUE
## Reading model: ex0101.out
## Estimated using ML
## Number of obs: 2442, number of (free) parameters: 18
##
## Model: Chi2(df = 9) = 483.785, p = 0
## Baseline model: Chi2(df = 15) = 4064.686, p = 0
##
## Fit Indices:
##
## CFI = 0.883, TLI = 0.805, SRMR = 0.057
## RMSEA = 0.147, 90% CI [0.136, 0.158], p < .05 = 0
## AIC = 34592.876, BIC = 34697.286
## NULL
## Reading model: ex0101.out
## paramHeader param est se est_se pval
## 1 F.BY SAD 1.000 0.000 999.000 999
## 2 F.BY BLUES 0.963 0.035 27.751 0
## 3 F.BY DEPRESS 1.193 0.040 30.075 0
## 4 F.BY HAPPY 1.230 0.049 24.975 0
## 5 F.BY ENJOY 1.075 0.045 23.950 0
## 6 F.BY HOPEFUL 0.883 0.057 15.489 0
## 7 Intercepts SAD 0.454 0.015 30.325 0
## 8 Intercepts BLUES 0.337 0.015 22.553 0
## 9 Intercepts DEPRESS 0.510 0.017 30.685 0
## 10 Intercepts HAPPY 0.630 0.020 31.451 0
## 11 Intercepts ENJOY 0.465 0.018 25.421 0
## 12 Intercepts HOPEFUL 0.973 0.025 39.018 0
## 13 Variances F 0.260 0.015 17.461 0
## 14 Residual.Variances SAD 0.288 0.011 27.263 0
## 15 Residual.Variances BLUES 0.304 0.011 27.677 0
## 16 Residual.Variances DEPRESS 0.304 0.013 23.883 0
## 17 Residual.Variances HAPPY 0.586 0.021 27.902 0
## 18 Residual.Variances ENJOY 0.516 0.018 28.700 0
## 19 Residual.Variances HOPEFUL 1.315 0.039 33.536 0
## Reading model: ex0101.out
## $parameterSpecification
## $nu
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL
## 1 1 2 3 4 5 6
##
## $lambda
## F
## SAD 0
## BLUES 7
## DEPRESS 8
## HAPPY 9
## ENJOY 10
## HOPEFUL 11
##
## $theta
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL
## SAD 12 NA NA NA NA NA
## BLUES 0 13 NA NA NA NA
## DEPRESS 0 0 14 NA NA NA
## HAPPY 0 0 0 15 NA NA
## ENJOY 0 0 0 0 16 NA
## HOPEFUL 0 0 0 0 0 17
##
## $alpha
## F
## 1 0
##
## $beta
## F
## F 0
##
## $psi
## F
## F 18
##
## attr(,"class")
## [1] "mplus.parameterSpecification" "list"
##
## $startingValues
## $nu
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL
## 1 0.454 0.337 0.51 0.63 0.465 0.973
##
## $lambda
## F
## SAD 1.000
## BLUES 0.991
## DEPRESS 1.218
## HAPPY 1.176
## ENJOY 1.031
## HOPEFUL 0.847
##
## $theta
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL
## SAD 0.274 NA NA NA NA NA
## BLUES 0.000 0.273 NA NA NA NA
## DEPRESS 0.000 0.000 0.337 NA NA NA
## HAPPY 0.000 0.000 0.000 0.49 NA NA
## ENJOY 0.000 0.000 0.000 0.00 0.408 NA
## HOPEFUL 0.000 0.000 0.000 0.00 0.000 0.759
##
## $alpha
## F
## 1 0
##
## $beta
## F
## F 0
##
## $psi
## F
## F 0.05
##
## attr(,"class")
## [1] "mplus.startingValues" "list"
##
## attr(,"class")
## [1] "mplus.tech1" "list"
## lavaan 0.6-12 ended normally after 43 iterations
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 20
##
## Number of observations 2442
##
## Model Test User Model:
##
## Test statistic 496.985
## Degrees of freedom 19
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 4124.409
## Degrees of freedom 27
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.883
## Tucker-Lewis Index (TLI) 0.834
##
## Loglikelihood and Information Criteria:
##
## Loglikelihood user model (H0) -17255.177
## Loglikelihood unrestricted model (H1) -17006.684
##
## Akaike (AIC) 34550.354
## Bayesian (BIC) 34666.365
## Sample-size adjusted Bayesian (BIC) 34602.820
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.101
## 90 Percent confidence interval - lower 0.094
## 90 Percent confidence interval - upper 0.109
## P-value RMSEA <= 0.05 0.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.045
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Structured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f =~
## sad 1.000 0.510 0.689
## blues 0.963 0.035 27.356 0.000 0.491 0.665
## depress 1.190 0.040 29.547 0.000 0.607 0.739
## happy 1.231 0.047 26.325 0.000 0.628 0.634
## enjoy 1.075 0.042 25.359 0.000 0.548 0.607
## hopeful 0.884 0.055 15.942 0.000 0.451 0.366
##
## Regressions:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f ~
## age 0.006 0.002 3.233 0.001 0.011 0.072
## male -0.134 0.023 -5.784 0.000 -0.263 -0.130
##
## Intercepts:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .sad 0.082 0.134 0.608 0.543 0.082 0.111
## .blues -0.021 0.130 -0.166 0.868 -0.021 -0.029
## .depress 0.067 0.160 0.417 0.677 0.067 0.081
## .happy 0.172 0.166 1.035 0.301 0.172 0.173
## .enjoy 0.064 0.145 0.445 0.656 0.064 0.071
## .hopeful 0.643 0.122 5.279 0.000 0.643 0.522
## .f 0.000 0.000 0.000
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .sad 0.288 0.010 27.441 0.000 0.288 0.525
## .blues 0.304 0.011 28.390 0.000 0.304 0.558
## .depress 0.306 0.012 24.928 0.000 0.306 0.453
## .happy 0.585 0.020 29.396 0.000 0.585 0.598
## .enjoy 0.516 0.017 30.155 0.000 0.516 0.632
## .hopeful 1.314 0.039 33.691 0.000 1.314 0.866
## .f 0.254 0.015 17.503 0.000 0.977 0.977
ex0102mod.mplus <- MplusAutomation::mplusObject(
  MODEL = "f by sad-hopeful;
           f on age male ;",     # MIMIC model: factor regressed on the covariates
  OUTPUT = "STDYX; tech1;",
  usevariables = c("sad", "blues", "depress", "happy",
                   "enjoy", "hopeful", "age", "male"),
  rdata = epese
)
ex0102fit.mplus <- MplusAutomation::mplusModeler(ex0102mod.mplus,
dataout = "ex0102.dat",
modelout = "ex0102.inp",
run = TRUE)
##
## Running model: ex0102.inp
## System command: cd "/Users/douglastommet/Dropbox/work/ShortCourse/POSTED/SLIDES/LDA/lavaan" && "/Applications/Mplus/mplus" "ex0102.inp"
## Reading model: ex0102.out
## [1] TRUE
## Reading model: ex0102.out
## Estimated using ML
## Number of obs: 2442, number of (free) parameters: 20
##
## Model: Chi2(df = 19) = 496.985, p = 0
## Baseline model: Chi2(df = 27) = 4124.409, p = 0
##
## Fit Indices:
##
## CFI = 0.883, TLI = 0.834, SRMR = 0.048
## RMSEA = 0.101, 90% CI [0.094, 0.109], p < .05 = 0
## AIC = 34550.354, BIC = 34666.365
## NULL
## Reading model: ex0102.out
## paramHeader param est se est_se pval
## 1 F.BY SAD 1.000 0.000 999.000 999.000
## 2 F.BY BLUES 0.963 0.035 27.784 0.000
## 3 F.BY DEPRESS 1.190 0.040 30.104 0.000
## 4 F.BY HAPPY 1.231 0.049 25.035 0.000
## 5 F.BY ENJOY 1.075 0.045 24.007 0.000
## 6 F.BY HOPEFUL 0.884 0.057 15.517 0.000
## 7 F.ON AGE 0.006 0.002 3.234 0.001
## 8 F.ON MALE -0.134 0.023 -5.778 0.000
## 9 Intercepts SAD 0.082 0.134 0.608 0.543
## 10 Intercepts BLUES -0.021 0.130 -0.165 0.869
## 11 Intercepts DEPRESS 0.067 0.160 0.417 0.677
## 12 Intercepts HAPPY 0.172 0.166 1.034 0.301
## 13 Intercepts ENJOY 0.065 0.145 0.445 0.657
## 14 Intercepts HOPEFUL 0.643 0.122 5.265 0.000
## 15 Residual.Variances SAD 0.288 0.011 27.278 0.000
## 16 Residual.Variances BLUES 0.304 0.011 27.713 0.000
## 17 Residual.Variances DEPRESS 0.306 0.013 24.034 0.000
## 18 Residual.Variances HAPPY 0.585 0.021 27.942 0.000
## 19 Residual.Variances ENJOY 0.516 0.018 28.740 0.000
## 20 Residual.Variances HOPEFUL 1.314 0.039 33.541 0.000
## 21 Residual.Variances F 0.254 0.015 17.456 0.000
## Reading model: ex0102.out
## $parameterSpecification
## $nu
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## 1 1 2 3 4 5 6 0 0
##
## $lambda
## F AGE MALE
## SAD 0 0 0
## BLUES 7 0 0
## DEPRESS 8 0 0
## HAPPY 9 0 0
## ENJOY 10 0 0
## HOPEFUL 11 0 0
## AGE 0 0 0
## MALE 0 0 0
##
## $theta
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## SAD 12 NA NA NA NA NA NA NA
## BLUES 0 13 NA NA NA NA NA NA
## DEPRESS 0 0 14 NA NA NA NA NA
## HAPPY 0 0 0 15 NA NA NA NA
## ENJOY 0 0 0 0 16 NA NA NA
## HOPEFUL 0 0 0 0 0 17 NA NA
## AGE 0 0 0 0 0 0 0 NA
## MALE 0 0 0 0 0 0 0 0
##
## $alpha
## F AGE MALE
## 1 0 0 0
##
## $beta
## F AGE MALE
## F 0 18 19
## AGE 0 0 0
## MALE 0 0 0
##
## $psi
## F AGE MALE
## F 20 NA NA
## AGE 0 0 NA
## MALE 0 0 0
##
## attr(,"class")
## [1] "mplus.parameterSpecification" "list"
##
## $startingValues
## $nu
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## 1 0.454 0.337 0.51 0.63 0.465 0.973 0 0
##
## $lambda
## F AGE MALE
## SAD 1.000 0 0
## BLUES 0.991 0 0
## DEPRESS 1.218 0 0
## HAPPY 1.176 0 0
## ENJOY 1.031 0 0
## HOPEFUL 0.847 0 0
## AGE 0.000 1 0
## MALE 0.000 0 1
##
## $theta
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## SAD 0.274 NA NA NA NA NA NA NA
## BLUES 0.000 0.273 NA NA NA NA NA NA
## DEPRESS 0.000 0.000 0.337 NA NA NA NA NA
## HAPPY 0.000 0.000 0.000 0.49 NA NA NA NA
## ENJOY 0.000 0.000 0.000 0.00 0.408 NA NA NA
## HOPEFUL 0.000 0.000 0.000 0.00 0.000 0.759 NA NA
## AGE 0.000 0.000 0.000 0.00 0.000 0.000 0 NA
## MALE 0.000 0.000 0.000 0.00 0.000 0.000 0 0
##
## $alpha
## F AGE MALE
## 1 0 74.316 0.424
##
## $beta
## F AGE MALE
## F 0 0 0
## AGE 0 0 0
## MALE 0 0 0
##
## $psi
## F AGE MALE
## F 0.05 NA NA
## AGE 0.00 40.696 NA
## MALE 0.00 -0.190 0.244
##
## attr(,"class")
## [1] "mplus.startingValues" "list"
##
## attr(,"class")
## [1] "mplus.tech1" "list"
ex0103mod <- 'f =~ sad + blues + depress + happy + enjoy + hopeful
f ~ age + male
# direct covariate effects on the indicators are fixed at zero,
# except for the direct effect of age on hopeful
sad ~ 0 * age + 0 * male
blues ~ 0 * age + 0 * male
depress ~ 0 * age + 0 * male
happy ~ 0 * age + 0 * male
enjoy ~ 0 * age + 0 * male
hopeful ~ age + 0 * male
'
ex0103fit <- lavaan::sem(ex0103mod, data = epese, estimator = "ML", meanstructure = TRUE)
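The call that printed the output below is not shown; it is consistent with a standard lavaan summary requesting fit measures and standardized estimates:

summary(ex0103fit, fit.measures = TRUE, standardized = TRUE)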
## lavaan 0.6-12 ended normally after 53 iterations
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 21
##
## Number of observations 2442
##
## Model Test User Model:
##
## Test statistic 490.382
## Degrees of freedom 18
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 4124.409
## Degrees of freedom 27
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.885
## Tucker-Lewis Index (TLI) 0.827
##
## Loglikelihood and Information Criteria:
##
## Loglikelihood user model (H0) -17251.875
## Loglikelihood unrestricted model (H1) -17006.684
##
## Akaike (AIC) 34545.751
## Bayesian (BIC) 34667.563
## Sample-size adjusted Bayesian (BIC) 34600.841
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.104
## 90 Percent confidence interval - lower 0.096
## 90 Percent confidence interval - upper 0.112
## P-value RMSEA <= 0.05 0.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.045
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Structured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f =~
## sad 1.000 0.510 0.689
## blues 0.963 0.035 27.367 0.000 0.491 0.665
## depress 1.191 0.040 29.563 0.000 0.607 0.740
## happy 1.230 0.047 26.324 0.000 0.627 0.634
## enjoy 1.074 0.042 25.351 0.000 0.548 0.606
## hopeful 0.872 0.055 15.733 0.000 0.445 0.361
##
## Regressions:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f ~
## age 0.005 0.002 3.007 0.003 0.011 0.067
## male -0.134 0.023 -5.785 0.000 -0.263 -0.130
## sad ~
## age 0.000 0.000 0.000
## male 0.000 0.000 0.000
## blues ~
## age 0.000 0.000 0.000
## male 0.000 0.000 0.000
## depress ~
## age 0.000 0.000 0.000
## male 0.000 0.000 0.000
## happy ~
## age 0.000 0.000 0.000
## male 0.000 0.000 0.000
## enjoy ~
## age 0.000 0.000 0.000
## male 0.000 0.000 0.000
## hopeful ~
## age 0.010 0.004 2.572 0.010 0.010 0.049
## male 0.000 0.000 0.000
##
## Intercepts:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .sad 0.110 0.135 0.818 0.413 0.110 0.149
## .blues 0.006 0.130 0.047 0.963 0.006 0.008
## .depress 0.101 0.160 0.627 0.531 0.101 0.122
## .happy 0.207 0.166 1.245 0.213 0.207 0.209
## .enjoy 0.096 0.145 0.658 0.511 0.096 0.106
## .hopeful -0.037 0.291 -0.126 0.900 -0.037 -0.030
## .f 0.000 0.000 0.000
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .sad 0.287 0.010 27.426 0.000 0.287 0.525
## .blues 0.304 0.011 28.381 0.000 0.304 0.558
## .depress 0.305 0.012 24.900 0.000 0.305 0.453
## .happy 0.586 0.020 29.400 0.000 0.586 0.598
## .enjoy 0.516 0.017 30.162 0.000 0.516 0.632
## .hopeful 1.312 0.039 33.719 0.000 1.312 0.864
## .f 0.254 0.015 17.511 0.000 0.977 0.977
ex0103mod.mplus <- MplusAutomation::mplusObject(
  MODEL = "f by sad-hopeful;
           f on age male ;
           ! fix the direct covariate effects on the indicators at zero,
           ! then free the direct effect of age on hopeful
           sad-hopeful ON age-male@0 ;
           hopeful ON age ;",
  OUTPUT = "STDYX; tech1;",
  usevariables = c("sad", "blues", "depress", "happy", "enjoy",
                   "hopeful", "age", "male"),
  rdata = epese
)
ex0103fit.mplus <- MplusAutomation::mplusModeler(ex0103mod.mplus,
dataout = "ex0103.dat",
modelout = "ex0103.inp",
run = TRUE)
##
## Running model: ex0103.inp
## System command: cd "/Users/douglastommet/Dropbox/work/ShortCourse/POSTED/SLIDES/LDA/lavaan" && "/Applications/Mplus/mplus" "ex0103.inp"
## Reading model: ex0103.out
## [1] TRUE
## Reading model: ex0103.out
## Estimated using ML
## Number of obs: 2442, number of (free) parameters: 21
##
## Model: Chi2(df = 18) = 490.382, p = 0
## Baseline model: Chi2(df = 27) = 4124.409, p = 0
##
## Fit Indices:
##
## CFI = 0.885, TLI = 0.827, SRMR = 0.048
## RMSEA = 0.104, 90% CI [0.096, 0.112], p < .05 = 0
## AIC = 34545.751, BIC = 34667.563
## NULL
## Reading model: ex0103.out
## paramHeader param est se est_se pval
## 1 F.BY SAD 1.000 0.000 999.000 999.000
## 2 F.BY BLUES 0.963 0.035 27.792 0.000
## 3 F.BY DEPRESS 1.191 0.040 30.113 0.000
## 4 F.BY HAPPY 1.230 0.049 25.034 0.000
## 5 F.BY ENJOY 1.074 0.045 24.002 0.000
## 6 F.BY HOPEFUL 0.872 0.057 15.319 0.000
## 7 F.ON AGE 0.005 0.002 3.007 0.003
## 8 F.ON MALE -0.134 0.023 -5.779 0.000
## 9 SAD.ON AGE 0.000 0.000 999.000 999.000
## 10 SAD.ON MALE 0.000 0.000 999.000 999.000
## 11 BLUES.ON AGE 0.000 0.000 999.000 999.000
## 12 BLUES.ON MALE 0.000 0.000 999.000 999.000
## 13 DEPRESS.ON AGE 0.000 0.000 999.000 999.000
## 14 DEPRESS.ON MALE 0.000 0.000 999.000 999.000
## 15 HAPPY.ON AGE 0.000 0.000 999.000 999.000
## 16 HAPPY.ON MALE 0.000 0.000 999.000 999.000
## 17 ENJOY.ON AGE 0.000 0.000 999.000 999.000
## 18 ENJOY.ON MALE 0.000 0.000 999.000 999.000
## 19 HOPEFUL.ON AGE 0.010 0.004 2.572 0.010
## 20 HOPEFUL.ON MALE 0.000 0.000 999.000 999.000
## 21 Intercepts SAD 0.110 0.135 0.818 0.413
## 22 Intercepts BLUES 0.006 0.130 0.047 0.963
## 23 Intercepts DEPRESS 0.101 0.160 0.627 0.531
## 24 Intercepts HAPPY 0.207 0.166 1.245 0.213
## 25 Intercepts ENJOY 0.096 0.145 0.657 0.511
## 26 Intercepts HOPEFUL -0.037 0.291 -0.126 0.900
## 27 Residual.Variances SAD 0.287 0.011 27.267 0.000
## 28 Residual.Variances BLUES 0.304 0.011 27.708 0.000
## 29 Residual.Variances DEPRESS 0.305 0.013 24.009 0.000
## 30 Residual.Variances HAPPY 0.586 0.021 27.941 0.000
## 31 Residual.Variances ENJOY 0.516 0.018 28.747 0.000
## 32 Residual.Variances HOPEFUL 1.312 0.039 33.574 0.000
## 33 Residual.Variances F 0.254 0.015 17.467 0.000
## Reading model: ex0103.out
## $parameterSpecification
## $nu
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## 1 0 0 0 0 0 0 0 0
##
## $lambda
## F SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## SAD 0 0 0 0 0 0 0 0 0
## BLUES 0 0 0 0 0 0 0 0 0
## DEPRESS 0 0 0 0 0 0 0 0 0
## HAPPY 0 0 0 0 0 0 0 0 0
## ENJOY 0 0 0 0 0 0 0 0 0
## HOPEFUL 0 0 0 0 0 0 0 0 0
## AGE 0 0 0 0 0 0 0 0 0
## MALE 0 0 0 0 0 0 0 0 0
##
## $theta
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## SAD 0 NA NA NA NA NA NA NA
## BLUES 0 0 NA NA NA NA NA NA
## DEPRESS 0 0 0 NA NA NA NA NA
## HAPPY 0 0 0 0 NA NA NA NA
## ENJOY 0 0 0 0 0 NA NA NA
## HOPEFUL 0 0 0 0 0 0 NA NA
## AGE 0 0 0 0 0 0 0 NA
## MALE 0 0 0 0 0 0 0 0
##
## $alpha
## F SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## 1 0 1 2 3 4 5 6 0 0
##
## $beta
## F SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## F 0 0 0 0 0 0 0 7 8
## SAD 0 0 0 0 0 0 0 0 0
## BLUES 9 0 0 0 0 0 0 0 0
## DEPRESS 10 0 0 0 0 0 0 0 0
## HAPPY 11 0 0 0 0 0 0 0 0
## ENJOY 12 0 0 0 0 0 0 0 0
## HOPEFUL 13 0 0 0 0 0 0 14 0
## AGE 0 0 0 0 0 0 0 0 0
## MALE 0 0 0 0 0 0 0 0 0
##
## $psi
## F SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## F 15 NA NA NA NA NA NA NA NA
## SAD 0 16 NA NA NA NA NA NA NA
## BLUES 0 0 17 NA NA NA NA NA NA
## DEPRESS 0 0 0 18 NA NA NA NA NA
## HAPPY 0 0 0 0 19 NA NA NA NA
## ENJOY 0 0 0 0 0 20 NA NA NA
## HOPEFUL 0 0 0 0 0 0 21 NA NA
## AGE 0 0 0 0 0 0 0 0 NA
## MALE 0 0 0 0 0 0 0 0 0
##
## attr(,"class")
## [1] "mplus.parameterSpecification" "list"
##
## $startingValues
## $nu
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## 1 0 0 0 0 0 0 0 0
##
## $lambda
## F SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## SAD 0 1 0 0 0 0 0 0 0
## BLUES 0 0 1 0 0 0 0 0 0
## DEPRESS 0 0 0 1 0 0 0 0 0
## HAPPY 0 0 0 0 1 0 0 0 0
## ENJOY 0 0 0 0 0 1 0 0 0
## HOPEFUL 0 0 0 0 0 0 1 0 0
## AGE 0 0 0 0 0 0 0 1 0
## MALE 0 0 0 0 0 0 0 0 1
##
## $theta
## SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## SAD 0 NA NA NA NA NA NA NA
## BLUES 0 0 NA NA NA NA NA NA
## DEPRESS 0 0 0 NA NA NA NA NA
## HAPPY 0 0 0 0 NA NA NA NA
## ENJOY 0 0 0 0 0 NA NA NA
## HOPEFUL 0 0 0 0 0 0 NA NA
## AGE 0 0 0 0 0 0 0 NA
## MALE 0 0 0 0 0 0 0 0
##
## $alpha
## F SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## 1 0 0.454 0.337 0.51 0.63 0.465 0.973 74.316 0.424
##
## $beta
## F SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## F 0.000 0 0 0 0 0 0 0 0
## SAD 1.000 0 0 0 0 0 0 0 0
## BLUES 0.991 0 0 0 0 0 0 0 0
## DEPRESS 1.218 0 0 0 0 0 0 0 0
## HAPPY 1.176 0 0 0 0 0 0 0 0
## ENJOY 1.031 0 0 0 0 0 0 0 0
## HOPEFUL 0.847 0 0 0 0 0 0 0 0
## AGE 0.000 0 0 0 0 0 0 0 0
## MALE 0.000 0 0 0 0 0 0 0 0
##
## $psi
## F SAD BLUES DEPRESS HAPPY ENJOY HOPEFUL AGE MALE
## F 0.05 NA NA NA NA NA NA NA NA
## SAD 0.00 0.274 NA NA NA NA NA NA NA
## BLUES 0.00 0.000 0.273 NA NA NA NA NA NA
## DEPRESS 0.00 0.000 0.000 0.337 NA NA NA NA NA
## HAPPY 0.00 0.000 0.000 0.000 0.49 NA NA NA NA
## ENJOY 0.00 0.000 0.000 0.000 0.00 0.408 NA NA NA
## HOPEFUL 0.00 0.000 0.000 0.000 0.00 0.000 0.759 NA NA
## AGE 0.00 0.000 0.000 0.000 0.00 0.000 0.000 40.696 NA
## MALE 0.00 0.000 0.000 0.000 0.00 0.000 0.000 -0.190 0.244
##
## attr(,"class")
## [1] "mplus.startingValues" "list"
##
## attr(,"class")
## [1] "mplus.tech1" "list"
alcohol1 %>%
  select(-id) %>%
  gtsummary::tbl_summary(type = list(c(peer, cpeer) ~ "continuous",
                                     c(male, coa, ccoa) ~ "categorical"))
| Characteristic | N = 246¹ |
|---|---|
| age | |
| 14 | 82 (33%) |
| 15 | 82 (33%) |
| 16 | 82 (33%) |
| coa | |
| 0 | 135 (55%) |
| 1 | 111 (45%) |
| male | |
| 0 | 120 (49%) |
| 1 | 126 (51%) |
| age14 | |
| 0 | 82 (33%) |
| 1 | 82 (33%) |
| 2 | 82 (33%) |
| alcuse | 1.00 (0.00, 1.73) |
| peer | 0.89 (0.00, 1.55) |
| cpeer | -0.12 (-1.02, 0.53) |
| ccoa | |
| -0.451 | 135 (55%) |
| 0.549 | 111 (45%) |

¹ n (%); Median (IQR)
ggplot(alcohol1, aes(x = age, y = alcuse, group = id, color = id)) +
geom_line(position = position_jitter(width = .5)) +
labs(title = "Alcohol use by age", x = "Age", y = "Alcohol Use") +
guides(color = "none")
ex0104fit.lme <- lme4::lmer(alcuse ~ (1 | id), data = alcohol1, REML = FALSE)
summary(ex0104fit.lme)
## Linear mixed model fit by maximum likelihood ['lmerMod']
## Formula: alcuse ~ (1 | id)
## Data: alcohol1
##
## AIC BIC logLik deviance df.resid
## 676.2 686.7 -335.1 670.2 243
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -1.8865 -0.3076 -0.3067 0.6137 2.8567
##
## Random effects:
## Groups Name Variance Std.Dev.
## id (Intercept) 0.5639 0.7509
## Residual 0.5617 0.7495
## Number of obs: 246, groups: id, 82
##
## Fixed effects:
## Estimate Std. Error t value
## (Intercept) 0.92195 0.09571 9.633
alc.wide <- alcohol1 %>%
  select(id, age, alcuse) %>%
  pivot_wider(id_cols = id,
              names_from = age,
              names_prefix = "alc",   # yields columns alc14, alc15, alc16
              values_from = alcuse)
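The reshaped data set has one row per person, with the three measurement waves as columns; a quick check (not part of the original code) would be:

head(alc.wide)  # columns: id, alc14, alc15, alc16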
ex0104mod <- 'i =~ 1*alc14 + 1*alc15 + 1*alc16  # random intercept, all loadings fixed at 1
alc14 ~~ v1*alc14                               # residual variances constrained equal (label v1)
alc15 ~~ v1*alc15
alc16 ~~ v1*alc16
i ~ 1                                           # intercept mean
i ~~ i                                          # intercept variance'
ex0104fit.lavaan <- lavaan::lavaan(ex0104mod, data = alc.wide, estimator = "MLR")
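The summary call is again not shown; the output below (fit measures plus standardized estimates) is consistent with:

summary(ex0104fit.lavaan, fit.measures = TRUE, standardized = TRUE)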
## lavaan 0.6-12 ended normally after 13 iterations
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 5
## Number of equality constraints 2
##
## Number of observations 82
##
## Model Test User Model:
## Standard Robust
## Test Statistic 37.783 30.060
## Degrees of freedom 6 6
## P-value (Chi-square) 0.000 0.000
## Scaling correction factor 1.257
## Yuan-Bentler correction (Mplus variant)
##
## Model Test Baseline Model:
##
## Test statistic 80.798 44.816
## Degrees of freedom 3 3
## P-value 0.000 0.000
## Scaling correction factor 1.803
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.591 0.425
## Tucker-Lewis Index (TLI) 0.796 0.712
##
## Robust Comparative Fit Index (CFI) 0.599
## Robust Tucker-Lewis Index (TLI) 0.799
##
## Loglikelihood and Information Criteria:
##
## Loglikelihood user model (H0) -335.078 -335.078
## Scaling correction factor 0.700
## for the MLR correction
## Loglikelihood unrestricted model (H1) -316.186 -316.186
## Scaling correction factor 1.227
## for the MLR correction
##
## Akaike (AIC) 676.156 676.156
## Bayesian (BIC) 683.376 683.376
## Sample-size adjusted Bayesian (BIC) 673.914 673.914
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.254 0.221
## 90 Percent confidence interval - lower 0.180 0.154
## 90 Percent confidence interval - upper 0.335 0.294
## P-value RMSEA <= 0.05 0.000 0.000
##
## Robust RMSEA 0.248
## 90 Percent confidence interval - lower 0.164
## 90 Percent confidence interval - upper 0.339
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.178 0.178
##
## Parameter Estimates:
##
## Standard errors Sandwich
## Information bread Observed
## Observed information based on Hessian
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## i =~
## alc14 1.000 0.751 0.708
## alc15 1.000 0.751 0.708
## alc16 1.000 0.751 0.708
##
## Intercepts:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## i 0.922 0.096 9.633 0.000 1.228 1.228
## .alc14 0.000 0.000 0.000
## .alc15 0.000 0.000 0.000
## .alc16 0.000 0.000 0.000
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .alc14 (v1) 0.562 0.083 6.788 0.000 0.562 0.499
## .alc15 (v1) 0.562 0.083 6.788 0.000 0.562 0.499
## .alc16 (v1) 0.562 0.083 6.788 0.000 0.562 0.499
## i 0.564 0.106 5.314 0.000 1.000 1.000
ex0104mod.mplus <- MplusAutomation::mplusObject(
  ANALYSIS = "ESTIMATOR = MLR; ",
  MODEL = "i by alc14 - alc16@1 ;  ! intercept factor, loadings fixed at 1
           [alc14 - alc16@0] ;     ! observed intercepts fixed at zero
           [i*] ;                  ! free the intercept factor mean
           alc14 - alc16 (1) ;     ! residual variances held equal",
  OUTPUT = "STDYX; tech1;",
  usevariables = c("alc14", "alc15", "alc16"),
  rdata = alc.wide
)
ex0104fit.mplus <- MplusAutomation::mplusModeler(ex0104mod.mplus,
dataout = "ex0104.dat",
modelout = "ex0104.inp",
run = TRUE)
##
## Running model: ex0104.inp
## System command: cd "/Users/douglastommet/Dropbox/work/ShortCourse/POSTED/SLIDES/LDA/lavaan" && "/Applications/Mplus/mplus" "ex0104.inp"
## Reading model: ex0104.out
## [1] TRUE
## Reading model: ex0104.out
## Estimated using MLR
## Number of obs: 82, number of (free) parameters: 3
##
## Model: Chi2(df = 6) = 30.06, p = 0
## Baseline model: Chi2(df = 3) = 44.816, p = 0
##
## Fit Indices:
##
## CFI = 0.425, TLI = 0.712, SRMR = 0.152
## RMSEA = 0.221, 90% CI [0.146, 0.303], p < .05 = 0
## AIC = 676.156, BIC = 683.376
## NULL
## Reading model: ex0104.out
## paramHeader param est se est_se pval
## 1 I.BY ALC14 1.000 0.000 999.000 999
## 2 I.BY ALC15 1.000 0.000 999.000 999
## 3 I.BY ALC16 1.000 0.000 999.000 999
## 4 Means I 0.922 0.096 9.633 0
## 5 Intercepts ALC14 0.000 0.000 999.000 999
## 6 Intercepts ALC15 0.000 0.000 999.000 999
## 7 Intercepts ALC16 0.000 0.000 999.000 999
## 8 Variances I 0.564 0.106 5.314 0
## 9 Residual.Variances ALC14 0.562 0.083 6.788 0
## 10 Residual.Variances ALC15 0.562 0.083 6.788 0
## 11 Residual.Variances ALC16 0.562 0.083 6.788 0
## Reading model: ex0104.out
## $parameterSpecification
## $nu
## ALC14 ALC15 ALC16
## 1 0 0 0
##
## $lambda
## I
## ALC14 0
## ALC15 0
## ALC16 0
##
## $theta
## ALC14 ALC15 ALC16
## ALC14 1 NA NA
## ALC15 0 1 NA
## ALC16 0 0 1
##
## $alpha
## I
## 1 2
##
## $beta
## I
## I 0
##
## $psi
## I
## I 3
##
## attr(,"class")
## [1] "mplus.parameterSpecification" "list"
##
## $startingValues
## $nu
## ALC14 ALC15 ALC16
## 1 0 0 0
##
## $lambda
## I
## ALC14 1
## ALC15 1
## ALC16 1
##
## $theta
## ALC14 ALC15 ALC16
## ALC14 0.435 NA NA
## ALC15 0.000 0.542 NA
## ALC16 0.000 0.000 0.637
##
## $alpha
## I
## 1 0
##
## $beta
## I
## I 0
##
## $psi
## I
## I 0.05
##
## attr(,"class")
## [1] "mplus.startingValues" "list"
##
## attr(,"class")
## [1] "mplus.tech1" "list"
ex0105fit.lme <- lme4::lmer(alcuse ~ age14 + (1 + age14 | id ), data = alcohol1, REML = FALSE)
summary(ex0105fit.lme)
## Linear mixed model fit by maximum likelihood ['lmerMod']
## Formula: alcuse ~ age14 + (1 + age14 | id)
## Data: alcohol1
##
## AIC BIC logLik deviance df.resid
## 648.6 669.6 -318.3 636.6 240
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -2.47999 -0.38401 -0.07553 0.39001 2.50685
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## id (Intercept) 0.6244 0.7902
## age14 0.1512 0.3888 -0.22
## Residual 0.3373 0.5808
## Number of obs: 246, groups: id, 82
##
## Fixed effects:
## Estimate Std. Error t value
## (Intercept) 0.65130 0.10508 6.198
## age14 0.27065 0.06245 4.334
##
## Correlation of Fixed Effects:
## (Intr)
## age14 -0.441
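The lavaan specification for this linear growth model is not shown; a sketch consistent with the output below (intercept and slope factors, residual variances held equal, assumed object names ex0105mod and ex0105fit.lavaan) would be:

ex0105mod <- 'i =~ 1*alc14 + 1*alc15 + 1*alc16
s =~ 0*alc14 + 1*alc15 + 2*alc16
alc14 ~~ v1*alc14
alc15 ~~ v1*alc15
alc16 ~~ v1*alc16
i ~ 1
s ~ 1
i ~~ i
s ~~ s
i ~~ s'
ex0105fit.lavaan <- lavaan::lavaan(ex0105mod, data = alc.wide, estimator = "MLR")
summary(ex0105fit.lavaan)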
## lavaan 0.6-12 ended normally after 21 iterations
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 8
## Number of equality constraints 2
##
## Number of observations 82
##
## Model Test User Model:
## Standard Robust
## Test Statistic 4.238 2.939
## Degrees of freedom 3 3
## P-value (Chi-square) 0.237 0.401
## Scaling correction factor 1.442
## Yuan-Bentler correction (Mplus variant)
##
## Parameter Estimates:
##
## Standard errors Sandwich
## Information bread Observed
## Observed information based on Hessian
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|)
## i =~
## alc14 1.000
## alc15 1.000
## alc16 1.000
## s =~
## alc14 0.000
## alc15 1.000
## alc16 2.000
##
## Covariances:
## Estimate Std.Err z-value P(>|z|)
## i ~~
## s -0.068 0.074 -0.920 0.357
##
## Intercepts:
## Estimate Std.Err z-value P(>|z|)
## i 0.651 0.105 6.198 0.000
## s 0.271 0.062 4.334 0.000
## .alc14 0.000
## .alc15 0.000
## .alc16 0.000
##
## Variances:
## Estimate Std.Err z-value P(>|z|)
## .alc14 (v1) 0.337 0.066 5.105 0.000
## .alc15 (v1) 0.337 0.066 5.105 0.000
## .alc16 (v1) 0.337 0.066 5.105 0.000
## i 0.624 0.162 3.861 0.000
## s 0.151 0.056 2.721 0.007
ex0105mod.mplus <- MplusAutomation::mplusObject(
  ANALYSIS = "ESTIMATOR = MLR; ",
  MODEL = "i by alc14 - alc16@1 ;         ! intercept factor, loadings fixed at 1
           s by alc14@0 alc15@1 alc16@2 ; ! slope loadings 0/1/2 (time since age 14)
           [alc14 - alc16@0] ;            ! observed intercepts fixed at zero
           [i* s*] ;                      ! free the growth factor means
           alc14 - alc16 (1) ;            ! residual variances held equal",
  OUTPUT = "STDYX; tech1;",
  usevariables = c("alc14", "alc15", "alc16"),
  rdata = alc.wide
)
ex0105fit.mplus <- MplusAutomation::mplusModeler(ex0105mod.mplus,
dataout = "ex0105.dat",
modelout = "ex0105.inp",
run = TRUE)
##
## Running model: ex0105.inp
## System command: cd "/Users/douglastommet/Dropbox/work/ShortCourse/POSTED/SLIDES/LDA/lavaan" && "/Applications/Mplus/mplus" "ex0105.inp"
## Reading model: ex0105.out
## [1] TRUE
## Reading model: ex0105.out
## Estimated using MLR
## Number of obs: 82, number of (free) parameters: 6
##
## Model: Chi2(df = 3) = 2.939, p = 0.4011
## Baseline model: Chi2(df = 3) = 44.816, p = 0
##
## Fit Indices:
##
## CFI = 1, TLI = 1, SRMR = 0.055
## RMSEA = 0, 90% CI [0, 0.185], p < .05 = 0.49
## AIC = 648.611, BIC = 663.051
## NULL
## Reading model: ex0105.out
## paramHeader param est se est_se pval
## 1 I.BY ALC14 1.000 0.000 999.000 999.000
## 2 I.BY ALC15 1.000 0.000 999.000 999.000
## 3 I.BY ALC16 1.000 0.000 999.000 999.000
## 4 S.BY ALC14 0.000 0.000 999.000 999.000
## 5 S.BY ALC15 1.000 0.000 999.000 999.000
## 6 S.BY ALC16 2.000 0.000 999.000 999.000
## 7 S.WITH I -0.068 0.074 -0.920 0.357
## 8 Means I 0.651 0.105 6.198 0.000
## 9 Means S 0.271 0.062 4.334 0.000
## 10 Intercepts ALC14 0.000 0.000 999.000 999.000
## 11 Intercepts ALC15 0.000 0.000 999.000 999.000
## 12 Intercepts ALC16 0.000 0.000 999.000 999.000
## 13 Variances I 0.624 0.162 3.861 0.000
## 14 Variances S 0.151 0.056 2.721 0.007
## 15 Residual.Variances ALC14 0.337 0.066 5.105 0.000
## 16 Residual.Variances ALC15 0.337 0.066 5.105 0.000
## 17 Residual.Variances ALC16 0.337 0.066 5.105 0.000
## Reading model: ex0105.out
## $parameterSpecification
## $nu
## ALC14 ALC15 ALC16
## 1 0 0 0
##
## $lambda
## I S
## ALC14 0 0
## ALC15 0 0
## ALC16 0 0
##
## $theta
## ALC14 ALC15 ALC16
## ALC14 1 NA NA
## ALC15 0 1 NA
## ALC16 0 0 1
##
## $alpha
## I S
## 1 2 3
##
## $beta
## I S
## I 0 0
## S 0 0
##
## $psi
## I S
## I 4 NA
## S 5 6
##
## attr(,"class")
## [1] "mplus.parameterSpecification" "list"
##
## $startingValues
## $nu
## ALC14 ALC15 ALC16
## 1 0 0 0
##
## $lambda
## I S
## ALC14 1 0
## ALC15 1 1
## ALC16 1 2
##
## $theta
## ALC14 ALC15 ALC16
## ALC14 0.435 NA NA
## ALC15 0.000 0.542 NA
## ALC16 0.000 0.000 0.637
##
## $alpha
## I S
## 1 0 0
##
## $beta
## I S
## I 0 0
## S 0 0
##
## $psi
## I S
## I 0.05 NA
## S 0.00 0.05
##
## attr(,"class")
## [1] "mplus.startingValues" "list"
##
## attr(,"class")
## [1] "mplus.tech1" "list"