Part A of this exercise file involves estimating latent traits. Load the dataset into R using the load()
function, and inspect the defined objects. The object named data
represents a 15-item dichotomously scored test
mirt.model()
syntax to be used for later analysis. (Hint, use summary(..., suppress = #
))
library(mirt)
## Loading required package: stats4
## Loading required package: lattice
# Load the saved workspace containing the item response data ('data')
load('Exercise_03.Rdata')
# Exploratory two-factor model for the 15 dichotomous items
# (defaults: '2PL' itemtype, 'EM' estimation)
mod <- mirt(data, 2)
##
Iteration: 1, Log-Lik: -13503.606, Max-Change: 0.51101
Iteration: 2, Log-Lik: -13212.490, Max-Change: 0.16816
Iteration: 3, Log-Lik: -13176.891, Max-Change: 0.09760
Iteration: 4, Log-Lik: -13168.143, Max-Change: 0.06082
Iteration: 5, Log-Lik: -13165.183, Max-Change: 0.03954
Iteration: 6, Log-Lik: -13163.932, Max-Change: 0.02766
Iteration: 7, Log-Lik: -13162.939, Max-Change: 0.01794
Iteration: 8, Log-Lik: -13162.516, Max-Change: 0.01570
Iteration: 9, Log-Lik: -13162.145, Max-Change: 0.01390
Iteration: 10, Log-Lik: -13160.315, Max-Change: 0.01101
Iteration: 11, Log-Lik: -13160.059, Max-Change: 0.01031
Iteration: 12, Log-Lik: -13159.819, Max-Change: 0.00999
Iteration: 13, Log-Lik: -13158.488, Max-Change: 0.00921
Iteration: 14, Log-Lik: -13158.295, Max-Change: 0.00918
Iteration: 15, Log-Lik: -13158.111, Max-Change: 0.00898
Iteration: 16, Log-Lik: -13157.105, Max-Change: 0.00911
Iteration: 17, Log-Lik: -13156.960, Max-Change: 0.00876
Iteration: 18, Log-Lik: -13156.822, Max-Change: 0.00855
Iteration: 19, Log-Lik: -13156.053, Max-Change: 0.00746
Iteration: 20, Log-Lik: -13155.944, Max-Change: 0.00745
Iteration: 21, Log-Lik: -13155.839, Max-Change: 0.00738
Iteration: 22, Log-Lik: -13155.260, Max-Change: 0.00669
Iteration: 23, Log-Lik: -13155.180, Max-Change: 0.00657
Iteration: 24, Log-Lik: -13155.102, Max-Change: 0.00645
Iteration: 25, Log-Lik: -13154.677, Max-Change: 0.00556
Iteration: 26, Log-Lik: -13154.618, Max-Change: 0.00551
Iteration: 27, Log-Lik: -13154.561, Max-Change: 0.00544
Iteration: 28, Log-Lik: -13154.253, Max-Change: 0.00491
Iteration: 29, Log-Lik: -13154.212, Max-Change: 0.00478
Iteration: 30, Log-Lik: -13154.172, Max-Change: 0.00470
Iteration: 31, Log-Lik: -13153.952, Max-Change: 0.00383
Iteration: 32, Log-Lik: -13153.923, Max-Change: 0.00379
Iteration: 33, Log-Lik: -13153.895, Max-Change: 0.00375
Iteration: 34, Log-Lik: -13153.745, Max-Change: 0.00338
Iteration: 35, Log-Lik: -13153.725, Max-Change: 0.00329
Iteration: 36, Log-Lik: -13153.705, Max-Change: 0.00321
Iteration: 37, Log-Lik: -13153.602, Max-Change: 0.00273
Iteration: 38, Log-Lik: -13153.589, Max-Change: 0.00268
Iteration: 39, Log-Lik: -13153.576, Max-Change: 0.00262
Iteration: 40, Log-Lik: -13153.506, Max-Change: 0.00226
Iteration: 41, Log-Lik: -13153.497, Max-Change: 0.00221
Iteration: 42, Log-Lik: -13153.488, Max-Change: 0.00215
Iteration: 43, Log-Lik: -13153.441, Max-Change: 0.00183
Iteration: 44, Log-Lik: -13153.435, Max-Change: 0.00178
Iteration: 45, Log-Lik: -13153.429, Max-Change: 0.00174
Iteration: 46, Log-Lik: -13153.398, Max-Change: 0.00149
Iteration: 47, Log-Lik: -13153.394, Max-Change: 0.00145
Iteration: 48, Log-Lik: -13153.391, Max-Change: 0.00141
Iteration: 49, Log-Lik: -13153.370, Max-Change: 0.00120
Iteration: 50, Log-Lik: -13153.368, Max-Change: 0.00116
Iteration: 51, Log-Lik: -13153.365, Max-Change: 0.00113
Iteration: 52, Log-Lik: -13153.352, Max-Change: 0.00095
Iteration: 53, Log-Lik: -13153.350, Max-Change: 0.00098
Iteration: 54, Log-Lik: -13153.349, Max-Change: 0.00087
Iteration: 55, Log-Lik: -13153.343, Max-Change: 0.00061
Iteration: 56, Log-Lik: -13153.342, Max-Change: 0.00051
Iteration: 57, Log-Lik: -13153.341, Max-Change: 0.00056
Iteration: 58, Log-Lik: -13153.335, Max-Change: 0.00064
Iteration: 59, Log-Lik: -13153.335, Max-Change: 0.00058
Iteration: 60, Log-Lik: -13153.334, Max-Change: 0.00054
Iteration: 61, Log-Lik: -13153.330, Max-Change: 0.00040
Iteration: 62, Log-Lik: -13153.330, Max-Change: 0.00042
Iteration: 63, Log-Lik: -13153.329, Max-Change: 0.00042
Iteration: 64, Log-Lik: -13153.327, Max-Change: 0.00019
Iteration: 65, Log-Lik: -13153.327, Max-Change: 0.00018
Iteration: 66, Log-Lik: -13153.326, Max-Change: 0.00017
Iteration: 67, Log-Lik: -13153.325, Max-Change: 0.00018
Iteration: 68, Log-Lik: -13153.325, Max-Change: 0.00018
Iteration: 69, Log-Lik: -13153.325, Max-Change: 0.00018
Iteration: 70, Log-Lik: -13153.324, Max-Change: 0.00017
Iteration: 71, Log-Lik: -13153.324, Max-Change: 0.00016
Iteration: 72, Log-Lik: -13153.324, Max-Change: 0.00016
Iteration: 73, Log-Lik: -13153.323, Max-Change: 0.00015
Iteration: 74, Log-Lik: -13153.323, Max-Change: 0.00015
Iteration: 75, Log-Lik: -13153.323, Max-Change: 0.00015
Iteration: 76, Log-Lik: -13153.322, Max-Change: 0.00014
Iteration: 77, Log-Lik: -13153.322, Max-Change: 0.00014
Iteration: 78, Log-Lik: -13153.322, Max-Change: 0.00014
Iteration: 79, Log-Lik: -13153.322, Max-Change: 0.00013
Iteration: 80, Log-Lik: -13153.322, Max-Change: 0.00013
Iteration: 81, Log-Lik: -13153.322, Max-Change: 0.00013
Iteration: 82, Log-Lik: -13153.321, Max-Change: 0.00012
Iteration: 83, Log-Lik: -13153.321, Max-Change: 0.00012
Iteration: 84, Log-Lik: -13153.321, Max-Change: 0.00012
Iteration: 85, Log-Lik: -13153.321, Max-Change: 0.00011
Iteration: 86, Log-Lik: -13153.321, Max-Change: 0.00011
Iteration: 87, Log-Lik: -13153.321, Max-Change: 0.00011
Iteration: 88, Log-Lik: -13153.320, Max-Change: 0.00010
# Oblimin-rotated factor loadings; loadings with absolute value below .2 are suppressed
summary(mod, suppress = .2)
##
## Rotation: oblimin
##
## Rotated factor loadings:
##
## F1 F2 h2
## Item_1 NA -0.313 0.190
## Item_2 NA -0.370 0.197
## Item_3 NA -0.534 0.289
## Item_4 NA -0.451 0.169
## Item_5 NA -0.578 0.320
## Item_6 NA -0.548 0.309
## Item_7 NA -0.507 0.287
## Item_8 -0.436 NA 0.235
## Item_9 -0.641 NA 0.365
## Item_10 -0.454 NA 0.228
## Item_11 -0.373 NA 0.193
## Item_12 -0.376 NA 0.181
## Item_13 -0.465 NA 0.308
## Item_14 -0.337 NA 0.154
## Item_15 -0.561 NA 0.284
##
## Rotated SS loadings: 1.792 1.683
##
## Factor correlations:
##
## F1 F2
## F1 1.000 0.515
## F2 0.515 1.000
method = 'MHRM'
or method = 'QMCEM'
). If possible, define an appropriate mirtCluster()
to estimate the Monte Carlo log-likelihood faster for the MHRM. Item fit statistics may also be helpful here.
mirtCluster()
# Confirmatory structure: items 1-7 load on F1, items 8-15 on F2;
# COV frees the F1-F2 covariance for estimation
model <- mirt.model('F1 = 1-7
F2 = 8-15
COV = F1*F2')
# Fit confirmatory 2PL and Rasch models with the stochastic MHRM estimator
cmod.2PL <- mirt(data, model, method = 'MHRM', verbose = FALSE)
cmod.rasch <- mirt(data, model, itemtype = 'Rasch', method = 'MHRM', verbose = FALSE)
# Compare the nested models via information criteria / likelihood ratio
anova(cmod.2PL, cmod.rasch) # I would favour the Rasch model here
##
## Model 1: mirt(data = data, model = model, itemtype = "Rasch", method = "MHRM",
## verbose = FALSE)
## Model 2: mirt(data = data, model = model, method = "MHRM", verbose = FALSE)
## AIC AICc SABIC BIC logLik X2 df p
## 1 26388.09 26388.55 26426.55 26483.73 -13176.05 NaN NaN NaN
## 2 26390.45 26391.80 26456.68 26555.16 -13164.22 23.645 13 0.0346
# Limited-information global fit: M2 statistic with RMSEA, SRMSR, TLI, CFI
M2(cmod.rasch)
## M2 df p RMSEA RMSEA_5 RMSEA_95 SRMSR
## stats 138.1324 102 0.01000311 0.01537261 0.007838085 0.02153165 0.02949093
## TLI CFI
## stats 0.9829788 0.9834651
# Item-level fit via the S-X2 statistic
itemfit(cmod.rasch)
## item S_X2 df.S_X2 p.S_X2
## 1 Item_1 20.9144 11 0.0343
## 2 Item_2 6.1161 12 0.9101
## 3 Item_3 4.0077 12 0.9833
## 4 Item_4 17.6382 12 0.1271
## 5 Item_5 15.3807 11 0.1657
## 6 Item_6 14.4107 11 0.2111
## 7 Item_7 16.6345 11 0.1192
## 8 Item_8 4.4091 12 0.9749
## 9 Item_9 14.4188 11 0.2107
## 10 Item_10 8.3871 12 0.7542
## 11 Item_11 9.0835 11 0.6142
## 12 Item_12 15.2211 11 0.1726
## 13 Item_13 18.3609 11 0.0736
## 14 Item_14 6.6503 11 0.8267
## 15 Item_15 12.8678 11 0.3021
data
# Factor scores: default EAP estimates
input.EAP <- fscores(cmod.2PL)
# Maximum a posteriori (MAP) estimates
MAP <- fscores(cmod.2PL, method = 'MAP')
# full.scores=TRUE returns one row of scores per respondent in the data
scores <- fscores(cmod.2PL, method = 'MAP', full.scores=TRUE)
head(scores)
## F1 F2
## [1,] -0.6642628 -0.06718946
## [2,] 0.7350927 -0.84137864
## [3,] -0.3372156 0.07807974
## [4,] -0.1093067 -0.29111827
## [5,] -0.1144325 -0.53026762
## [6,] -0.7458259 -1.37726955
Part B requires setting up mirt()
and mixedmirt()
for estimating latent regression and mixed effect models. The data and covariate information are available in the Exercise_03.Rdata
file.
Using the currently loaded dataset, inspect the covdata
object. It should contain four person-level covariates: Gender, a standardized midterm mark in math (zmidterm_Math
) and Science (zmidterm_Science
), and which school number the subjects were sampled from (50 schools in total).
Gender
, zmidterm_Math
, and zmidterm_Science
, can explain variability in the \(\theta\) values; use the mirt()
function with the ‘EM’ method. Include Gender
only in fitting the first model, and all three covariates when fitting the second model. Inspect the coefficients using summary()
and coef()
.Gender
? What would you conclude about the latent regression predictor variables?
head(covdata)
## Gender zmidterm_Math zmidterm_Science school
## 1 Male 0.4029737051 -1.5390769 school_1
## 2 Male -0.2000797279 -1.1371942 school_1
## 3 Female 0.0009380831 0.6042977 school_1
## 4 Male -0.2000797279 0.4033563 school_1
## 5 Male -0.4010975389 0.1354345 school_1
## 6 Male -0.8031331609 -1.7400183 school_1
# Summarize the person-level covariates (Gender, midterm z-scores, school)
summary(covdata)
## Gender zmidterm_Math zmidterm_Science school
## Female:825 Min. :-3.1148380 Min. :-2.945667 school_6 : 42
## Male :675 1st Qu.:-0.7026243 1st Qu.:-0.685076 school_45: 40
## Median : 0.0009381 Median : 0.001474 school_4 : 39
## Mean : 0.0000000 Mean : 0.000000 school_11: 38
## 3rd Qu.: 0.7045004 3rd Qu.: 0.671278 school_1 : 37
## Max. : 2.5136607 Max. : 2.345789 school_35: 37
## (Other) :1267
# Latent regression Rasch model: Gender predicts the latent traits (EM method)
lmodRasch <- mirt(data, model, itemtype = 'Rasch', covdata=covdata, formula = ~ Gender)
##
Iteration: 1, Log-Lik: -13222.378, Max-Change: 0.78893
Iteration: 2, Log-Lik: -13058.467, Max-Change: 0.30716
Iteration: 3, Log-Lik: -13098.578, Max-Change: 0.10777
Iteration: 4, Log-Lik: -13123.979, Max-Change: 0.04110
Iteration: 5, Log-Lik: -13132.810, Max-Change: 0.03114
Iteration: 6, Log-Lik: -13135.119, Max-Change: 0.02417
Iteration: 7, Log-Lik: -13135.331, Max-Change: 0.01765
Iteration: 8, Log-Lik: -13134.952, Max-Change: 0.01260
Iteration: 9, Log-Lik: -13134.445, Max-Change: 0.00898
Iteration: 10, Log-Lik: -13133.921, Max-Change: 0.00646
Iteration: 11, Log-Lik: -13133.482, Max-Change: 0.00476
Iteration: 12, Log-Lik: -13133.080, Max-Change: 0.00385
Iteration: 13, Log-Lik: -13132.155, Max-Change: 0.00403
Iteration: 14, Log-Lik: -13132.217, Max-Change: 0.00332
Iteration: 15, Log-Lik: -13132.066, Max-Change: 0.00292
Iteration: 16, Log-Lik: -13131.416, Max-Change: 0.00292
Iteration: 17, Log-Lik: -13131.525, Max-Change: 0.00234
Iteration: 18, Log-Lik: -13131.452, Max-Change: 0.00204
Iteration: 19, Log-Lik: -13131.014, Max-Change: 0.00208
Iteration: 20, Log-Lik: -13131.104, Max-Change: 0.00171
Iteration: 21, Log-Lik: -13131.062, Max-Change: 0.00153
Iteration: 22, Log-Lik: -13130.751, Max-Change: 0.00157
Iteration: 23, Log-Lik: -13130.826, Max-Change: 0.00130
Iteration: 24, Log-Lik: -13130.802, Max-Change: 0.00116
Iteration: 25, Log-Lik: -13130.583, Max-Change: 0.00118
Iteration: 26, Log-Lik: -13130.641, Max-Change: 0.00098
Iteration: 27, Log-Lik: -13130.626, Max-Change: 0.00087
Iteration: 28, Log-Lik: -13130.467, Max-Change: 0.00089
Iteration: 29, Log-Lik: -13130.512, Max-Change: 0.00074
Iteration: 30, Log-Lik: -13130.504, Max-Change: 0.00066
Iteration: 31, Log-Lik: -13130.387, Max-Change: 0.00067
Iteration: 32, Log-Lik: -13130.422, Max-Change: 0.00056
Iteration: 33, Log-Lik: -13130.416, Max-Change: 0.00050
Iteration: 34, Log-Lik: -13130.330, Max-Change: 0.00051
Iteration: 35, Log-Lik: -13130.357, Max-Change: 0.00042
Iteration: 36, Log-Lik: -13130.353, Max-Change: 0.00038
Iteration: 37, Log-Lik: -13130.289, Max-Change: 0.00039
Iteration: 38, Log-Lik: -13130.309, Max-Change: 0.00032
Iteration: 39, Log-Lik: -13130.307, Max-Change: 0.00029
Iteration: 40, Log-Lik: -13130.259, Max-Change: 0.00029
Iteration: 41, Log-Lik: -13130.275, Max-Change: 0.00024
Iteration: 42, Log-Lik: -13130.273, Max-Change: 0.00022
Iteration: 43, Log-Lik: -13130.237, Max-Change: 0.00022
Iteration: 44, Log-Lik: -13130.249, Max-Change: 0.00019
Iteration: 45, Log-Lik: -13130.248, Max-Change: 0.00017
Iteration: 46, Log-Lik: -13130.221, Max-Change: 0.00017
Iteration: 47, Log-Lik: -13130.230, Max-Change: 0.00014
Iteration: 48, Log-Lik: -13130.229, Max-Change: 0.00013
Iteration: 49, Log-Lik: -13130.208, Max-Change: 0.00013
Iteration: 50, Log-Lik: -13130.215, Max-Change: 0.00011
Iteration: 51, Log-Lik: -13130.215, Max-Change: 0.00010
## Warning: Log-likelihood was decreasing near the ML solution. EM method may
## be unstable
# Simplified coefficient output; $lr.betas holds the latent regression slopes
coef(lmodRasch, simplify=TRUE)
## $items
## a1 a2 d g u
## Item_1 1 0 -0.444 0 1
## Item_2 1 0 0.542 0 1
## Item_3 1 0 0.264 0 1
## Item_4 1 0 1.982 0 1
## Item_5 1 0 1.549 0 1
## Item_6 1 0 -0.380 0 1
## Item_7 1 0 -1.099 0 1
## Item_8 0 1 0.444 0 1
## Item_9 0 1 -1.409 0 1
## Item_10 0 1 0.597 0 1
## Item_11 0 1 -0.488 0 1
## Item_12 0 1 1.228 0 1
## Item_13 0 1 0.978 0 1
## Item_14 0 1 1.745 0 1
## Item_15 0 1 -0.451 0 1
##
## $means
## F1 F2
## 0 0
##
## $cov
## F1 F2
## F1 0.915 NA
## F2 0.543 0.854
##
## $lr.betas
## F1 F2
## (Intercept) 0.000 0.000
## GenderMale -1.247 -1.211
# Compare the unconditional Rasch model against the Gender latent regression model
anova(cmod.rasch, lmodRasch)
##
## Model 1: mirt(data = data, model = model, itemtype = "Rasch", method = "MHRM",
## verbose = FALSE)
## Model 2: mirt(data = data, model = model, itemtype = "Rasch", covdata = covdata,
## formula = ~Gender)
## AIC AICc SABIC BIC logLik X2 df p
## 1 26388.09 26388.55 26426.55 26483.73 -13176.05 NaN NaN NaN
## 2 26300.43 26301.00 26343.16 26406.69 -13130.21 91.662 2 0
# Latent regression with all three person covariates
lmodRasch2 <- mirt(data, model, itemtype = 'Rasch', covdata=covdata,
formula = ~ Gender + zmidterm_Math + zmidterm_Science)
##
Iteration: 1, Log-Lik: -13222.378, Max-Change: 0.42739
Iteration: 2, Log-Lik: -12845.918, Max-Change: 0.18423
Iteration: 3, Log-Lik: -12767.758, Max-Change: 0.16593
Iteration: 4, Log-Lik: -12697.822, Max-Change: 0.09299
Iteration: 5, Log-Lik: -12658.794, Max-Change: 0.06149
Iteration: 6, Log-Lik: -12639.912, Max-Change: 0.04127
Iteration: 7, Log-Lik: -12630.576, Max-Change: 0.03168
Iteration: 8, Log-Lik: -12625.436, Max-Change: 0.02517
Iteration: 9, Log-Lik: -12622.075, Max-Change: 0.02055
Iteration: 10, Log-Lik: -12619.645, Max-Change: 0.01717
Iteration: 11, Log-Lik: -12617.828, Max-Change: 0.01454
Iteration: 12, Log-Lik: -12616.413, Max-Change: 0.01248
Iteration: 13, Log-Lik: -12615.378, Max-Change: 0.01159
Iteration: 14, Log-Lik: -12614.361, Max-Change: 0.00955
Iteration: 15, Log-Lik: -12613.642, Max-Change: 0.00829
Iteration: 16, Log-Lik: -12613.071, Max-Change: 0.00726
Iteration: 17, Log-Lik: -12612.611, Max-Change: 0.00639
Iteration: 18, Log-Lik: -12612.236, Max-Change: 0.00566
Iteration: 19, Log-Lik: -12611.930, Max-Change: 0.00507
Iteration: 20, Log-Lik: -12611.674, Max-Change: 0.00452
Iteration: 21, Log-Lik: -12611.462, Max-Change: 0.00405
Iteration: 22, Log-Lik: -12611.286, Max-Change: 0.00365
Iteration: 23, Log-Lik: -12611.138, Max-Change: 0.00329
Iteration: 24, Log-Lik: -12611.013, Max-Change: 0.00298
Iteration: 25, Log-Lik: -12610.908, Max-Change: 0.00272
Iteration: 26, Log-Lik: -12610.817, Max-Change: 0.00247
Iteration: 27, Log-Lik: -12610.739, Max-Change: 0.00225
Iteration: 28, Log-Lik: -12610.673, Max-Change: 0.00206
Iteration: 29, Log-Lik: -12610.616, Max-Change: 0.00188
Iteration: 30, Log-Lik: -12610.566, Max-Change: 0.00173
Iteration: 31, Log-Lik: -12610.524, Max-Change: 0.00159
Iteration: 32, Log-Lik: -12610.487, Max-Change: 0.00147
Iteration: 33, Log-Lik: -12610.454, Max-Change: 0.00135
Iteration: 34, Log-Lik: -12610.427, Max-Change: 0.00125
Iteration: 35, Log-Lik: -12610.402, Max-Change: 0.00116
Iteration: 36, Log-Lik: -12610.380, Max-Change: 0.00107
Iteration: 37, Log-Lik: -12610.362, Max-Change: 0.00100
Iteration: 38, Log-Lik: -12610.345, Max-Change: 0.00092
Iteration: 39, Log-Lik: -12610.330, Max-Change: 0.00086
Iteration: 40, Log-Lik: -12610.318, Max-Change: 0.00080
Iteration: 41, Log-Lik: -12610.306, Max-Change: 0.00074
Iteration: 42, Log-Lik: -12610.296, Max-Change: 0.00069
Iteration: 43, Log-Lik: -12610.288, Max-Change: 0.00065
Iteration: 44, Log-Lik: -12610.280, Max-Change: 0.00061
Iteration: 45, Log-Lik: -12610.272, Max-Change: 0.00057
Iteration: 46, Log-Lik: -12610.267, Max-Change: 0.00053
Iteration: 47, Log-Lik: -12610.261, Max-Change: 0.00050
Iteration: 48, Log-Lik: -12610.256, Max-Change: 0.00046
Iteration: 49, Log-Lik: -12610.252, Max-Change: 0.00044
Iteration: 50, Log-Lik: -12610.248, Max-Change: 0.00041
Iteration: 51, Log-Lik: -12610.245, Max-Change: 0.00038
Iteration: 52, Log-Lik: -12610.242, Max-Change: 0.00036
Iteration: 53, Log-Lik: -12610.239, Max-Change: 0.00034
Iteration: 54, Log-Lik: -12610.237, Max-Change: 0.00032
Iteration: 55, Log-Lik: -12610.235, Max-Change: 0.00030
Iteration: 56, Log-Lik: -12610.233, Max-Change: 0.00028
Iteration: 57, Log-Lik: -12610.231, Max-Change: 0.00027
Iteration: 58, Log-Lik: -12610.230, Max-Change: 0.00025
Iteration: 59, Log-Lik: -12610.228, Max-Change: 0.00024
Iteration: 60, Log-Lik: -12610.227, Max-Change: 0.00022
Iteration: 61, Log-Lik: -12610.226, Max-Change: 0.00021
Iteration: 62, Log-Lik: -12610.225, Max-Change: 0.00020
Iteration: 63, Log-Lik: -12610.224, Max-Change: 0.00019
Iteration: 64, Log-Lik: -12610.223, Max-Change: 0.00018
Iteration: 65, Log-Lik: -12610.223, Max-Change: 0.00017
Iteration: 66, Log-Lik: -12610.222, Max-Change: 0.00016
Iteration: 67, Log-Lik: -12610.222, Max-Change: 0.00015
Iteration: 68, Log-Lik: -12610.221, Max-Change: 0.00014
Iteration: 69, Log-Lik: -12610.221, Max-Change: 0.00013
Iteration: 70, Log-Lik: -12610.220, Max-Change: 0.00013
Iteration: 71, Log-Lik: -12610.220, Max-Change: 0.00012
Iteration: 72, Log-Lik: -12610.220, Max-Change: 0.00011
Iteration: 73, Log-Lik: -12610.219, Max-Change: 0.00011
Iteration: 74, Log-Lik: -12610.219, Max-Change: 0.00010
Iteration: 75, Log-Lik: -12610.219, Max-Change: 0.00010
# Standardized loadings; Rasch slopes are fixed, so loadings are equal within each factor
summary(lmodRasch2)
## F1 F2 h2
## Item_1 0.295 0.000 0.087
## Item_2 0.295 0.000 0.087
## Item_3 0.295 0.000 0.087
## Item_4 0.295 0.000 0.087
## Item_5 0.295 0.000 0.087
## Item_6 0.295 0.000 0.087
## Item_7 0.295 0.000 0.087
## Item_8 0.000 0.283 0.080
## Item_9 0.000 0.283 0.080
## Item_10 0.000 0.283 0.080
## Item_11 0.000 0.283 0.080
## Item_12 0.000 0.283 0.080
## Item_13 0.000 0.283 0.080
## Item_14 0.000 0.283 0.080
## Item_15 0.000 0.283 0.080
##
## SS loadings: 0.61 0.64
## Proportion Var: 0.041 0.043
##
## Factor correlations:
##
## F1 F2
## F1 1.000 0.283
## F2 0.283 1.000
# Item intercepts and latent regression betas for the three-covariate model
coef(lmodRasch2, simplify=TRUE)
## $items
## a1 a2 d g u
## Item_1 1 0 -0.140 0 1
## Item_2 1 0 0.840 0 1
## Item_3 1 0 0.563 0 1
## Item_4 1 0 2.304 0 1
## Item_5 1 0 1.856 0 1
## Item_6 1 0 -0.076 0 1
## Item_7 1 0 -0.794 0 1
## Item_8 0 1 0.714 0 1
## Item_9 0 1 -1.120 0 1
## Item_10 0 1 0.865 0 1
## Item_11 0 1 -0.204 0 1
## Item_12 0 1 1.494 0 1
## Item_13 0 1 1.244 0 1
## Item_14 0 1 2.019 0 1
## Item_15 0 1 -0.167 0 1
##
## $means
## F1 F2
## 0 0
##
## $cov
## F1 F2
## F1 0.340 NA
## F2 0.092 0.312
##
## $lr.betas
## F1 F2
## (Intercept) 0.000 0.000
## GenderMale -0.670 -0.618
## zmidterm_Math 0.507 0.169
## zmidterm_Science 0.105 0.485
# Nested-model comparison: do the midterm covariates improve on Gender alone?
anova(lmodRasch2, lmodRasch)
##
## Model 1: mirt(data = data, model = model, itemtype = "Rasch", covdata = covdata,
## formula = ~Gender)
## Model 2: mirt(data = data, model = model, itemtype = "Rasch", covdata = covdata,
## formula = ~Gender + zmidterm_Math + zmidterm_Science)
## AIC AICc SABIC BIC logLik X2 df p
## 1 26300.43 26301.00 26343.16 26406.69 -13130.21 NaN NaN NaN
## 2 25268.44 25269.25 25319.71 25395.95 -12610.22 1039.992 4 0
school
predictor variable in the model containing the other three covariates. Given that school
has 50 unique values, it may be beneficial to treat the variable as a ‘random’ variable so that, instead of estimating 49 fixed effect coefficients, we can estimate only one variance term and therefore save degrees of freedom. This can add better stability to the model while still determining if there is any variability in the responses due to which school the participants are from.
school
using the mixedmirt()
function. Define the same model as above for the latent regression effect, but include school
as a random effect in the intercepts (be sure to include fixed = ~ items
or fixed = ~ 0 + items
to model the individual item intercept terms).
rmod <- mixedmirt(data, covdata, model = model, itemtype = 'Rasch',
fixed = ~ 0 + items, random = ~ 1|school,
lr.fixed = ~ Gender + zmidterm_Math + zmidterm_Science, verbose = FALSE)
# Random-effect (co)variances plus latent regression fixed effects with SEs
summary(rmod)
##
## Call:
## mixedmirt(data = data, covdata = covdata, model = model, fixed = ~0 +
## items, random = ~1 | school, itemtype = "Rasch", lr.fixed = ~Gender +
## zmidterm_Math + zmidterm_Science, verbose = FALSE)
##
##
## --------------
## RANDOM EFFECT COVARIANCE(S):
## Correlations on upper diagonal
##
## $Theta
## F1 F2
## F1 0.2778 0.355
## F2 0.0975 0.272
##
## $school
## COV_school
## COV_school 0.0153
##
## --------------
## LATENT REGRESSION FIXED EFFECTS:
##
## F1 F2
## (Intercept) 0.000 0.000
## GenderMale -0.587 -0.545
## zmidterm_Math 0.515 0.175
## zmidterm_Science 0.125 0.491
##
## Std.Error_F1 Std.Error_F2 z_F1 z_F2
## (Intercept) NA NA NA NA
## GenderMale 0.012 0.010 -48.736 -53.555
## zmidterm_Math 0.006 0.008 82.197 22.733
## zmidterm_Science 0.003 0.007 37.953 72.573
# Item parameter estimates with 95% confidence intervals
coef(rmod)
## $Item_1
## a1 a2 d g u
## par 1 0 -0.137 0 1
## CI_2.5 NA NA NaN NA NA
## CI_97.5 NA NA NaN NA NA
##
## $Item_2
## a1 a2 d g u
## par 1 0 0.815 0 1
## CI_2.5 NA NA 0.722 NA NA
## CI_97.5 NA NA 0.907 NA NA
##
## $Item_3
## a1 a2 d g u
## par 1 0 0.545 0 1
## CI_2.5 NA NA 0.480 NA NA
## CI_97.5 NA NA 0.611 NA NA
##
## $Item_4
## a1 a2 d g u
## par 1 0 2.246 0 1
## CI_2.5 NA NA 2.093 NA NA
## CI_97.5 NA NA 2.399 NA NA
##
## $Item_5
## a1 a2 d g u
## par 1 0 1.805 0 1
## CI_2.5 NA NA 1.668 NA NA
## CI_97.5 NA NA 1.943 NA NA
##
## $Item_6
## a1 a2 d g u
## par 1 0 -0.076 0 1
## CI_2.5 NA NA NaN NA NA
## CI_97.5 NA NA NaN NA NA
##
## $Item_7
## a1 a2 d g u
## par 1 0 -0.775 0 1
## CI_2.5 NA NA NaN NA NA
## CI_97.5 NA NA NaN NA NA
##
## $Item_8
## a1 a2 d g u
## par 0 1 0.698 0 1
## CI_2.5 NA NA 0.640 NA NA
## CI_97.5 NA NA 0.756 NA NA
##
## $Item_9
## a1 a2 d g u
## par 0 1 -1.095 0 1
## CI_2.5 NA NA NaN NA NA
## CI_97.5 NA NA NaN NA NA
##
## $Item_10
## a1 a2 d g u
## par 0 1 0.846 0 1
## CI_2.5 NA NA 0.781 NA NA
## CI_97.5 NA NA 0.912 NA NA
##
## $Item_11
## a1 a2 d g u
## par 0 1 -0.198 0 1
## CI_2.5 NA NA NaN NA NA
## CI_97.5 NA NA NaN NA NA
##
## $Item_12
## a1 a2 d g u
## par 0 1 1.463 0 1
## CI_2.5 NA NA 1.371 NA NA
## CI_97.5 NA NA 1.555 NA NA
##
## $Item_13
## a1 a2 d g u
## par 0 1 1.217 0 1
## CI_2.5 NA NA 1.135 NA NA
## CI_97.5 NA NA 1.299 NA NA
##
## $Item_14
## a1 a2 d g u
## par 0 1 1.980 0 1
## CI_2.5 NA NA 1.866 NA NA
## CI_97.5 NA NA 2.093 NA NA
##
## $Item_15
## a1 a2 d g u
## par 0 1 -0.162 0 1
## CI_2.5 NA NA NaN NA NA
## CI_97.5 NA NA NaN NA NA
##
## $GroupPars
## MEAN_1 MEAN_2 COV_11 COV_21 COV_22
## par 0 0 0.278 0.098 0.272
## CI_2.5 NA NA NaN NaN NaN
## CI_97.5 NA NA NaN NaN NaN
##
## $school
## COV_school_school
## par 0.015
## CI_2.5 NaN
## CI_97.5 NaN
##
## $lr.betas
## F1_(Intercept) F1_GenderMale F1_zmidterm_Math F1_zmidterm_Science
## par 0 -0.587 0.515 0.125
## CI_2.5 NA -0.611 0.503 0.118
## CI_97.5 NA -0.564 0.528 0.131
## F2_(Intercept) F2_GenderMale F2_zmidterm_Math F2_zmidterm_Science
## par 0 -0.545 0.175 0.491
## CI_2.5 NA -0.565 0.160 0.478
## CI_97.5 NA -0.525 0.190 0.504
# Compare the latent regression model with and without the school random effect
anova(lmodRasch2, rmod)
##
## Model 1: mirt(data = data, model = model, itemtype = "Rasch", covdata = covdata,
## formula = ~Gender + zmidterm_Math + zmidterm_Science)
## Model 2: mixedmirt(data = data, covdata = covdata, model = model, fixed = ~0 +
## items, random = ~1 | school, itemtype = "Rasch", lr.fixed = ~Gender +
## zmidterm_Math + zmidterm_Science, verbose = FALSE)
## AIC AICc SABIC BIC logLik X2 df p
## 1 25268.44 25269.25 25319.71 25395.95 -12610.22 NaN NaN NaN
## 2 25277.71 25278.59 25331.12 25410.54 -12613.85 -7.27 1 1
# Extract empirical estimates of the random effects: person-level thetas
# and the 50 school-level intercept deviations
re <- randef(rmod)
str(re)
## List of 2
## $ Theta : num [1:1500, 1:2] -0.813 -0.234 -0.19 -0.558 -0.726 ...
## ..- attr(*, "dimnames")=List of 2
## .. ..$ : NULL
## .. ..$ : chr [1:2] "F1" "F2"
## $ school: num [1:50, 1] -0.0329 -0.0637 0.00121 0.01584 -0.09925 ...
## ..- attr(*, "dimnames")=List of 2
## .. ..$ : chr [1:50] "school_1" "school_10" "school_11" "school_12" ...
## .. ..$ : chr "school"