Here we demonstrate three alternative ways to specify the simple Rasch model for dichotomous data. In the first approach, the slope parameters are fixed to 1, all intercepts are freely estimated, and the variance of the latent trait is freely estimated.
# Load the mirt package and expand the LSAT6 response-pattern table
# into a full person-by-item data matrix (one row per respondent).
library(mirt)
dat <- expand.table(LSAT6)
# Approach 1: itemtype = 'Rasch' fixes every slope (a1) to 1, freely
# estimates each item intercept d, and freely estimates the latent
# trait variance (see COV_11 in the coef() output below).
mod <- mirt(dat, 1, itemtype = 'Rasch')
##
Iteration: 1, Log-Lik: -2473.219, Max-Change: 0.05796
Iteration: 2, Log-Lik: -2471.905, Max-Change: 0.03951
Iteration: 3, Log-Lik: -2471.040, Max-Change: 0.03366
Iteration: 4, Log-Lik: -2470.482, Max-Change: 0.03644
Iteration: 5, Log-Lik: -2469.693, Max-Change: 0.02590
Iteration: 6, Log-Lik: -2469.239, Max-Change: 0.02211
Iteration: 7, Log-Lik: -2468.875, Max-Change: 0.01999
Iteration: 8, Log-Lik: -2468.573, Max-Change: 0.01718
Iteration: 9, Log-Lik: -2468.330, Max-Change: 0.01528
Iteration: 10, Log-Lik: -2468.157, Max-Change: 0.01742
Iteration: 11, Log-Lik: -2467.932, Max-Change: 0.01255
Iteration: 12, Log-Lik: -2467.789, Max-Change: 0.01111
Iteration: 13, Log-Lik: -2467.669, Max-Change: 0.01035
Iteration: 14, Log-Lik: -2467.565, Max-Change: 0.00911
Iteration: 15, Log-Lik: -2467.480, Max-Change: 0.00826
Iteration: 16, Log-Lik: -2467.417, Max-Change: 0.00961
Iteration: 17, Log-Lik: -2467.338, Max-Change: 0.00701
Iteration: 18, Log-Lik: -2467.285, Max-Change: 0.00632
Iteration: 19, Log-Lik: -2467.240, Max-Change: 0.00600
Iteration: 20, Log-Lik: -2467.200, Max-Change: 0.00534
Iteration: 21, Log-Lik: -2467.166, Max-Change: 0.00491
Iteration: 22, Log-Lik: -2467.141, Max-Change: 0.00569
Iteration: 23, Log-Lik: -2467.110, Max-Change: 0.00424
Iteration: 24, Log-Lik: -2467.088, Max-Change: 0.00386
Iteration: 25, Log-Lik: -2467.069, Max-Change: 0.00371
Iteration: 26, Log-Lik: -2467.052, Max-Change: 0.00332
Iteration: 27, Log-Lik: -2467.038, Max-Change: 0.00307
Iteration: 28, Log-Lik: -2467.027, Max-Change: 0.00351
Iteration: 29, Log-Lik: -2467.014, Max-Change: 0.00269
Iteration: 30, Log-Lik: -2467.005, Max-Change: 0.00247
Iteration: 31, Log-Lik: -2466.997, Max-Change: 0.00238
Iteration: 32, Log-Lik: -2466.989, Max-Change: 0.00214
Iteration: 33, Log-Lik: -2466.983, Max-Change: 0.00199
Iteration: 34, Log-Lik: -2466.978, Max-Change: 0.00233
Iteration: 35, Log-Lik: -2466.973, Max-Change: 0.00176
Iteration: 36, Log-Lik: -2466.968, Max-Change: 0.00162
Iteration: 37, Log-Lik: -2466.965, Max-Change: 0.00157
Iteration: 38, Log-Lik: -2466.961, Max-Change: 0.00142
Iteration: 39, Log-Lik: -2466.959, Max-Change: 0.00132
Iteration: 40, Log-Lik: -2466.956, Max-Change: 0.00154
Iteration: 41, Log-Lik: -2466.954, Max-Change: 0.00117
Iteration: 42, Log-Lik: -2466.952, Max-Change: 0.00108
Iteration: 43, Log-Lik: -2466.950, Max-Change: 0.00104
Iteration: 44, Log-Lik: -2466.949, Max-Change: 0.00095
Iteration: 45, Log-Lik: -2466.947, Max-Change: 0.00088
Iteration: 46, Log-Lik: -2466.946, Max-Change: 0.00100
Iteration: 47, Log-Lik: -2466.945, Max-Change: 0.00079
Iteration: 48, Log-Lik: -2466.944, Max-Change: 0.00073
Iteration: 49, Log-Lik: -2466.944, Max-Change: 0.00071
Iteration: 50, Log-Lik: -2466.943, Max-Change: 0.00065
Iteration: 51, Log-Lik: -2466.942, Max-Change: 0.00060
Iteration: 52, Log-Lik: -2466.942, Max-Change: 0.00070
Iteration: 53, Log-Lik: -2466.941, Max-Change: 0.00054
Iteration: 54, Log-Lik: -2466.941, Max-Change: 0.00050
Iteration: 55, Log-Lik: -2466.940, Max-Change: 0.00048
Iteration: 56, Log-Lik: -2466.940, Max-Change: 0.00044
Iteration: 57, Log-Lik: -2466.940, Max-Change: 0.00041
Iteration: 58, Log-Lik: -2466.940, Max-Change: 0.00051
Iteration: 59, Log-Lik: -2466.939, Max-Change: 0.00036
Iteration: 60, Log-Lik: -2466.939, Max-Change: 0.00034
Iteration: 61, Log-Lik: -2466.939, Max-Change: 0.00033
Iteration: 62, Log-Lik: -2466.939, Max-Change: 0.00030
Iteration: 63, Log-Lik: -2466.939, Max-Change: 0.00028
Iteration: 64, Log-Lik: -2466.939, Max-Change: 0.00031
Iteration: 65, Log-Lik: -2466.938, Max-Change: 0.00025
Iteration: 66, Log-Lik: -2466.938, Max-Change: 0.00023
Iteration: 67, Log-Lik: -2466.938, Max-Change: 0.00023
Iteration: 68, Log-Lik: -2466.938, Max-Change: 0.00021
Iteration: 69, Log-Lik: -2466.938, Max-Change: 0.00019
Iteration: 70, Log-Lik: -2466.938, Max-Change: 0.00022
Iteration: 71, Log-Lik: -2466.938, Max-Change: 0.00017
Iteration: 72, Log-Lik: -2466.938, Max-Change: 0.00016
Iteration: 73, Log-Lik: -2466.938, Max-Change: 0.00016
Iteration: 74, Log-Lik: -2466.938, Max-Change: 0.00014
Iteration: 75, Log-Lik: -2466.938, Max-Change: 0.00014
Iteration: 76, Log-Lik: -2466.938, Max-Change: 0.00013
Iteration: 77, Log-Lik: -2466.938, Max-Change: 0.00012
Iteration: 78, Log-Lik: -2466.938, Max-Change: 0.00011
Iteration: 79, Log-Lik: -2466.938, Max-Change: 0.00011
Iteration: 80, Log-Lik: -2466.938, Max-Change: 0.00010
# Summarize the fitted model: convergence status, log-likelihood,
# parameter count, and information criteria (AIC/BIC/SABIC).
print(mod)
##
## Call:
## mirt(data = dat, model = 1, itemtype = "Rasch")
##
## Full-information item factor analysis with 1 factor(s).
## Converged within 1e-04 tolerance after 80 EM iterations.
## mirt version: 1.39.4
## M-step optimizer: nlminb
## EM acceleration: Ramsay
## Number of rectangular quadrature: 61
## Latent density type: Gaussian
##
## Log-likelihood = -2466.938
## Estimated parameters: 6
## AIC = 4945.875
## BIC = 4975.322; SABIC = 4956.266
## G2 (25) = 21.8, p = 0.6474
## RMSEA = 0, CFI = NaN, TLI = NaN
# Inspect the estimated parameters: slopes a1 are fixed at 1, each d is
# estimated, and the group-level variance COV_11 is freely estimated.
coef(mod)
## $Item_1
## a1 d g u
## par 1 2.731 0 1
##
## $Item_2
## a1 d g u
## par 1 0.999 0 1
##
## $Item_3
## a1 d g u
## par 1 0.24 0 1
##
## $Item_4
## a1 d g u
## par 1 1.307 0 1
##
## $Item_5
## a1 d g u
## par 1 2.1 0 1
##
## $GroupPars
## MEAN_1 COV_11
## par 0 0.572
In the second approach, the slope parameters are freely estimated but constrained to be equal across all items, all intercepts are freely estimated, and the latent variance is fixed at 1.
# Approach 2: estimate a single common slope by constraining a1 to be
# equal across items 1-5; the latent variance stays fixed at 1 for
# identification (COV_11 = 1 in the coef() output below).
model <- mirt.model('Theta = 1-5
CONSTRAIN = (1-5, a1)')
mod2 <- mirt(dat, model)
##
Iteration: 1, Log-Lik: -2468.601, Max-Change: 0.07077
Iteration: 2, Log-Lik: -2467.371, Max-Change: 0.02297
Iteration: 3, Log-Lik: -2467.099, Max-Change: 0.01355
Iteration: 4, Log-Lik: -2466.986, Max-Change: 0.00735
Iteration: 5, Log-Lik: -2466.959, Max-Change: 0.00482
Iteration: 6, Log-Lik: -2466.947, Max-Change: 0.00316
Iteration: 7, Log-Lik: -2466.939, Max-Change: 0.00109
Iteration: 8, Log-Lik: -2466.938, Max-Change: 0.00073
Iteration: 9, Log-Lik: -2466.938, Max-Change: 0.00049
Iteration: 10, Log-Lik: -2466.938, Max-Change: 0.00028
Iteration: 11, Log-Lik: -2466.938, Max-Change: 0.00016
Iteration: 12, Log-Lik: -2466.938, Max-Change: 0.00009
# Summarize the equal-slopes fit; the log-likelihood matches mod because
# the two parameterizations are statistically equivalent.
print(mod2)
##
## Call:
## mirt(data = dat, model = model)
##
## Full-information item factor analysis with 1 factor(s).
## Converged within 1e-04 tolerance after 12 EM iterations.
## mirt version: 1.39.4
## M-step optimizer: BFGS
## EM acceleration: Ramsay
## Number of rectangular quadrature: 61
## Latent density type: Gaussian
##
## Log-likelihood = -2466.938
## Estimated parameters: 10
## AIC = 4945.875
## BIC = 4975.322; SABIC = 4956.265
## G2 (25) = 21.8, p = 0.6474
## RMSEA = 0, CFI = NaN, TLI = NaN
# Inspect the estimates: a common slope (0.755) across items, freely
# estimated intercepts, and latent variance fixed at 1.
coef(mod2)
## $Item_1
## a1 d g u
## par 0.755 2.73 0 1
##
## $Item_2
## a1 d g u
## par 0.755 0.999 0 1
##
## $Item_3
## a1 d g u
## par 0.755 0.24 0 1
##
## $Item_4
## a1 d g u
## par 0.755 1.307 0 1
##
## $Item_5
## a1 d g u
## par 0.755 2.1 0 1
##
## $GroupPars
## MEAN_1 COV_11
## par 0 1
In the third approach, the slopes are fixed to 1, the intercepts are freely estimated subject to the constraint that \(\sum_j d_j = 0\), and the latent mean and variance are both freely estimated.
# Approach 3: slopes fixed to 1 (via itemtype = 'Rasch'), but the latent
# mean and variance are freely estimated (MEAN = Theta, COV = Theta*Theta).
# Identification will come from constraining the intercepts to sum to 0.
model2 <- mirt.model('Theta = 1-5
MEAN = Theta
COV = Theta*Theta')
# View how the vector of estimated parameters is organized internally;
# the parnum ordering here determines the index positions used by the
# optimizer constraint functions defined below.
sv <- mirt(dat, model2, itemtype = 'Rasch', pars = 'values')
sv[sv$est, ]
## group item class name parnum value lbound ubound est prior.type
## 2 all Item_1 dich d 2 2.8152981 -Inf Inf TRUE none
## 6 all Item_2 dich d 6 1.0818304 -Inf Inf TRUE none
## 10 all Item_3 dich d 10 0.2618655 -Inf Inf TRUE none
## 14 all Item_4 dich d 14 1.4071275 -Inf Inf TRUE none
## 18 all Item_5 dich d 18 2.2136968 -Inf Inf TRUE none
## 21 all GROUP GroupPars MEAN_1 21 0.0000000 -Inf Inf TRUE none
## 22 all GROUP GroupPars COV_11 22 1.0000000 1e-04 Inf TRUE none
## prior_1 prior_2
## 2 NaN NaN
## 6 NaN NaN
## 10 NaN NaN
## 14 NaN NaN
## 18 NaN NaN
## 21 NaN NaN
## 22 NaN NaN
# Constraint: define a function for solnp that computes sum(d_j) over the
# five intercepts, and declare its required value (0) via eqB. Per the sv
# table above, p[1:5] are the intercepts, p[6] the latent mean, and p[7]
# the latent variance (hence the 1e-4 lower bound on the last element).
eqfun <- function(p, optim_args) sum(p[1:5]) #could use browser() here, if it helps
solnp_args <- list(eqfun=eqfun, eqB=0, LB = c(rep(-15, 6), 1e-4))
mod3 <- mirt(dat, model2, itemtype = 'Rasch', optimizer = 'solnp', solnp_args=solnp_args,
pars=sv)
##
Iteration: 1, Log-Lik: -2473.219, Max-Change: 2.01548
Iteration: 2, Log-Lik: -2978.093, Max-Change: 0.64564
Iteration: 3, Log-Lik: -2637.888, Max-Change: 0.30377
Iteration: 4, Log-Lik: -2536.573, Max-Change: 0.16978
Iteration: 5, Log-Lik: -2498.679, Max-Change: 0.10519
Iteration: 6, Log-Lik: -2482.438, Max-Change: 0.06941
Iteration: 7, Log-Lik: -2474.859, Max-Change: 0.04771
Iteration: 8, Log-Lik: -2471.120, Max-Change: 0.03371
Iteration: 9, Log-Lik: -2469.203, Max-Change: 0.02429
Iteration: 10, Log-Lik: -2468.200, Max-Change: 0.01951
Iteration: 11, Log-Lik: -2467.607, Max-Change: 0.01270
Iteration: 12, Log-Lik: -2467.329, Max-Change: 0.00946
Iteration: 13, Log-Lik: -2467.179, Max-Change: 0.00864
Iteration: 14, Log-Lik: -2467.073, Max-Change: 0.00542
Iteration: 15, Log-Lik: -2467.027, Max-Change: 0.00401
Iteration: 16, Log-Lik: -2467.001, Max-Change: 0.00343
Iteration: 17, Log-Lik: -2466.983, Max-Change: 0.00217
Iteration: 18, Log-Lik: -2466.975, Max-Change: 0.00166
Iteration: 19, Log-Lik: -2466.969, Max-Change: 0.00190
Iteration: 20, Log-Lik: -2466.964, Max-Change: 0.00105
Iteration: 21, Log-Lik: -2466.961, Max-Change: 0.00106
Iteration: 22, Log-Lik: -2466.958, Max-Change: 0.00110
Iteration: 23, Log-Lik: -2466.955, Max-Change: 0.00077
Iteration: 24, Log-Lik: -2466.954, Max-Change: 0.00110
Iteration: 25, Log-Lik: -2466.952, Max-Change: 0.00105
Iteration: 26, Log-Lik: -2466.950, Max-Change: 0.00098
Iteration: 27, Log-Lik: -2466.948, Max-Change: 0.00093
Iteration: 28, Log-Lik: -2466.947, Max-Change: 0.00090
Iteration: 29, Log-Lik: -2466.946, Max-Change: 0.00084
Iteration: 30, Log-Lik: -2466.945, Max-Change: 0.00079
Iteration: 31, Log-Lik: -2466.944, Max-Change: 0.00075
Iteration: 32, Log-Lik: -2466.943, Max-Change: 0.00020
Iteration: 33, Log-Lik: -2466.943, Max-Change: 0.00014
Iteration: 34, Log-Lik: -2466.943, Max-Change: 0.00010
# Summarize the constrained fit; note the M-step optimizer is now solnp
# and 7 parameters are estimated (5 intercepts + latent mean + variance).
print(mod3)
##
## Call:
## mirt(data = dat, model = model2, itemtype = "Rasch", optimizer = "solnp",
## pars = sv, solnp_args = solnp_args)
##
## Full-information item factor analysis with 1 factor(s).
## Converged within 1e-04 tolerance after 34 EM iterations.
## mirt version: 1.39.4
## M-step optimizer: solnp
## EM acceleration: Ramsay
## Number of rectangular quadrature: 61
## Latent density type: Gaussian
##
## Log-likelihood = -2466.943
## Estimated parameters: 7
## AIC = 4947.887
## BIC = 4982.241; SABIC = 4960.009
## G2 (25) = 21.81, p = 0.6467
## RMSEA = 0, CFI = NaN, TLI = NaN
# Inspect the estimates: intercepts now center around 0 while the latent
# mean (1.472) and variance (0.559) absorb the overall location/scale.
coef(mod3)
## $Item_1
## a1 d g u
## par 1 1.253 0 1
##
## $Item_2
## a1 d g u
## par 1 -0.475 0 1
##
## $Item_3
## a1 d g u
## par 1 -1.233 0 1
##
## $Item_4
## a1 d g u
## par 1 -0.168 0 1
##
## $Item_5
## a1 d g u
## par 1 0.623 0 1
##
## $GroupPars
## MEAN_1 COV_11
## par 1.472 0.559
# Extract the five estimated intercepts from the solnp fit.
(ds <- sapply(coef(mod3)[1:5], function(x) x[,'d']))
## Item_1 Item_2 Item_3 Item_4 Item_5
## 1.2529600 -0.4754484 -1.2327360 -0.1681705 0.6233949
# Verify the equality constraint: the intercepts sum to (numerically) zero.
sum(ds)
## [1] 4.551914e-15
The following is equivalent to the above approach; however, it uses the nloptr package instead (requires mirt version 1.24 or later).
# Equivalent constrained fit using the nloptr package. heq returns the
# equality-constraint value sum(d_j) (required to equal 0), and heqjac
# supplies its Jacobian via numerical differentiation (nl.jacobian).
# The SLSQP algorithm handles both the equality constraint and the box
# bounds (same bounds as the solnp fit above).
library(nloptr)
heq <- function(p, optim_args) sum(p[1:5])
heqjac <- function(x, optim_args) nl.jacobian(x, heq)
nloptr_args <- list(lb = c(rep(-15, 6), 1e-4),
eval_g_eq = heq,
eval_jac_g_eq = heqjac,
opts = list(algorithm = 'NLOPT_LD_SLSQP',
xtol_rel=1e-8)
)
mod4 <- mirt(dat, model2, itemtype = 'Rasch', optimizer = 'nloptr', nloptr_args=nloptr_args,
pars=sv)
##
Iteration: 1, Log-Lik: -2473.219, Max-Change: 2.01548
Iteration: 2, Log-Lik: -2978.093, Max-Change: 0.64564
Iteration: 3, Log-Lik: -2637.888, Max-Change: 0.30377
Iteration: 4, Log-Lik: -2536.573, Max-Change: 0.16978
Iteration: 5, Log-Lik: -2498.679, Max-Change: 0.10519
Iteration: 6, Log-Lik: -2482.438, Max-Change: 0.06941
Iteration: 7, Log-Lik: -2474.859, Max-Change: 0.04771
Iteration: 8, Log-Lik: -2471.120, Max-Change: 0.03371
Iteration: 9, Log-Lik: -2469.203, Max-Change: 0.02429
Iteration: 10, Log-Lik: -2468.200, Max-Change: 0.01951
Iteration: 11, Log-Lik: -2467.607, Max-Change: 0.01270
Iteration: 12, Log-Lik: -2467.329, Max-Change: 0.00946
Iteration: 13, Log-Lik: -2467.179, Max-Change: 0.00868
Iteration: 14, Log-Lik: -2467.079, Max-Change: 0.00517
Iteration: 15, Log-Lik: -2467.033, Max-Change: 0.00394
Iteration: 16, Log-Lik: -2467.007, Max-Change: 0.00438
Iteration: 17, Log-Lik: -2466.987, Max-Change: 0.00224
Iteration: 18, Log-Lik: -2466.977, Max-Change: 0.00175
Iteration: 19, Log-Lik: -2466.971, Max-Change: 0.00271
Iteration: 20, Log-Lik: -2466.965, Max-Change: 0.00105
Iteration: 21, Log-Lik: -2466.962, Max-Change: 0.00098
Iteration: 22, Log-Lik: -2466.959, Max-Change: 0.00240
Iteration: 23, Log-Lik: -2466.956, Max-Change: 0.00095
Iteration: 24, Log-Lik: -2466.954, Max-Change: 0.00092
Iteration: 25, Log-Lik: -2466.953, Max-Change: 0.00135
Iteration: 26, Log-Lik: -2466.951, Max-Change: 0.00086
Iteration: 27, Log-Lik: -2466.949, Max-Change: 0.00082
Iteration: 28, Log-Lik: -2466.948, Max-Change: 0.00081
Iteration: 29, Log-Lik: -2466.947, Max-Change: 0.00075
Iteration: 30, Log-Lik: -2466.946, Max-Change: 0.00071
Iteration: 31, Log-Lik: -2466.945, Max-Change: 0.00069
Iteration: 32, Log-Lik: -2466.944, Max-Change: 0.00064
Iteration: 33, Log-Lik: -2466.944, Max-Change: 0.00061
Iteration: 34, Log-Lik: -2466.943, Max-Change: 0.00059
Iteration: 35, Log-Lik: -2466.942, Max-Change: 0.00055
Iteration: 36, Log-Lik: -2466.942, Max-Change: 0.00052
Iteration: 37, Log-Lik: -2466.941, Max-Change: 0.00050
Iteration: 38, Log-Lik: -2466.941, Max-Change: 0.00047
Iteration: 39, Log-Lik: -2466.941, Max-Change: 0.00044
Iteration: 40, Log-Lik: -2466.940, Max-Change: 0.00042
Iteration: 41, Log-Lik: -2466.940, Max-Change: 0.00039
Iteration: 42, Log-Lik: -2466.940, Max-Change: 0.00037
Iteration: 43, Log-Lik: -2466.940, Max-Change: 0.00036
Iteration: 44, Log-Lik: -2466.939, Max-Change: 0.00033
Iteration: 45, Log-Lik: -2466.939, Max-Change: 0.00032
Iteration: 46, Log-Lik: -2466.939, Max-Change: 0.00030
Iteration: 47, Log-Lik: -2466.939, Max-Change: 0.00028
Iteration: 48, Log-Lik: -2466.939, Max-Change: 0.00027
Iteration: 49, Log-Lik: -2466.939, Max-Change: 0.00026
Iteration: 50, Log-Lik: -2466.938, Max-Change: 0.00024
Iteration: 51, Log-Lik: -2466.938, Max-Change: 0.00023
Iteration: 52, Log-Lik: -2466.938, Max-Change: 0.00022
Iteration: 53, Log-Lik: -2466.938, Max-Change: 0.00020
Iteration: 54, Log-Lik: -2466.938, Max-Change: 0.00019
Iteration: 55, Log-Lik: -2466.938, Max-Change: 0.00018
Iteration: 56, Log-Lik: -2466.938, Max-Change: 0.00017
Iteration: 57, Log-Lik: -2466.938, Max-Change: 0.00016
Iteration: 58, Log-Lik: -2466.938, Max-Change: 0.00015
Iteration: 59, Log-Lik: -2466.938, Max-Change: 0.00014
Iteration: 60, Log-Lik: -2466.938, Max-Change: 0.00014
Iteration: 61, Log-Lik: -2466.938, Max-Change: 0.00013
Iteration: 62, Log-Lik: -2466.938, Max-Change: 0.00012
Iteration: 63, Log-Lik: -2466.938, Max-Change: 0.00011
Iteration: 64, Log-Lik: -2466.938, Max-Change: 0.00011
Iteration: 65, Log-Lik: -2466.938, Max-Change: 0.00010
Iteration: 66, Log-Lik: -2466.938, Max-Change: 0.00010
# Extract the five estimated intercepts from the nloptr fit. Note: this
# checks mod4 — the original code repeated coef(mod3), which re-verified
# the solnp fit instead of the nloptr fit defined just above.
(ds <- sapply(coef(mod4)[1:5], function(x) x[,'d']))
## Item_1 Item_2 Item_3 Item_4 Item_5
## 1.2529600 -0.4754484 -1.2327360 -0.1681705 0.6233949
# Verify the equality constraint: the intercepts sum to (numerically) zero.
sum(ds)
## [1] 4.551914e-15