@article{f627175f12d24780933c7c5adb59b350,
  title     = {Online Calibration of a Joint Model of Item Responses and Response Times in Computerized Adaptive Testing},
  author    = {Kang, Hyeon-Ah and Zheng, Yi and Chang, Hua-Hua},
  journal   = {Journal of Educational and Behavioral Statistics},
  year      = {2020},
  month     = apr,
  day       = {1},
  volume    = {45},
  number    = {2},
  pages     = {175--208},
  doi       = {10.3102/1076998619879040},
  issn      = {1076-9986},
  publisher = {SAGE Publications Inc.},
  language  = {English (US)},
  keywords  = {computerized adaptive testing, item response theory, online calibration, optimal sampling, response time},
  abstract  = {With the widespread use of computers in modern assessment, online calibration has become increasingly popular as a way of replenishing an item pool. The present study discusses online calibration strategies for a joint model of responses and response times. The study proposes likelihood inference methods for item parameter estimation and evaluates their performance along with optimal sampling procedures. An extensive simulation study indicates that the proposed online calibration strategies perform well with relatively small samples (e.g., 500$\sim$800 examinees). The analysis of estimated parameters suggests that response time information can be used to improve the recovery of the response model parameters. Among a number of sampling methods investigated, A-optimal sampling was found most advantageous when the item parameters were weakly correlated. When the parameters were strongly correlated, D-optimal sampling tended to achieve the most accurate parameter recovery. The study provides guidelines for deciding sampling design under a specific goal of online calibration given the characteristics of field-testing items.},
  note      = {Funding: This study received funding from the Campus Research Board (grant ID RB15138). ORCID iD: Hyeon-Ah Kang, https://orcid.org/0000-0003-4496-6467. Publisher Copyright: {\textcopyright} 2019 AERA.},
}