@inproceedings{0a149233835e413b88cc1c3577022415,
title = "Developing AI into explanatory supporting models: An explanation-visualized deep learning prototype",
abstract = "Using Artificial Intelligence (AI) and machine learning technologies to automatically mine latent patterns from educational data holds great potential to inform teaching and learning practices. However, the current AI technology mostly works as “black box” - only the inputs and the corresponding outputs are available, which largely impedes researchers from gaining access to explainable feedback. This interdisciplinary work presents an explainable AI prototype with visualized explanations as feedback for computer-supported collaborative learning (CSCL). This research study seeks to provide interpretable insights with machine learning technologies for multimodal learning analytics (MMLA) by introducing two different explanatory machine learning-based models (neural network and Bayesian network) in different manners (end-to-end learning and probabilistic analysis) and for the same goal - provide explainable and actionable feedback. The prototype is applied to the real-world collaborative learning scenario with data-driven learning based on sensor-data from multiple modalities which can assess collaborative learning processes and render explanatory real-time feedback.",
author = "Haoyu Chen and Esther Tan and Yoon Lee and Sambit Praharaj and Marcus Specht and Guoying Zhao",
note = "Publisher Copyright: {\textcopyright} ISLS.; 14th International Conference of the Learning Sciences: The Interdisciplinarity of the Learning Sciences, ICLS 2020 ; Conference date: 19-06-2020 Through 23-06-2020",
year = "2020",
language = "English",
series = "Computer-Supported Collaborative Learning Conference, CSCL",
publisher = "International Society of the Learning Sciences (ISLS)",
pages = "1133--1140",
editor = "Melissa Gresalfi and Horn, {Ilana Seidel}",
booktitle = "14th International Conference of the Learning Sciences",
address = "United States",
}