@inproceedings{60761fc6698343118c8c1db5ea74ecff,
title = "Ruminating Reader: Reasoning with Gated Multi-Hop Attention",
abstract = "To answer the question in machine comprehension (MC) task, the models need to establish the interaction between the question and the context. To tackle the problem that the single-pass model cannot reflect on and correct its answer, we present Ruminating Reader. Ruminating Reader adds a second pass of attention and a novel information fusion component to the Bi-Directional Attention Flow model (BIDAF). We propose novel layer structures that construct a query aware context vector representation and fuse encoding representation with intermediate representation on top of BIDAF model. We show that a multi-hop attention mechanism can be applied to a bi-directional attention structure. In experiments on SQuAD, we find that the Reader outperforms the BIDAF baseline by 2.1 F1 score and 2.7 EM score. Our analysis shows that different hops of the attention have different responsibilities in selecting answers.",
author = "Yichen Gong and Bowman, {Samuel R.}",
note = "Publisher Copyright: {\textcopyright} 2018 Association for Computational Linguistics; ACL 2018 Workshop on Machine Reading for Question Answering, MRQA 2018 ; Conference date: 19-07-2018",
year = "2018",
language = "English (US)",
series = "Proceedings of the Annual Meeting of the Association for Computational Linguistics",
publisher = "Association for Computational Linguistics (ACL)",
pages = "1--11",
booktitle = "ACL 2018 - Machine Reading for Question Answering, Proceedings of the Workshop",
}
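
The gated multi-hop attention described in the abstract can be pictured with a small sketch: a first attention hop builds a query-aware context representation, a second hop re-reads that representation, and a learned sigmoid gate fuses the second-hop summary back into the first-pass encoding. The NumPy code below is a minimal illustration of that general pattern, not the paper's exact layers; the dot-product similarity, the tensor shapes, and the gate parameters W_z/b_z are assumptions made for the example.

    import numpy as np

    def softmax(x, axis=-1):
        # numerically stable softmax
        e = np.exp(x - x.max(axis=axis, keepdims=True))
        return e / e.sum(axis=axis, keepdims=True)

    def attention_hop(context, query):
        # context: (T, d), query: (J, d)
        sim = context @ query.T                 # similarity of each context/query pair, (T, J)
        return softmax(sim, axis=-1) @ query    # query-aware context, (T, d)

    def gated_fusion(encoding, intermediate, W_z, b_z):
        # sigmoid gate decides, per dimension, how much of the
        # second-hop summary to mix into the first-pass encoding
        z = 1.0 / (1.0 + np.exp(-(np.concatenate([encoding, intermediate], axis=-1) @ W_z + b_z)))
        return z * encoding + (1.0 - z) * intermediate

    # Illustrative usage with random inputs (hypothetical sizes).
    T, J, d = 5, 3, 4
    rng = np.random.default_rng(0)
    context, query = rng.normal(size=(T, d)), rng.normal(size=(J, d))
    W_z, b_z = rng.normal(size=(2 * d, d)), np.zeros(d)
    hop1 = attention_hop(context, query)        # first pass
    hop2 = attention_hop(hop1, query)           # second pass ("ruminating")
    fused = gated_fusion(hop1, hop2, W_z, b_z)  # gated fusion of the two hops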