@InProceedings{10.1007/978-3-030-04167-0_41,
author="Tran, Son N.
and Zhang, Qing
and Nguyen, Anthony
and Vu, Xuan-Son
and Ngo, Son",
editor="Cheng, Long
and Leung, Andrew Chi Sing
and Ozawa, Seiichi",
title="Improving Recurrent Neural Networks with Predictive Propagation for Sequence Labelling",
booktitle="Neural Information Processing",
year="2018",
publisher="Springer International Publishing",
address="Cham",
pages="452--462",
abstract="Recurrent neural networks (RNNs) are a useful tool for sequence labelling tasks in natural language processing. Although in practice RNNs suffer from the vanishing/exploding gradient problem, their compactness still offers efficiency and makes them less prone to overfitting. In this paper we show that by propagating the predictions of previous labels we can improve the performance of RNNs while keeping the number of parameters unchanged and adding only one extra step at inference. As a result, the models remain more compact and efficient than models with complex memory gates. In our experiments, we evaluate the idea on optical character recognition and chunking, achieving promising results.",
isbn="978-3-030-04167-0"
}
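
The abstract describes feeding the previous step's label prediction back into the recurrence. Below is a minimal PyTorch sketch of one plausible reading of that idea, not the authors' actual method or code: the class name PredictivePropagationRNN, the trick of reusing the output layer's transposed weights to map the label distribution back into hidden space (so the parameter count stays unchanged), and all sizes are illustrative assumptions.

import torch
import torch.nn as nn

class PredictivePropagationRNN(nn.Module):
    """Hypothetical sketch: an RNN tagger that propagates the previous
    step's predicted label distribution into the next hidden state."""

    def __init__(self, vocab_size, hidden_size, num_labels):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, hidden_size)
        self.cell = nn.RNNCell(hidden_size, hidden_size)
        # The same linear layer is used for output logits and (transposed)
        # for projecting label probabilities back into hidden space,
        # so no parameters are added beyond a plain RNN tagger.
        self.out = nn.Linear(hidden_size, num_labels)

    def forward(self, tokens):
        # tokens: LongTensor of shape (seq_len,)
        h = torch.zeros(1, self.cell.hidden_size)
        prev_probs = None
        logits_seq = []
        for x in self.embed(tokens):  # iterate over time steps
            if prev_probs is not None:
                # Propagate the previous prediction: map the label
                # distribution back into hidden space via out.weight^T.
                h = h + prev_probs @ self.out.weight
            h = self.cell(x.unsqueeze(0), h)
            logits = self.out(h)
            prev_probs = torch.softmax(logits, dim=-1)
            logits_seq.append(logits)
        return torch.cat(logits_seq, dim=0)  # (seq_len, num_labels)

# Usage sketch: tag a sequence of 7 tokens from a 100-word vocabulary.
model = PredictivePropagationRNN(vocab_size=100, hidden_size=32, num_labels=5)
print(model(torch.randint(0, 100, (7,))).shape)  # torch.Size([7, 5])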

