BERT: Pre-training of Deep Bidirectional Transformers for Language
Understanding
J. Devlin, M. Chang, K. Lee, and K. Toutanova. Proceedings of the 2019 Conference of the North American Chapter of
the Association for Computational Linguistics: Human Language Technologies,
NAACL-HLT 2019, Minneapolis, MN, USA, June 2-7, 2019, Volume 1 (Long
and Short Papers), pages 4171--4186. Association for Computational Linguistics, (2019)
DOI: 10.18653/v1/n19-1423
Proceedings of the 2019 Conference of the North American Chapter of
the Association for Computational Linguistics: Human Language Technologies,
NAACL-HLT 2019, Minneapolis, MN, USA, June 2-7, 2019, Volume 1 (Long
and Short Papers)
%0 Conference Paper
%1 DBLP:conf/naacl/DevlinCLT19
%A Devlin, Jacob
%A Chang, Ming-Wei
%A Lee, Kenton
%A Toutanova, Kristina
%B Proceedings of the 2019 Conference of the North American Chapter of
the Association for Computational Linguistics: Human Language Technologies,
NAACL-HLT 2019, Minneapolis, MN, USA, June 2-7, 2019, Volume 1 (Long
and Short Papers)
%D 2019
%E Burstein, Jill
%E Doran, Christy
%E Solorio, Thamar
%I Association for Computational Linguistics
%K diss foundations imported
%P 4171--4186
%R 10.18653/v1/n19-1423
%T BERT: Pre-training of Deep Bidirectional Transformers for Language
Understanding
%U https://doi.org/10.18653/v1/n19-1423
@inproceedings{DBLP:conf/naacl/DevlinCLT19,
  added-at  = {2024-03-15T09:52:22.000+0100},
  author    = {Devlin, Jacob and Chang, Ming{-}Wei and Lee, Kenton and Toutanova, Kristina},
  bibsource = {dblp computer science bibliography, https://dblp.org},
  biburl    = {https://www.bibsonomy.org/bibtex/23787ce6f243887a2497bfdeb13dcb5f4/tobias.koopmann},
  booktitle = {Proceedings of the 2019 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, {NAACL-HLT} 2019, Minneapolis, MN, USA, June 2-7, 2019, Volume 1 (Long and Short Papers)},
  doi       = {10.18653/v1/n19-1423},
  editor    = {Burstein, Jill and Doran, Christy and Solorio, Thamar},
  interhash = {fc0c0c0264f63b06db4518c57ee21b9d},
  intrahash = {3787ce6f243887a2497bfdeb13dcb5f4},
  keywords  = {diss foundations imported},
  pages     = {4171--4186},
  publisher = {Association for Computational Linguistics},
  timestamp = {2024-03-15T09:52:22.000+0100},
  title     = {{BERT:} Pre-training of Deep Bidirectional Transformers for Language Understanding},
  url       = {https://doi.org/10.18653/v1/n19-1423},
  year      = {2019}
}