BERT: Pre-training of deep bidirectional transformers for language understanding

This paper proposes BERT, a pre-trained deep bidirectional transformer for language understanding.

BibTeX:

@dataset{Jacob_Devlin_and_Ming-Wei_Chang_and_Kenton_Lee_and_Kristina_Toutanova_2024,
  abstract    = {This paper proposes BERT, a pre-trained deep bidirectional transformer for language understanding.},
  author      = {Jacob Devlin and Ming-Wei Chang and Kenton Lee and Kristina Toutanova},
  doi         = {10.57702/xvg4jrkz},
  institution = {No Organization},
  keyword     = {BERT, Deep Bidirectional Transformers, Deep Learning, Language Models, Language Understanding, Natural Language Processing, Natural Language Understanding, Pre-trained Deep Bidirectional Transformer, Pre-training},
  month       = {dec},
  publisher   = {TIB},
  title       = {BERT: Pre-training of deep bidirectional transformers for language understanding},
  url         = {https://service.tib.eu/ldmservice/dataset/bert--pre-training-of-deep-bidirectional-transformers-for-language-understanding},
  year        = {2024}
}
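For reference, a minimal LaTeX sketch showing how this entry could be cited. The file name references.bib is an assumption (save the entry above under that name), and biblatex with the biber backend is assumed because it understands the @dataset entry type; classic BibTeX styles may not.

% Minimal citation sketch (assumptions: the entry above is saved as
% references.bib; biblatex/biber is used since it supports @dataset).
\documentclass{article}
\usepackage[backend=biber]{biblatex}
\addbibresource{references.bib}

\begin{document}
BERT~\cite{Jacob_Devlin_and_Ming-Wei_Chang_and_Kenton_Lee_and_Kristina_Toutanova_2024}
introduced pre-trained deep bidirectional transformers for language understanding.
\printbibliography
\end{document}

Compiling requires the biber run in between (pdflatex, then biber, then pdflatex again) for the bibliography to appear.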