Dataset Groups Activity Stream Xlnet: Generalized Autoregressive Pretraining for Language Understanding The Xlnet is a generalized autoregressive pretraining model for language understanding. BibTex:
@dataset{Zhilin_Yang_and_Zihang_Dai_and_Yiming_Yang_and_Jaime_Carbonell_and_Ruslan_Salakhutdinov_and_Quoc_V_Le_2025,
  author      = {Yang, Zhilin and Dai, Zihang and Yang, Yiming and Carbonell, Jaime and Salakhutdinov, Ruslan and Le, Quoc V.},
  title       = {{XLNet}: Generalized Autoregressive Pretraining for Language Understanding},
  abstract    = {The Xlnet is a generalized autoregressive pretraining model for language understanding.},
  institution = {No Organization},
  publisher   = {TIB},
  year        = {2025},
  month       = jan,
  doi         = {10.57702/mt69o3yg},
  url         = {https://service.tib.eu/ldmservice/dataset/xlnet--generalized-autoregressive-pretraining-for-language-understanding},
  keywords    = {Generalized Autoregressive, Language Understanding, Pretraining Model, autoregressive, multi-lingual, pre-training, sequence-to-sequence},
}