ARAGPT2

ARAGPT2 is a stacked transformer-decoder model trained on 77 GB of Arabic text using the causal language modeling objective.

BibTeX:

@dataset{Wissam_Antoun_and_Fady_Baly_and_Hazem_Hajj_2024,
  abstract    = {ARAGPT2 is a stacked transformer-decoder model trained using the causal language modeling objective. The model is trained on 77GB of Arabic text.},
  author      = {Wissam Antoun and Fady Baly and Hazem Hajj},
  doi         = {10.57702/pmubdrtb},
  institution = {No Organization},
  keywords    = {Arabic Language, Natural Language Processing, Transformer Model},
  month       = {dec},
  publisher   = {TIB},
  title       = {ARAGPT2},
  url         = {https://service.tib.eu/ldmservice/dataset/aragpt2},
  year        = {2024}
}
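As a minimal sketch of how a causal language model like ARAGPT2 is used at inference time: the snippet below loads a checkpoint and autoregressively generates a continuation of an Arabic prompt. The checkpoint name aubmindlab/aragpt2-base and the prompt are assumptions for illustration, not part of this dataset record; the authors' models are commonly distributed under the aubmindlab organization on the Hugging Face Hub.

from transformers import AutoTokenizer, AutoModelForCausalLM

# Assumed checkpoint name; not specified in the dataset record above.
model_name = "aubmindlab/aragpt2-base"

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Causal LM objective at work: each token is predicted from the tokens before it.
prompt = "يحكى أن"  # Arabic for "It is said that"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=40, do_sample=True, top_p=0.95)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))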