Dataset Groups Activity Stream Scaling (Down) CLIP: A Comprehensive Analysis of Data, Architecture, and Training Strategies This paper investigates the performance of the Contrastive Language-Image Pre-training (CLIP) when scaled down to limited computation budgets. BibTeX:
@dataset{Zichao_Li_and_Cihang_Xie_and_Ekin_Dogus_Cubuk_2024,
  abstract    = {This paper investigates the performance of the Contrastive Language-Image Pre-training (CLIP) when scaled down to limited computation budgets.},
  author      = {Li, Zichao and Xie, Cihang and Cubuk, Ekin Dogus},
  doi         = {10.57702/t6hrskc9},
  institution = {No Organization},
  keywords    = {CLIP, Contrastive Learning, Image and Language Understanding},
  month       = dec,
  publisher   = {TIB},
  title       = {Scaling (Down) {CLIP}: A Comprehensive Analysis of Data, Architecture, and Training Strategies},
  url         = {https://service.tib.eu/ldmservice/dataset/scaling--down--clip--a-comprehensive-analysis-of-data--architecture--and-training-strategies},
  year        = {2024},
}