Dataset Groups Activity Stream ZeroQuant-FP: A Leap Forward in LLMs Post-Training W4A8 Quantization Using Floating-Point Formats The dataset used in the paper is not explicitly described, but it is mentioned that it is a large language model dataset. BibTex:
@dataset{Xiaoxia_Wu_and_Zhewei_Yao_and_Yuxiong_He_2025,
  abstract      = {The dataset used in the paper is not explicitly described, but it is mentioned that it is a large language model dataset.},
  author        = {Wu, Xiaoxia and Yao, Zhewei and He, Yuxiong},
  doi           = {10.57702/sjdl7oz5},
  institution   = {No Organization},
  internal-note = {institution looks like an auto-export placeholder; confirm and drop if spurious},
  keywords      = {Large Language Models, Natural Language Processing, Post-Training Quantization},
  month         = jan,
  publisher     = {TIB},
  title         = {{ZeroQuant-FP}: A Leap Forward in {LLMs} Post-Training {W4A8} Quantization Using Floating-Point Formats},
  url           = {https://service.tib.eu/ldmservice/dataset/zeroquant-fp--a-leap-forward-in-llms-post-training-w4a8-quantization-using-floating-point-formats},
  year          = {2025},
}