<?xml version="1.0" encoding="UTF-8"?>
<dblp>
<inproceedings key="conf/sc/ChenHPLG0D021" mdate="2025-12-16">
<author orcid="0000-0003-2626-7865">Shiyang Chen 0004</author>
<author orcid="0000-0001-6093-9798">Shaoyi Huang</author>
<author orcid="0000-0002-3528-6868">Santosh Pandey 0001</author>
<author>Bingbing Li</author>
<author>Guang R. Gao</author>
<author>Long Zheng 0001</author>
<author>Caiwen Ding</author>
<author>Hang Liu 0001</author>
<title>E.T.: re-thinking self-attention for transformer models on GPUs.</title>
<pages>25</pages>
<year>2021</year>
<booktitle>SC</booktitle>
<ee>https://doi.org/10.1145/3458817.3476138</ee>
<crossref>conf/sc/2021</crossref>
<url>db/conf/sc/sc2021.html#ChenHPLG0D021</url>
</inproceedings>
</dblp>
