{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2024,1,12]],"date-time":"2024-01-12T05:05:42Z","timestamp":1705035942354},"reference-count":27,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"1","license":[{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-029"},{"start":{"date-parts":[[2024,1,1]],"date-time":"2024-01-01T00:00:00Z","timestamp":1704067200000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-037"}],"funder":[{"DOI":"10.13039\/501100001809","name":"NSFC","doi-asserted-by":"publisher","award":["62125403","U19B2041","92164301"],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100012166","name":"National Key Research and Development Program","doi-asserted-by":"publisher","award":["2021ZD0114400"],"id":[{"id":"10.13039\/501100012166","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100017582","name":"Beijing National Research Center for Information Science and Technology","doi-asserted-by":"publisher","id":[{"id":"10.13039\/501100017582","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Beijing Advanced Innovation Center for Integrated Circuits"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE J. Solid-State Circuits"],"published-print":{"date-parts":[[2024,1]]},"DOI":"10.1109\/jssc.2023.3305663","type":"journal-article","created":{"date-parts":[[2023,8,23]],"date-time":"2023-08-23T17:58:59Z","timestamp":1692813539000},"page":"90-101","source":"Crossref","is-referenced-by-count":0,"title":["MulTCIM: Digital Computing-in-Memory-Based Multimodal Transformer Accelerator With Attention-Token-Bit Hybrid Sparsity"],"prefix":"10.1109","volume":"59","author":[{"ORCID":"http:\/\/orcid.org\/0000-0003-2228-8829","authenticated-orcid":false,"given":"Fengbin","family":"Tu","sequence":"first","affiliation":[{"name":"School of Integrated Circuits, Tsinghua University, Beijing, China"}]},{"ORCID":"http:\/\/orcid.org\/0000-0002-5858-8850","authenticated-orcid":false,"given":"Zihan","family":"Wu","sequence":"additional","affiliation":[{"name":"School of Integrated Circuits, Tsinghua University, Beijing, China"}]},{"ORCID":"http:\/\/orcid.org\/0000-0002-9657-3617","authenticated-orcid":false,"given":"Yiqi","family":"Wang","sequence":"additional","affiliation":[{"name":"School of Integrated Circuits, Tsinghua University, Beijing, China"}]},{"ORCID":"http:\/\/orcid.org\/0000-0001-9639-6123","authenticated-orcid":false,"given":"Weiwei","family":"Wu","sequence":"additional","affiliation":[{"name":"School of Integrated Circuits, Tsinghua University, Beijing, China"}]},{"ORCID":"http:\/\/orcid.org\/0000-0001-7548-4116","authenticated-orcid":false,"given":"Leibo","family":"Liu","sequence":"additional","affiliation":[{"name":"School of Integrated Circuits, Tsinghua University, Beijing, China"}]},{"given":"Yang","family":"Hu","sequence":"additional","affiliation":[{"name":"School of Integrated Circuits, Tsinghua University, Beijing, 
China"}]},{"ORCID":"http:\/\/orcid.org\/0000-0001-5117-7920","authenticated-orcid":false,"given":"Shaojun","family":"Wei","sequence":"additional","affiliation":[{"name":"School of Integrated Circuits, Tsinghua University, Beijing, China"}]},{"ORCID":"http:\/\/orcid.org\/0000-0003-2309-572X","authenticated-orcid":false,"given":"Shouyi","family":"Yin","sequence":"additional","affiliation":[{"name":"School of Integrated Circuits, Tsinghua University, Beijing, China"}]}],"member":"263","reference":[{"key":"ref1","doi-asserted-by":"publisher","DOI":"10.1109\/isscc42613.2021.9365791"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.18653\/v1\/2020.emnlp-main.19"},{"key":"ref3","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC.2016.7418007"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC42613.2021.9365766"},{"key":"ref5","article-title":"Generating long sequences with sparse transformers","author":"Child","year":"2019","journal-title":"arXiv:1904.10509"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC42614.2022.9731754"},{"key":"ref7","doi-asserted-by":"publisher","DOI":"10.1109\/hpca47549.2020.00035"},{"key":"ref8","doi-asserted-by":"publisher","DOI":"10.1109\/ISCA52012.2021.00060"},{"key":"ref9","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC42614.2022.9731715"},{"key":"ref10","doi-asserted-by":"publisher","DOI":"10.1109\/JSSC.2023.3234893"},{"key":"ref11","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC.2018.8310262"},{"key":"ref12","first-page":"13","article-title":"VilBERT: Pretraining task-agnostic visiolinguistic representations for vision-and-language tasks","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"32","author":"Lu"},{"key":"ref13","doi-asserted-by":"publisher","DOI":"10.1145\/3466752.3480125"},{"key":"ref14","first-page":"14200","article-title":"Attention bottlenecks for multimodal fusion","volume-title":"Proc. Adv. Neural Inf. Process. Syst.","volume":"34","author":"Nagrani"},{"key":"ref15","doi-asserted-by":"publisher","DOI":"10.1145\/3503222.3507738"},{"key":"ref16","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR52688.2022.01743"},{"key":"ref17","doi-asserted-by":"publisher","DOI":"10.1109\/CVPR46437.2021.01660"},{"key":"ref18","doi-asserted-by":"publisher","DOI":"10.1109\/isscc42614.2022.9731762"},{"key":"ref19","doi-asserted-by":"publisher","DOI":"10.1109\/JSSC.2020.3021661"},{"key":"ref20","doi-asserted-by":"publisher","DOI":"10.1109\/isscc42614.2022.9731645"},{"key":"ref21","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC42615.2023.10067842"},{"key":"ref22","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC42614.2022.9731686"},{"key":"ref23","doi-asserted-by":"publisher","DOI":"10.1609\/aaai.v36i3.20202"},{"key":"ref24","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC42613.2021.9365769"},{"key":"ref25","doi-asserted-by":"publisher","DOI":"10.1109\/JSSC.2017.2778281"},{"key":"ref26","doi-asserted-by":"publisher","DOI":"10.1109\/ISSCC42613.2021.9365958"},{"key":"ref27","first-page":"17283","article-title":"Big bird: Transformers for longer sequences","volume-title":"Proc. Adv. Neural Inf. Process. 
Syst.","volume":"33","author":"Zaheer"}],"container-title":["IEEE Journal of Solid-State Circuits"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/4\/10375801\/10226612.pdf?arnumber=10226612","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2024,1,12]],"date-time":"2024-01-12T00:31:41Z","timestamp":1705019501000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/10226612\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,1]]},"references-count":27,"journal-issue":{"issue":"1"},"URL":"https:\/\/doi.org\/10.1109\/jssc.2023.3305663","relation":{},"ISSN":["0018-9200","1558-173X"],"issn-type":[{"value":"0018-9200","type":"print"},{"value":"1558-173X","type":"electronic"}],"subject":[],"published":{"date-parts":[[2024,1]]}}}