From a30d07bb7fe177cd0dfc3e43f0314c816b9c1044 Mon Sep 17 00:00:00 2001 From: xcfeng <42307427+xcfcode@users.noreply.github.com> Date: Tue, 9 Jul 2024 12:26:20 +0800 Subject: [PATCH] Update papers.bib --- _bibliography/papers.bib | 394 +++++++++++++++++++-------------------- 1 file changed, 195 insertions(+), 199 deletions(-) diff --git a/_bibliography/papers.bib b/_bibliography/papers.bib index 2521599..569bff6 100644 --- a/_bibliography/papers.bib +++ b/_bibliography/papers.bib @@ -1,9 +1,5 @@ --- --- - - - - @article{Zhao2024BBABB, title = {BBA: Bi-Modal Behavioral Alignment for Reasoning with Large Vision-Language Models}, author = {Xueliang Zhao and Xinting Huang and Tingchen Fu and Qintong Li and Shansan Gong and Lemao Liu and Wei Bi and Lingpeng Kong}, @@ -157,21 +153,75 @@ @article{Dong2023StatisticalKA arxiv = {2305.10519} } -@inproceedings{Wu2022SelfAdaptiveIL, - title = {Self-Adaptive In-Context Learning: An Information Compression Perspective for In-Context Example Selection and Ordering}, - author = {Zhiyong Wu and Yaoxiang Wang and Jiacheng Ye and Lingpeng Kong}, - booktitle = {In Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL)}, - year = {2023}, - arxiv = {2212.10375}, - code = {https://github.com/Shark-NLP/self-adaptive-ICL} +@article{Wu2022SelfAdaptiveIL, + title = {Self-Adaptive In-Context Learning: An Information Compression Perspective for In-Context Example Selection and Ordering}, + author = {Zhiyong Wu and Yaoxiang Wang and Jiacheng Ye and Lingpeng Kong}, + journal = {In Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL)}, + year = {2023}, + arxiv = {2212.10375}, + code = {https://github.com/Shark-NLP/self-adaptive-ICL} +} + +@article{Li2022ExplanationRV, + title = {Explanation Regeneration via Information Bottleneck}, + author = {Qintong Li and Zhiyong Wu and Lingpeng Kong and Wei Bi}, + journal = {In Findings of the Annual Meeting of the Association for 
Computational Linguistics (ACL Findings)}, + year = {2023}, + arxiv = {2212.09603} } -@inproceedings{Li2022ExplanationRV, - title = {Explanation Regeneration via Information Bottleneck}, - author = {Qintong Li and Zhiyong Wu and Lingpeng Kong and Wei Bi}, - booktitle = {In Findings of the Annual Meeting of the Association for Computational Linguistics (ACL Findings)}, - year = {2023}, - arxiv = {2212.09603} +@article{Jiang2023ACS, + title = {A Cognitive Stimulation Dialogue System with Multi-source Knowledge Fusion for Elders with Cognitive Impairment}, + author = {Jiyue Jiang and Sheng Wang and Qintong Li and Lingpeng Kong and Chuan Wu}, + journal = {In Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL)}, + year = {2023}, + arxiv = {2305.08200} +} + + +@article{Ye2023CompositionalEF, + title = {Compositional Exemplars for In-context Learning}, + author = {Jiacheng Ye and Zhiyong Wu and Jiangtao Feng and Tao Yu and Lingpeng Kong}, + journal = {In Proceedings of the International Conference on Machine Learning (ICML)}, + year = {2023}, + arxiv = {2302.05698}, + code = {https://github.com/HKUNLP/icl-ceil} +} + +@article{Zhang2022CABCA, + title = {CAB: Comprehensive Attention Benchmarking on Long Sequence Modeling}, + author = {Jinchao Zhang and Shuyang Jiang and Jiangtao Feng and Lin Zheng and Lingpeng Kong}, + journal = {In Proceedings of the International Conference on Machine Learning (ICML)}, + year = {2023}, + arxiv = {2210.07661}, + code = {https://github.com/Shark-NLP/CAB} +} + +@article{Zheng2023EfficientAV, + title = {Efficient Attention via Control Variates}, + author = {Lin Zheng and Jianbo Yuan and Chong Wang and Lingpeng Kong}, + journal = {In International Conference on Learning Representations (ICLR)}, + year = {2023}, + arxiv = {2302.04542}, + code = {https://github.com/HKUNLP/efficient-attention} +} + +@article{Gong2022DiffuSeqST, + title = {DiffuSeq: Sequence to Sequence Text Generation with Diffusion Models}, + 
author = {Shansan Gong and Mukai Li and Jiangtao Feng and Zhiyong Wu and Lingpeng Kong}, + journal = {In International Conference on Learning Representations (ICLR)}, + year = {2023}, + arxiv = {2210.08933}, + code = {https://github.com/Shark-NLP/DiffuSeq} +} + +@article{Ye2022ProGenPZ, + title = {ProGen: Progressive Zero-shot Dataset Generation via In-context Feedback}, + author = {Jiacheng Ye and Jiahui Gao and Jiangtao Feng and Zhiyong Wu and Tao Yu and Lingpeng Kong}, + journal = {In Findings of the Conference on Empirical Methods in Natural Language Processing (EMNLP Findings)}, + year = {2022}, + arxiv = {2210.12329}, + code = {https://github.com/HKUNLP/ProGen} } @@ -199,35 +249,35 @@ @article{lai2022ds } -@inproceedings{liu2022augmenting, - title = {Augmenting Multi-Turn Text-to-SQL Datasets with Self-Play}, - author = {Liu, Qi and Ye, Zihuiwen and Yu, Tao and Blunsom, Phil and Song, Linfeng}, - booktitle = {Findings of EMNLP 2022}, - year = {2022}, - note = {Long Paper}, - arxiv = {2210.12096}, - code = {https://github.com/leuchine/self_play_picard} -} - -@inproceedings{cheng2023binding, - title = {Binding Language Models in Symbolic Languages}, - author = {Cheng, Zhoujun and Xie, Tianbao and Shi, Peng and Li, Chengzu and Nadkarni, Rahul and Hu, Yushi and Xiong, Caiming and Radev, Dragomir and Ostendorf, Mari and Zettlemoyer, Luke and Smith, Noah A and Yu, Tao}, - booktitle = {International Conference on Learning Representations (ICLR 2023)}, - location = {Berlin, Germany}, - year = {2023}, - arxiv = {2210.02875}, - code = {https://lm-code-binder.github.io/}, - poster = {https://lm-code-binder.github.io/}, - selected = {y} +@article{liu2022augmenting, + title = {Augmenting Multi-Turn Text-to-SQL Datasets with Self-Play}, + author = {Liu, Qi and Ye, Zihuiwen and Yu, Tao and Blunsom, Phil and Song, Linfeng}, + journal = {Findings of EMNLP 2022}, + year = {2022}, + note = {Long Paper}, + arxiv = {2210.12096}, + code = 
{https://github.com/leuchine/self_play_picard} +} + +@article{cheng2023binding, + title = {Binding Language Models in Symbolic Languages}, + author = {Cheng, Zhoujun and Xie, Tianbao and Shi, Peng and Li, Chengzu and Nadkarni, Rahul and Hu, Yushi and Xiong, Caiming and Radev, Dragomir and Ostendorf, Mari and Zettlemoyer, Luke and Smith, Noah A. and Yu, Tao}, + journal = {International Conference on Learning Representations (ICLR)}, + location = {Kigali, Rwanda}, + year = {2023}, + arxiv = {2210.02875}, + code = {https://lm-code-binder.github.io/}, + poster = {https://lm-code-binder.github.io/}, + selected = {y} } -@inproceedings{su2023selective, - title = {Selective Annotation Makes Language Models Better Few-Shot Learners}, - author = {Su, Hongjin and Kasai, Jungo and Wu, Chen Henry and Shi, Weijia and Wang, Tianlu and Xin, Jiayi and Zhang, Rui and Ostendorf, Mari and Zettlemoyer, Luke and Smith, Noah A. and Yu, Tao}, - booktitle = {International Conference on Learning Representations (ICLR 2023)}, - year = {2023}, - arxiv = {2209.01975}, - code = {https://github.com/HKUNLP/icl-selective-annotation} +@article{su2023selective, + title = {Selective Annotation Makes Language Models Better Few-Shot Learners}, + author = {Su, Hongjin and Kasai, Jungo and Wu, Chen Henry and Shi, Weijia and Wang, Tianlu and Xin, Jiayi and Zhang, Rui and Ostendorf, Mari and Zettlemoyer, Luke and Smith, Noah A. 
and Yu, Tao}, + journal = {International Conference on Learning Representations (ICLR)}, + year = {2023}, + arxiv = {2209.01975}, + code = {https://github.com/HKUNLP/icl-selective-annotation} } @article{wang2022evaluating, @@ -246,135 +296,107 @@ @article{wang2022augmenting arxiv = {2206.00362} } -@inproceedings{ye2022zerogen, - title = {ZeroGen: Efficient Zero-shot Learning via Dataset Generation}, - author = {Ye, Jiacheng and Gao, Jiahui and Li, Qintong and Xu, Hang and Feng, Jiangtao and Wu, Zhiyong and Yu, Tao and Kong, Lingpeng}, - booktitle = {Empirical Methods in Natural Language Processing (EMNLP 2022)}, - year = {2022}, - arxiv = {2202.07922}, - code = {https://github.com/jiacheng-ye/ZeroGen} -} - -@inproceedings{xie2022unifiedskg, - title = {UnifiedSKG: Unifying and Multi-Tasking Structured Knowledge Grounding with Text-to-Text Language Models}, - author = {Xie, Tianbao and Wu, Chen Henry and Shi, Peng and Zhong, Ruiqi and Scholak, Torsten and Yasunaga, Michihiro and Wu, Chien-Sheng and Zhong, Ming and Yin, Pengcheng and Wang, Sida and Zhong, Victor and Wang, Bailin and Li, Chengzu and Boyle, Connor and Ni, Ansong and Yao, Ziyu and Radev, Dragomir and Xiong, Caiming and Kong, Lingpeng and Zhang, Rui and Smith, Noah A. 
and Zettlemoyer, Luke and Yu, Tao}, - booktitle = {Empirical Methods in Natural Language Processing (EMNLP 2022)}, - year = {2022}, - arxiv = {2201.05966}, - code = {https://github.com/hkunlp/unifiedskg}, - poster = {https://unifiedskg.com/} -} - -@inproceedings{zheng2021cascaded, - title = {Cascaded Head-colliding Attention}, - author = {Lin Zheng and Zhiyong Wu and Lingpeng Kong}, - booktitle = {Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL 2021)}, - year = {2021}, - arxiv = {2105.14850}, - code = {https://github.com/LZhengisme/CODA} -} - -@inproceedings{wu2021good, - title = {Good for Misconceived Reasons: An Empirical Revisiting on the Need for Visual Context in Multimodal Machine Translation}, - author = {Zhiyong Wu and Lingpeng Kong and Wei Bi and Xiang Li and Ben Kao}, - booktitle = {Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL 2021)}, - year = {2021}, - arxiv = {2105.14462}, - code = {https://github.com/LividWo/Revisit-MMT} -} - -@inproceedings{zheng2022linear, - title = {Linear Complexity Randomized Self-attention Mechanism}, - author = {Lin Zheng and Chong Wang and Lingpeng Kong}, - booktitle = {Proceedings of the International Conference on Machine Learning (ICML 2022)}, - year = {2022}, - arxiv = {2204.04667}, - code = {https://github.com/HKUNLP/efficient-attention} -} - -@inproceedings{zheng2022ripple, - title = {Ripple Attention for Visual Perception with Sub-quadratic Complexity}, - author = {Lin Zheng and Huijie Pan and Lingpeng Kong}, - booktitle = {Proceedings of the International Conference on Machine Learning (ICML 2022)}, - year = {2022}, - arxiv = {2110.02453} -} - -@inproceedings{prange2022linguistic, - title = {Linguistic Frameworks Go Toe-to-Toe at Neuro-Symbolic Language Modeling}, - author = {Jakob Prange and Nathan Schneider and Lingpeng Kong}, - booktitle = {Proceedings of the Conference of the North American Chapter of the Association for Computational 
Linguistics (NAACL 2022)}, - year = {2022}, - arxiv = {2112.07874}, - code = {https://github.com/jakpra/LinguisticStructureLM} -} - -@inproceedings{li2022event, - title = {Event Transition Planning for Open-ended Text Generation}, - author = {Qintong Li and Piji Li and Wei Bi and Zhaochun Ren and Yuxuan Lai and Lingpeng Kong}, - booktitle = {Findings of the Annual Meeting of the Association for Computational Linguistics (ACL 2022 Findings)}, - year = {2022}, - arxiv = {2204.09453}, - code = {https://github.com/qtli/EventPlanforTextGen} -} - -@inproceedings{wu2022lexical, - title = {Lexical Knowledge Internalization for Neural Dialog Generation}, - author = {Zhiyong Wu and Wei Bi and Xiang Li and Lingpeng Kong and Ben Kao}, - booktitle = {Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL 2022)}, - year = {2022}, - arxiv = {2205.01941}, - code = {https://github.com/LividWo/KI} -} - -@inproceedings{ye2022progen, - title = {ProGen: Progressive Zero-shot Dataset Generation via In-context Feedback}, - author = {Jiacheng Ye and Jiahui Gao and Zhiyong Wu and Jiangtao Feng and Tao Yu and Lingpeng Kong}, - booktitle = {Findings of the Conference on Empirical Methods in Natural Language Processing (EMNLP 2022 Findings)}, - location = {Abu Dhabi}, - year = {2022}, - arxiv = {2210.12329}, - code = {https://github.com/HKUNLP/ProGen} -} - -@inproceedings{zheng2023efficient, - title = {Efficient Attention via Control Variates}, - author = {Lin Zheng and Jianbo Yuan and Chong Wang and Lingpeng Kong}, - booktitle = {International Conference on Learning Representations (ICLR 2023)}, - location = {Kigali, Rwanda}, - year = {2023}, - arxiv = {2302.04542}, - code = {https://github.com/HKUNLP/efficient-attention} -} - -@inproceedings{gao2023selfguided, - title = {Self-Guided Noise-Free Data Generation for Efficient Zero-Shot Learning}, - author = {Jiahui Gao and Renjie Pi and Lin Yong and Hang Xu and Jiacheng Ye and Zhiyong Wu and Weizhong Zhang and 
Xiaodan Liang and Zhenguo Li and Lingpeng Kong}, - booktitle = {International Conference on Learning Representations (ICLR 2023)}, - location = {Kigali, Rwanda}, - year = {2023}, - arxiv = {2205.12679}, - code = {https://github.com/SumilerGAO/SunGen} -} - -@inproceedings{gong2023diffuseq, - title = {DiffuSeq: Sequence to Sequence Text Generation with Diffusion Models}, - author = {Shansan Gong and Mukai Li and Jiangtao Feng and Zhiyong Wu and Lingpeng Kong}, - booktitle = {International Conference on Learning Representations (ICLR 2023)}, - location = {Kigali, Rwanda}, - year = {2023}, - arxiv = {2210.08933}, - code = {https://github.com/Shark-NLP/DiffuSeq} -} - -@inproceedings{chen2023unsupervised, - title = {Unsupervised Explanation Generation via Correct Instantiations}, - author = {Sijie Chen and Zhiyong Wu and Jiangjie Chen and Zhixing Li and Yang Liu and Lingpeng Kong}, - booktitle = {Proceedings of AAAI Conference on Artificial Intelligence (AAAI 2023)}, - location = {Washington, DC}, - year = {2023}, - arxiv = {2211.11160}, - code = {https://github.com/Shark-NLP/Neon} +@article{ye2022zerogen, + title = {ZeroGen: Efficient Zero-shot Learning via Dataset Generation}, + author = {Ye, Jiacheng and Gao, Jiahui and Li, Qintong and Xu, Hang and Feng, Jiangtao and Wu, Zhiyong and Yu, Tao and Kong, Lingpeng}, + journal = {Empirical Methods in Natural Language Processing (EMNLP)}, + year = {2022}, + arxiv = {2202.07922}, + code = {https://github.com/jiacheng-ye/ZeroGen} +} + +@article{xie2022unifiedskg, + title = {UnifiedSKG: Unifying and Multi-Tasking Structured Knowledge Grounding with Text-to-Text Language Models}, + author = {Xie, Tianbao and Wu, Chen Henry and Shi, Peng and Zhong, Ruiqi and Scholak, Torsten and Yasunaga, Michihiro and Wu, Chien-Sheng and Zhong, Ming and Yin, Pengcheng and Wang, Sida and Zhong, Victor and Wang, Bailin and Li, Chengzu and Boyle, Connor and Ni, Ansong and Yao, Ziyu and Radev, Dragomir and Xiong, Caiming and Kong, Lingpeng and 
Zhang, Rui and Smith, Noah A. and Zettlemoyer, Luke and Yu, Tao}, + journal = {Empirical Methods in Natural Language Processing (EMNLP)}, + year = {2022}, + arxiv = {2201.05966}, + code = {https://github.com/hkunlp/unifiedskg}, + poster = {https://unifiedskg.com/} +} + +@article{zheng2021cascaded, + title = {Cascaded Head-colliding Attention}, + author = {Lin Zheng and Zhiyong Wu and Lingpeng Kong}, + journal = {Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL)}, + year = {2021}, + arxiv = {2105.14850}, + code = {https://github.com/LZhengisme/CODA} +} + +@article{wu2021good, + title = {Good for Misconceived Reasons: An Empirical Revisiting on the Need for Visual Context in Multimodal Machine Translation}, + author = {Zhiyong Wu and Lingpeng Kong and Wei Bi and Xiang Li and Ben Kao}, + journal = {Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL)}, + year = {2021}, + arxiv = {2105.14462}, + code = {https://github.com/LividWo/Revisit-MMT} +} + +@article{zheng2022linear, + title = {Linear Complexity Randomized Self-attention Mechanism}, + author = {Lin Zheng and Chong Wang and Lingpeng Kong}, + journal = {Proceedings of the International Conference on Machine Learning (ICML)}, + year = {2022}, + arxiv = {2204.04667}, + code = {https://github.com/HKUNLP/efficient-attention} +} + +@article{zheng2022ripple, + title = {Ripple Attention for Visual Perception with Sub-quadratic Complexity}, + author = {Lin Zheng and Huijie Pan and Lingpeng Kong}, + journal = {Proceedings of the International Conference on Machine Learning (ICML)}, + year = {2022}, + arxiv = {2110.02453} +} + +@article{prange2022linguistic, + title = {Linguistic Frameworks Go Toe-to-Toe at Neuro-Symbolic Language Modeling}, + author = {Jakob Prange and Nathan Schneider and Lingpeng Kong}, + journal = {Proceedings of the Conference of the North American Chapter of the Association for Computational Linguistics (NAACL)}, + year = 
{2022}, + arxiv = {2112.07874}, + code = {https://github.com/jakpra/LinguisticStructureLM} +} + +@article{li2022event, + title = {Event Transition Planning for Open-ended Text Generation}, + author = {Qintong Li and Piji Li and Wei Bi and Zhaochun Ren and Yuxuan Lai and Lingpeng Kong}, + journal = {Findings of the Annual Meeting of the Association for Computational Linguistics (ACL Findings)}, + year = {2022}, + arxiv = {2204.09453}, + code = {https://github.com/qtli/EventPlanforTextGen} +} + +@article{wu2022lexical, + title = {Lexical Knowledge Internalization for Neural Dialog Generation}, + author = {Zhiyong Wu and Wei Bi and Xiang Li and Lingpeng Kong and Ben Kao}, + journal = {Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL)}, + year = {2022}, + arxiv = {2205.01941}, + code = {https://github.com/LividWo/KI} +} + + + +@article{gao2023selfguided, + title = {Self-Guided Noise-Free Data Generation for Efficient Zero-Shot Learning}, + author = {Jiahui Gao and Renjie Pi and Lin Yong and Hang Xu and Jiacheng Ye and Zhiyong Wu and Weizhong Zhang and Xiaodan Liang and Zhenguo Li and Lingpeng Kong}, + journal = {International Conference on Learning Representations (ICLR)}, + location = {Kigali, Rwanda}, + year = {2023}, + arxiv = {2205.12679}, + code = {https://github.com/SumilerGAO/SunGen} +} + +@article{chen2023unsupervised, + title = {Unsupervised Explanation Generation via Correct Instantiations}, + author = {Sijie Chen and Zhiyong Wu and Jiangjie Chen and Zhixing Li and Yang Liu and Lingpeng Kong}, + journal = {Proceedings of AAAI Conference on Artificial Intelligence (AAAI)}, + location = {Washington, DC}, + year = {2023}, + arxiv = {2211.11160}, + code = {https://github.com/Shark-NLP/Neon} } @article{ma2023retrieved, @@ -395,31 +417,5 @@ @article{zheng2023reparameterized code = {https://github.com/HKUNLP/reparam-discrete-diffusion} } -@inproceedings{ye2023compositional, - title = {Compositional Exemplars for In-context 
Learning}, - author = {Jiacheng Ye and Zhiyong Wu and Jiangtao Feng and Tao Yu and Lingpeng Kong}, - booktitle = {Proceedings of the International Conference on Machine Learning (ICML 2023)}, - location = {Honolulu, Hawaii}, - year = {2023}, - arxiv = {2302.05698}, - code = {https://github.com/HKUNLP/icl-ceil} -} -@article{wu2022selfadaptive, - title = {Self-adaptive In-context Learning}, - author = {Zhiyong Wu and Yaoxiang Wang and Jiacheng Ye and Lingpeng Kong}, - journal = {arXiv preprint arXiv:2212.10375}, - year = {2022}, - arxiv = {2212.10375}, - code = {https://github.com/Shark-NLP/self-adaptive-ICL} -} -@inproceedings{zhang2022cab, - title = {CAB: Comprehensive Attention Benchmarking on Long Sequence Modeling}, - author = {Jun Zhang and Shuyang Jiang and Jiangtao Feng and Lin Zheng and Lingpeng Kong}, - booktitle = {Proceedings of the International Conference on Machine Learning (ICML 2023)}, - location = {Honolulu, Hawaii}, - year = {2023}, - arxiv = {2210.07661}, - code = {https://github.com/Shark-NLP/CAB} -}