@inproceedings{011694989693456db02adba1041a821f,
title = "Learning Sequential and Structural Information for Source Code Summarization",
abstract = "We propose a model that learns both the sequential and the structural features of code for source code summarization. We adopt the abstract syntax tree (AST) and graph convolution to model the structural information and the Transformer to model the sequential information. We convert code snippets into ASTs and apply graph convolution to obtain structurally-encoded node representations. Then, the sequences of the graph-convolutioned AST nodes are processed by the Transformer layers. Since structurally-neighboring nodes will have similar representations in graph-convolutioned trees, the Transformer layers can effectively capture not only the sequential information but also the structural information such as sentences or blocks of source code. We show that our model outperforms the state-of-the-art for source code summarization by experiments and human evaluations.",
author = "Choi, \{Yun Seok\} and Bak, \{Jin Yeong\} and Na, \{Cheol Won\} and Lee, \{Jee Hyong\}",
note = "Publisher Copyright: {\textcopyright} 2021 Association for Computational Linguistics; Findings of the Association for Computational Linguistics: ACL-IJCNLP 2021 ; Conference date: 01-08-2021 Through 06-08-2021",
year = "2021",
doi = "10.18653/v1/2021.findings-acl.251",
language = "English",
series = "Findings of the Association for Computational Linguistics: ACL-IJCNLP 2021",
publisher = "Association for Computational Linguistics (ACL)",
pages = "2842--2851",
editor = "Chengqing Zong and Fei Xia and Wenjie Li and Roberto Navigli",
booktitle = "Findings of the Association for Computational Linguistics",
}