@article{cmes.2023.024332,
  author   = {Gu, Zefeng and Chen, Hua},
  title    = {Knowledge Graph Representation Learning Based on Automatic Network Search for Link Prediction},
  journal  = {Computer Modeling in Engineering \& Sciences},
  volume   = {135},
  number   = {3},
  pages    = {2497--2514},
  year     = {2023},
  issn     = {1526-1506},
  doi      = {10.32604/cmes.2023.024332},
  url      = {http://www.techscience.com/CMES/v135n3/50510},
  abstract = {Link prediction, also known as Knowledge Graph Completion (KGC), is the common task in Knowledge Graphs (KGs) to predict missing connections between entities. Most existing methods focus on designing shallow, scalable models, which have less expressive than deep, multi-layer models. Furthermore, most operations like addition, matrix multiplications or factorization are handcrafted based on a few known relation patterns in several well-known datasets, such as FB15k, WN18, etc. However, due to the diversity and complex nature of real-world data distribution, it is inherently difficult to preset all latent patterns. To address this issue, we propose KGE-ANS, a novel knowledge graph embedding framework for general link prediction tasks using automatic network search. KGE-ANS can learn a deep, multi-layer effective architecture to adapt to different datasets through neural architecture search. In addition, the general search space we designed is tailored for KG tasks. We perform extensive experiments on benchmark datasets and the dataset constructed in this paper. The results show that our KGE-ANS outperforms several state-of-the-art methods, especially on these datasets with complex relation patterns.},
}