@article{cmc.2023.035716,
  author   = {Waqas, Umer and Visser, Jesse Wiebe and Choe, Hana and Lee, Donghun},
  title    = {Multimodal Fused Deep Learning Networks for Domain Specific Image Similarity Search},
  journal  = {Computers, Materials \& Continua},
  year     = {2023},
  volume   = {75},
  number   = {1},
  pages    = {243--258},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2023.035716},
  url      = {http://www.techscience.com/cmc/v75n1/51508},
  abstract = {The exponential increase in data over the past few years, particularly in images, has led to more complex content since visual representation became the new norm. E-commerce and similar platforms maintain large image catalogues of their products. In image databases, searching and retrieving similar images is still a challenge, even though several image retrieval techniques have been proposed over the decade. Most of these techniques work well when querying general image databases. However, they often fail in domain-specific image databases, especially for datasets with low intraclass variance. This paper proposes a domain-specific image similarity search engine based on a fused deep learning network. The network is comprised of an improved object localization module, a classification module to narrow down search options and finally a feature extraction and similarity calculation module. The network features both an offline stage for indexing the dataset and an online stage for querying. The dataset used to evaluate the performance of the proposed network is a custom domain-specific dataset related to cosmetics packaging gathered from various online platforms. The proposed method addresses the intraclass variance problem with more precise object localization and the introduction of top result reranking based on object contours. Finally, quantitative and qualitative experiment results are presented, showing improved image similarity search performance.},
}