@Article{cmc.2023.029999,
  AUTHOR   = {Sunil Kumar and Manisha Jailia and Sudeep Varshney and Nitish Pathak and Shabana Urooj and Nouf Abd Elmunim},
  TITLE    = {Robust Vehicle Detection Based on Improved You Look Only Once},
  JOURNAL  = {Computers, Materials \& Continua},
  VOLUME   = {74},
  NUMBER   = {2},
  YEAR     = {2023},
  PAGES    = {3561--3577},
  URL      = {http://www.techscience.com/cmc/v74n2/50210},
  ISSN     = {1546-2226},
  DOI      = {10.32604/cmc.2023.029999},
  ABSTRACT = {Vehicle detection remains challenging for intelligent transportation systems (ITS) to achieve satisfactory performance. Existing one-stage and two-stage methods have intrinsic weaknesses that limit vehicle detection performance. With advances in detection technology, deep learning-based methods for vehicle detection have become more popular because they offer higher detection accuracy and speed than existing algorithms. This paper presents a robust vehicle detection technique based on Improved You Look Only Once (RVD-YOLOv5) to enhance vehicle detection accuracy. The proposed method works in three phases: in the first phase, the K-means algorithm clusters the datasets to generate the object classes; in the second phase, YOLOv5 is applied to create the bounding boxes, and the Non-Maximum Suppression (NMS) technique eliminates overlapping vehicle bounding boxes; in the third phase, the CIoU loss function is employed to obtain accurate regression bounding boxes of the vehicles. The simulation results show that the proposed method outperforms other state-of-the-art techniques, namely the Lightweight Dilated Convolutional Neural Network (LD-CNN), Single Shot Detector (SSD), YOLOv3 and YOLOv4, on performance metrics such as precision, recall, mAP and F1-Score. The simulation and analysis are carried out on the PASCAL VOC 2007, PASCAL VOC 2012 and MS COCO 2017 datasets. Finally, RVD-YOLOv5 achieves an mAP of 98.6%, with precision, recall and F1-Score of 98%, 96.2% and 97.09%, respectively.}
}