@article{10.3844/jcssp.2025.3051.3080,
  article_type  = {journal},
  title         = {An Occlusion Aware Facial Expression Recognition Model Using Fitness Based Cheetah Optimizer and Adaptive Multi-Scale {ViT-CNN} With Attention Mechanism},
  author        = {Prasad, A. Reddy and Rajesh, A.},
  journal       = {Journal of Computer Science},
  volume        = {21},
  number        = {12},
  year          = {2026},
  month         = jan,
  pages         = {3051--3080},
  doi           = {10.3844/jcssp.2025.3051.3080},
  url           = {https://thescipub.com/abstract/jcssp.2025.3051.3080},
  publisher     = {Science Publications},
  internal-note = {NOTE(review): year=2026 conflicts with the 2025 embedded in the DOI and URL -- confirm publication year against the publisher record},
  abstract      = {As a highly nuanced aspect of human communication, facial expression recognition presents a computationally complex problem, making it a prominent area of research in computer vision and affective computing. Problems like poor image quality, occlusions, inconsistent illumination, and head attitude changes are frequently observed in images taken from unstructured sources such as the internet that affect the accuracy of facial expression performance. With the aim of resolving these issues, an innovative occluded Facial Expression Recognition (FER) using an advanced deep learning model is proposed. For recognizing facial expressions, images are gathered in benchmark sources. The Viola-Jones (VJ) facial detector model is processed using the collected images. The detected face images from the VJ are given to the Regions of Interest (ROI) extraction process. The extracted ROI is passed to the Adaptive and Multiscale Vision Transformer-Convolutional Neural Network with Attention Mechanism (AMViTCNN-AM) for recognizing facial expressions. AMViTCNN-AM accurately identifies the expression in the face images even in the presence of occlusion. To get better performance in the FER process, the parameters in the network are optimized by the Fitness-based Cheetah Optimizer (F-CO). Experiments are carried out to prove the efficiency of the designed framework. The outcomes show that the implemented approach attained an accuracy value of 98.43%, which proves the potential of a developed deep learning model in the FER.},
}