@article{uninimx5660,
  author   = {Hafeez, Rabab and Anwar, Muhammad Waqas and Jamal, Muhammad Hasan and Fatima, Tayyaba and Mart{\'i}nez Espinosa, Julio C{\'e}sar and Dzul L{\'o}pez, Luis Alonso and Bautista Thompson, Ernesto and Ashraf, Imran},
  title    = {Contextual {Urdu} Lemmatization Using Recurrent Neural Network Models},
  journal  = {Mathematics},
  year     = {2023},
  volume   = {11},
  number   = {2},
  pages    = {435},
  url      = {http://repositorio.unini.edu.mx/id/eprint/5660/},
  abstract = {In the field of natural language processing, machine translation is a colossally developing research area that helps humans communicate more effectively by bridging the linguistic gap. In machine translation, normalization and morphological analyses are the first and perhaps the most important modules for information retrieval (IR). To build a morphological analyzer, or to complete the normalization process, it is important to extract the correct root out of different words. Stemming and lemmatization are techniques commonly used to find the correct root words in a language. However, a few studies on IR systems for the Urdu language have shown that lemmatization is more effective than stemming due to infixes found in Urdu words. This paper presents a lemmatization algorithm based on recurrent neural network models for the Urdu language. However, lemmatization techniques for resource-scarce languages such as Urdu are not very common. The proposed model is trained and tested on two datasets, namely, the Urdu Monolingual Corpus (UMC) and the Universal Dependencies Corpus of Urdu (UDU). The datasets are lemmatized with the help of recurrent neural network models. The Word2Vec model and edit trees are used to generate semantic and syntactic embedding. Bidirectional long short-term memory (BiLSTM), bidirectional gated recurrent unit (BiGRU), bidirectional gated recurrent neural network (BiGRNN), and attention-free encoder--decoder (AFED) models are trained under defined hyperparameters. Experimental results show that the attention-free encoder--decoder model achieves an accuracy, precision, recall, and F-score of 0.96, 0.95, 0.95, and 0.95, respectively, and outperforms existing models.},
  keywords = {neural networks; natural language processing; inflectional morphology; derivational morphology; MSC: 68T50},
}