{ "nbformat": 4, "nbformat_minor": 0, "metadata": { "colab": { "provenance": [], "collapsed_sections": [ "wDV7ysXf2b_H", "Jjacw9Mp2eoX", "ciSPyhRc3Rvo" ] }, "kernelspec": { "name": "python3", "display_name": "Python 3" }, "language_info": { "name": "python" } }, "cells": [ { "cell_type": "markdown", "metadata": { "id": "WEY5MiKLzurH" }, "source": [ "# Setup Environment" ] }, { "cell_type": "code", "source": [ "! pip install hazm==0.10.0" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 1000 }, "id": "euO_NTvwG0HW", "outputId": "18d7ea5b-baeb-4d73-afa2-254ac5642fac" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Collecting hazm==0.10.0\n", " Downloading hazm-0.10.0-py3-none-any.whl.metadata (11 kB)\n", "Collecting fasttext-wheel<0.10.0,>=0.9.2 (from hazm==0.10.0)\n", " Downloading fasttext_wheel-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (16 kB)\n", "Collecting flashtext<3.0,>=2.7 (from hazm==0.10.0)\n", " Downloading flashtext-2.7.tar.gz (14 kB)\n", " Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", "Collecting gensim<5.0.0,>=4.3.1 (from hazm==0.10.0)\n", " Downloading gensim-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (8.1 kB)\n", "Requirement already satisfied: nltk<4.0.0,>=3.8.1 in /usr/local/lib/python3.11/dist-packages (from hazm==0.10.0) (3.9.1)\n", "Collecting numpy==1.24.3 (from hazm==0.10.0)\n", " Downloading numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.6 kB)\n", "Collecting python-crfsuite<0.10.0,>=0.9.9 (from hazm==0.10.0)\n", " Downloading python_crfsuite-0.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.3 kB)\n", "Requirement already satisfied: scikit-learn<2.0.0,>=1.2.2 in /usr/local/lib/python3.11/dist-packages (from hazm==0.10.0) (1.6.1)\n", "Collecting pybind11>=2.2 (from fasttext-wheel<0.10.0,>=0.9.2->hazm==0.10.0)\n", " Downloading pybind11-2.13.6-py3-none-any.whl.metadata (9.5 kB)\n", "Requirement already satisfied: setuptools>=0.7.0 in /usr/local/lib/python3.11/dist-packages (from fasttext-wheel<0.10.0,>=0.9.2->hazm==0.10.0) (75.2.0)\n", "Collecting scipy<1.14.0,>=1.7.0 (from gensim<5.0.0,>=4.3.1->hazm==0.10.0)\n", " Downloading scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (60 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m60.6/60.6 kB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hRequirement already satisfied: smart-open>=1.8.1 in /usr/local/lib/python3.11/dist-packages (from gensim<5.0.0,>=4.3.1->hazm==0.10.0) (7.1.0)\n", "Requirement already satisfied: click in /usr/local/lib/python3.11/dist-packages (from nltk<4.0.0,>=3.8.1->hazm==0.10.0) (8.1.8)\n", "Requirement already satisfied: joblib in /usr/local/lib/python3.11/dist-packages (from nltk<4.0.0,>=3.8.1->hazm==0.10.0) (1.4.2)\n", "Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.11/dist-packages (from 
nltk<4.0.0,>=3.8.1->hazm==0.10.0) (2024.11.6)\n", "Requirement already satisfied: tqdm in /usr/local/lib/python3.11/dist-packages (from nltk<4.0.0,>=3.8.1->hazm==0.10.0) (4.67.1)\n", "Requirement already satisfied: threadpoolctl>=3.1.0 in /usr/local/lib/python3.11/dist-packages (from scikit-learn<2.0.0,>=1.2.2->hazm==0.10.0) (3.6.0)\n", "Requirement already satisfied: wrapt in /usr/local/lib/python3.11/dist-packages (from smart-open>=1.8.1->gensim<5.0.0,>=4.3.1->hazm==0.10.0) (1.17.2)\n", "Downloading hazm-0.10.0-py3-none-any.whl (892 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m892.6/892.6 kB\u001b[0m \u001b[31m15.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hDownloading numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.3 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m17.3/17.3 MB\u001b[0m \u001b[31m28.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hDownloading fasttext_wheel-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.4 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.4/4.4 MB\u001b[0m \u001b[31m58.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hDownloading gensim-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (26.7 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m26.7/26.7 MB\u001b[0m \u001b[31m26.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hDownloading python_crfsuite-0.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m45.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hDownloading pybind11-2.13.6-py3-none-any.whl (243 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m 
\u001b[32m243.3/243.3 kB\u001b[0m \u001b[31m14.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hDownloading scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (38.6 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m38.6/38.6 MB\u001b[0m \u001b[31m9.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hBuilding wheels for collected packages: flashtext\n", " Building wheel for flashtext (setup.py) ... \u001b[?25l\u001b[?25hdone\n", " Created wheel for flashtext: filename=flashtext-2.7-py2.py3-none-any.whl size=9300 sha256=e7380f6f98ff10f751d96f3b3233a8814bed40dc9fbcb43bace244e15a39a818\n", " Stored in directory: /root/.cache/pip/wheels/49/20/47/f03dfa8a7239c54cbc44ff7389eefbf888d2c1873edaaec888\n", "Successfully built flashtext\n", "Installing collected packages: flashtext, python-crfsuite, pybind11, numpy, scipy, fasttext-wheel, gensim, hazm\n", " Attempting uninstall: numpy\n", " Found existing installation: numpy 2.0.2\n", " Uninstalling numpy-2.0.2:\n", " Successfully uninstalled numpy-2.0.2\n", " Attempting uninstall: scipy\n", " Found existing installation: scipy 1.15.2\n", " Uninstalling scipy-1.15.2:\n", " Successfully uninstalled scipy-1.15.2\n", "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n", "blosc2 3.3.2 requires numpy>=1.26, but you have numpy 1.24.3 which is incompatible.\n", "thinc 8.3.6 requires numpy<3.0.0,>=2.0.0, but you have numpy 1.24.3 which is incompatible.\n", "treescope 0.1.9 requires numpy>=1.25.2, but you have numpy 1.24.3 which is incompatible.\n", "pymc 5.22.0 requires numpy>=1.25.0, but you have numpy 1.24.3 which is incompatible.\n", "albumentations 2.0.6 requires numpy>=1.24.4, but you have numpy 1.24.3 which is incompatible.\n", "albucore 0.0.24 requires numpy>=1.24.4, but you have numpy 1.24.3 which is incompatible.\n", "tensorflow 2.18.0 requires numpy<2.1.0,>=1.26.0, but you have numpy 1.24.3 which is incompatible.\n", "jax 0.5.2 requires numpy>=1.25, but you have numpy 1.24.3 which is incompatible.\n", "jaxlib 0.5.1 requires numpy>=1.25, but you have numpy 1.24.3 which is incompatible.\u001b[0m\u001b[31m\n", "\u001b[0mSuccessfully installed fasttext-wheel-0.9.2 flashtext-2.7 gensim-4.3.3 hazm-0.10.0 numpy-1.24.3 pybind11-2.13.6 python-crfsuite-0.9.11 scipy-1.13.1\n" ] }, { "output_type": "display_data", "data": { "application/vnd.colab-display-data+json": { "pip_warning": { "packages": [ "numpy" ] }, "id": "f860e129e3a34cef9daac243c26d8728" } }, "metadata": {} } ] }, { "cell_type": "code", "source": [ "!pip install numpy==1.26.0" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Y2cfyWETIpEf", "outputId": "5814b320-2ead-4b47-94e6-3fad4d6bd5ee" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Collecting numpy==1.26.0\n", " Downloading numpy-1.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (58 kB)\n", "\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/58.5 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.5/58.5 kB\u001b[0m 
\u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hDownloading numpy-1.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (18.2 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m18.2/18.2 MB\u001b[0m \u001b[31m43.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hInstalling collected packages: numpy\n", " Attempting uninstall: numpy\n", " Found existing installation: numpy 1.24.3\n", " Uninstalling numpy-1.24.3:\n", " Successfully uninstalled numpy-1.24.3\n", "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", "hazm 0.10.0 requires numpy==1.24.3, but you have numpy 1.26.0 which is incompatible.\n", "thinc 8.3.6 requires numpy<3.0.0,>=2.0.0, but you have numpy 1.26.0 which is incompatible.\u001b[0m\u001b[31m\n", "\u001b[0mSuccessfully installed numpy-1.26.0\n" ] } ] }, { "cell_type": "code", "source": [ "!pip install pandas==2.1.4" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Qe7BBEZTS7Y6", "outputId": "acae1624-bc7e-4208-e2f0-80b9e66a18ff" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Collecting pandas==2.1.4\n", " Downloading pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (18 kB)\n", "Requirement already satisfied: numpy<2,>=1.23.2 in /usr/local/lib/python3.11/dist-packages (from pandas==2.1.4) (1.26.0)\n", "Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.11/dist-packages (from pandas==2.1.4) (2.9.0.post0)\n", "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.11/dist-packages (from pandas==2.1.4) (2025.2)\n", "Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.11/dist-packages (from pandas==2.1.4) (2025.2)\n", "Requirement already satisfied: six>=1.5 in 
/usr/local/lib/python3.11/dist-packages (from python-dateutil>=2.8.2->pandas==2.1.4) (1.17.0)\n", "Downloading pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (12.2 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m12.2/12.2 MB\u001b[0m \u001b[31m97.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hInstalling collected packages: pandas\n", " Attempting uninstall: pandas\n", " Found existing installation: pandas 2.2.2\n", " Uninstalling pandas-2.2.2:\n", " Successfully uninstalled pandas-2.2.2\n", "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", "google-colab 1.0.0 requires pandas==2.2.2, but you have pandas 2.1.4 which is incompatible.\n", "plotnine 0.14.5 requires pandas>=2.2.0, but you have pandas 2.1.4 which is incompatible.\n", "mizani 0.13.3 requires pandas>=2.2.0, but you have pandas 2.1.4 which is incompatible.\u001b[0m\u001b[31m\n", "\u001b[0mSuccessfully installed pandas-2.1.4\n" ] } ] }, { "cell_type": "code", "source": [ "! git clone https://github.com/AzamRabiee/Persian_G2P.git" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "qPAmCjfcUJ_f", "outputId": "28cb142c-3df0-4f4e-f008-7f29773b6aa6" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Cloning into 'Persian_G2P'...\n", "remote: Enumerating objects: 35, done.\u001b[K\n", "remote: Counting objects: 100% (6/6), done.\u001b[K\n", "remote: Compressing objects: 100% (6/6), done.\u001b[K\n", "remote: Total 35 (delta 1), reused 0 (delta 0), pack-reused 29 (from 1)\u001b[K\n", "Receiving objects: 100% (35/35), 614.07 KiB | 3.96 MiB/s, done.\n", "Resolving deltas: 100% (9/9), done.\n" ] } ] }, { "cell_type": "code", "source": [ "! 
pip install num2fawords" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "53Wr50lQVFKe", "outputId": "ab081b4f-3ea1-4448-e68a-c7761cdd554c" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Collecting num2fawords\n", " Downloading num2fawords-1.1-py3-none-any.whl.metadata (4.1 kB)\n", "Downloading num2fawords-1.1-py3-none-any.whl (9.8 kB)\n", "Installing collected packages: num2fawords\n", "Successfully installed num2fawords-1.1\n" ] } ] }, { "cell_type": "code", "source": [ "! pip install Distance" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "YOYSz85eVPhk", "outputId": "c69b78f5-f273-49e9-a9a6-ecb37ad63b82" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Collecting Distance\n", " Downloading Distance-0.1.3.tar.gz (180 kB)\n", "\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/180.3 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[91m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[91m╸\u001b[0m\u001b[90m━\u001b[0m \u001b[32m174.1/180.3 kB\u001b[0m \u001b[31m5.7 MB/s\u001b[0m eta \u001b[36m0:00:01\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m180.3/180.3 kB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n", "Building wheels for collected packages: Distance\n", " Building wheel for Distance (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", " Created wheel for Distance: filename=Distance-0.1.3-py3-none-any.whl size=16256 sha256=4eae7bc18b3a6f86cfdd2471b5159e0257a7276ed17d95f03216b27931d5838e\n", " Stored in directory: /root/.cache/pip/wheels/fb/cd/9c/3ab5d666e3bcacc58900b10959edd3816cc9557c7337986322\n", "Successfully built Distance\n", "Installing collected packages: Distance\n", "Successfully installed Distance-0.1.3\n" ] } ] }, { "cell_type": "code", "source": [ "! pip install jiwer" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "vMY8rtJX7mwy", "outputId": "4d8413fd-330f-4517-a52d-dedcca5c6524" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Collecting jiwer\n", " Downloading jiwer-3.1.0-py3-none-any.whl.metadata (2.6 kB)\n", "Requirement already satisfied: click>=8.1.8 in /usr/local/lib/python3.11/dist-packages (from jiwer) (8.1.8)\n", "Collecting rapidfuzz>=3.9.7 (from jiwer)\n", " Downloading rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)\n", "Downloading jiwer-3.1.0-py3-none-any.whl (22 kB)\n", "Downloading rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.1/3.1 MB\u001b[0m \u001b[31m26.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hInstalling collected packages: rapidfuzz, jiwer\n", "Successfully installed jiwer-3.1.0 rapidfuzz-3.13.0\n" ] } ] }, { "cell_type": "code", "source": [ "import os\n", "import re\n", "from tqdm import tqdm\n", "import csv\n", "import pandas as pd\n", "import json\n", "import itertools\n", "from jiwer import cer" ], "metadata": { "id": "LtiXrEaI7svO" }, "execution_count": null, "outputs": [] }, { "cell_type": "markdown", "source": [ "# Setup Model" ], "metadata": { "id": "bfqjC8pN7viW" } }, { "cell_type": "code", "source": [ "! 
import subprocess

def run_script_with_args(sent):
    """Run the Persian_G2P converter (g2p.py) on `sent` and return its output.

    Parameters:
        sent: the Persian sentence to transliterate (passed to g2p.py
            via its --text argument).

    Returns:
        The stdout of g2p.py on success, or '' if the script exited with a
        non-zero status or could not be launched at all.
    """
    try:
        command = ["python", "g2p.py", "--text", sent]
        result = subprocess.run(command, capture_output=True, text=True)

        if result.returncode == 0:
            return result.stdout

        print(f"An error occurred: {result.stderr}")
        return ''

    except Exception as e:
        # BUG FIX: this path previously fell through and returned None,
        # unlike the error branch above which returns ''. Always return a
        # string so callers can safely do string operations on the result.
        print(f"An unexpected error occurred: {str(e)}")
        return ''
# Map of stray IPA / transliteration symbols produced by G2P tools to the
# canonical phoneme alphabet used by the benchmark (values may be multi-char,
# e.g. 'ŋ' -> 'ng', or '' to delete the character).
# NOTE(review): this literal contains duplicate keys (e.g. 'â', 'š', 'ː',
# 'í', and 'ś' mapped first to 's' and later to 'S'). In a Python dict
# literal the LAST occurrence wins, so the later values are the effective
# ones. The duplicates are kept verbatim so the effective mapping is
# unchanged — verify the intended value for 'ś' ('s' vs 'S') before pruning.
output_to_phonetics_map = {
    'м': 'm',
    'ʷ': ' v',  # NOTE(review): value has a leading space — confirm intended
    'w': 'v',
    'c': 'k',
    'ĉ': 'C',
    'č': 'C',
    '̕': "?",
    "'": '?',
    'ʔ': "?",
    'ꞌ': "?",
    '̛': "?",
    '’': "?",
    'ʼ': "?",
    "'": '?',
    'â': 'A',
    'â': 'A',
    'ȃ': 'A',
    'ž': 'Z',
    'š': 'S',
    'W': 'v',
    'β': 'f',
    'е': 'e',
    '`': "?",
    'ɑ': 'A',
    'ɑ': 'A',
    'ʃ': 'S',
    'ð': 'z',
    'ɾ': 'r',
    'æ': 'a',
    'ɪ': 'e',
    'χ': 'x',
    'ɣ': 'q',
    'ʒ': 'Z',
    ':': '',
    'ː': '',
    'ā': 'A',
    'ː': '',
    'ä': 'A',
    'á': 'A',
    'š': 'S',
    'ū': 'u',
    'û': 'u',
    'ś': 's',
    'ī': 'i',
    'í': 'i',
    'î': 'i',
    'é': 'e',
    'ḥ': 'h',
    'ɒ': 'A',
    'ʰ': '',
    'ə': 'e',
    'R': 'r',
    'W': 'v',
    'Q': 'q',
    'T': 't',
    'Y': 'y',
    'P': 'p',
    'D': 'd',
    'F': 'f',
    'H': 'h',
    'J': 'j',
    'L': 'l',
    'X': 'x',
    'V': 'v',
    'B': 'b',
    'N': 'n',
    'M': 'm',
    'K': 'k',
    'G': 'g',
    'U': 'u',
    'O': 'o',
    'I': 'i',
    'E': 'e',
    'ŋ': 'ng',
    '.': '',
    'ɛ': 'e',
    'ʊ': 'u',
    "ˈ": '?',
    'ù': 'u',
    'θ': 's',
    '̪': '',
    'ũ': 'u',
    '_': '',
    'ç': 'C',
    'ĝ': 'q',
    'ɢ': 'q',
    'ː': '',
    'í': 'i',
    'ŝ': 'S',
    '!': '',
    'ǧ': 'q',
    'ʻ': '?',
    'è': 'e',
    '�': '',
    'ú': 'u',
    'ô': 'o',
    'ē': 'e',
    'à': 'A',
    'ă': 'A',
    'ǐ': 'i',
    'ü': 'u',
    '\u200e': '',  # left-to-right mark, stripped
    'ğ': 'q',
    'ṣ': 'S',
    'â': 'A',
    'â': 'A',
    'ȃ': 'A',
    'ž': 'Z',
    'š': 'S',
    'ā': 'A',
    'ː': '',
    'ä': 'A',
    'á': 'A',
    'š': 'S',
    'ū': 'u',
    'û': 'u',
    'ś': 'S',
    'ī': 'i',
    'í': 'i',
    'î': 'i',
    'é': 'e',
}

# Zero-width lookahead alternations over the phoneme alphabet; presumably
# consumed by downstream splitting/regex code outside this cell — TODO confirm.
consonants_regex = '(?=' + '|'.join(['q', 'r', 't', 'y', 'p', 's', 'd', 'f', 'g', 'h', 'j', 'k', 'l', 'z', 'x', 'c', 'v', 'b', 'n', 'm', 'Q', 'R', 'T', 'Y', 'P', 'S', 'D', 'F', 'G', 'H', 'J', 'K', 'L', 'Z', 'X', 'C', 'V', 'B', 'N', 'M' ]) + ')'
vowels_regex = '(?=' + '|'.join(['a', 'A', 'e', 'i', 'u', 'o']) + ')'


def replace_phonetic_characters(input_string, char_map=output_to_phonetics_map, from_phonetics=False):
    """Normalize a G2P output string into the benchmark phoneme alphabet.

    Multi-character spellings are rewritten first with re.sub (affricate
    'tʃ' variants -> 'C', 'dʒ' variants -> 'j', diphthong 'ow' -> 'o'),
    then each remaining single character is mapped through `char_map`
    via str.translate.

    Parameters:
        input_string: raw phonetic string to normalize.
        char_map: single-character translation map; keys must be single
            characters (str.maketrans requirement), values any string.
            Defaults to output_to_phonetics_map.
        from_phonetics: unused; kept for interface compatibility.

    Returns:
        The normalized string.
    """
    # BUG FIX: the original applied the second re.sub to `input_string`
    # instead of `substituted`, discarding the result of the first
    # substitution. All substitutions now chain on `substituted`.
    substituted = re.sub(r'tʃʰ', 'C', input_string)
    substituted = re.sub(r't͡ʃ', 'C', substituted)
    substituted = re.sub(r'tʃ', 'C', substituted)
    substituted = re.sub(r't͡S', 'C', substituted)
    substituted = re.sub(r'ow', 'o', substituted)
    substituted = re.sub('d͡ʒ', 'j', substituted)
    substituted = re.sub('dʒ', 'j', substituted)

    # Build the single-character translation table and apply it.
    translation_table = str.maketrans(char_map)
    translated = substituted.translate(translation_table)

    return translated
200 OK\n", "Length: 56026 (55K) [text/plain]\n", "Saving to: ‘SentenceBench.csv’\n", "\n", "\rSentenceBench.csv 0%[ ] 0 --.-KB/s \rSentenceBench.csv 100%[===================>] 54.71K --.-KB/s in 0.01s \n", "\n", "2025-05-11 10:20:36 (4.25 MB/s) - ‘SentenceBench.csv’ saved [56026/56026]\n", "\n" ] } ] }, { "cell_type": "code", "source": [ "sentence_bench = pd.read_csv('SentenceBench.csv')" ], "metadata": { "id": "hJO-UAPDQvcb" }, "execution_count": null, "outputs": [] }, { "cell_type": "code", "source": [ "sentence_bench.head(3)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 143 }, "id": "qlYbrnUa9LAN", "outputId": "4b2b2c89-6aa3-4ba7-e2b3-65f89fcafc66" }, "execution_count": null, "outputs": [ { "output_type": "execute_result", "data": { "text/plain": [ " dataset grapheme \\\n", "0 homograph من قدر تو را میدانم \n", "1 homograph از قضای الهی به قدر الهی پناه میبرم \n", "2 homograph به دست و صورتم کرم زدم \n", "\n", " phoneme homograph word \\\n", "0 man qadr-e to rA mi-dAnam قدر \n", "1 ?az qazAy ?elAhi be qadar-e ?elAhi panAh mi-baram قدر \n", "2 be dast-o suratam kerem zadam کرم \n", "\n", " pronunciation \n", "0 qadr \n", "1 qadar \n", "2 kerem " ], "text/html": [ "\n", "
\n", " | dataset | \n", "grapheme | \n", "phoneme | \n", "homograph word | \n", "pronunciation | \n", "
---|---|---|---|---|---|
0 | \n", "homograph | \n", "من قدر تو را میدانم | \n", "man qadr-e to rA mi-dAnam | \n", "قدر | \n", "qadr | \n", "
1 | \n", "homograph | \n", "از قضای الهی به قدر الهی پناه میبرم | \n", "?az qazAy ?elAhi be qadar-e ?elAhi panAh mi-baram | \n", "قدر | \n", "qadar | \n", "
2 | \n", "homograph | \n", "به دست و صورتم کرم زدم | \n", "be dast-o suratam kerem zadam | \n", "کرم | \n", "kerem | \n", "