diff --git a/Anupriya/Kaggle/kaggle_supervised.ipynb b/Anupriya/Kaggle/kaggle_supervised.ipynb new file mode 100644 index 00000000..c88c2dc3 --- /dev/null +++ b/Anupriya/Kaggle/kaggle_supervised.ipynb @@ -0,0 +1,2562 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "source": [ + "from google.colab import drive\n", + "drive.mount('/content/drive')" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "4NOZaiyCArZN", + "outputId": "f8daafbc-8e5c-4156-be27-f9ef465e733d" + }, + "execution_count": 14, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "import pandas as pd\n", + "\n", + "def load_data(s):\n", + " data=pd.read_csv(s)\n", + " return data\n", + "\n", + "path_a1raw='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/a1_raw.csv'\n", + "path_a2raw='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/a2_raw.csv'\n", + "path_a3raw='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/a3_raw.csv'\n", + "path_b1raw='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/b1_raw.csv'\n", + "path_b3raw='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/b3_raw.csv'\n", + "path_c1raw='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/c1_raw.csv'\n", + "path_c3raw='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/c3_raw.csv'\n", + "\n", + "path_a1='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/a1_va3.csv'\n", + 
"path_a3='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/a3_va3.csv'\n", + "path_test='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/test.csv'\n", + "path_b1='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/b1_va3.csv'\n", + "path_b3='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/b3_va3.csv'\n", + "path_c1='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/c1_va3.csv'\n", + "path_c3='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/c3_va3.csv'\n", + "path_test='/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/test.csv'\n", + "\n", + "data_a1=load_data(path_a1)\n", + "data_a3=load_data(path_a3)\n", + "data_b1=load_data(path_b1)\n", + "data_b3=load_data(path_b3)\n", + "data_c1=load_data(path_c1)\n", + "data_c3=load_data(path_c3)\n", + "data_a1raw=load_data(path_a1raw)\n", + "data_a2raw=load_data(path_a2raw)\n", + "data_a3raw=load_data(path_a3raw)\n", + "data_b1raw=load_data(path_b1raw)\n", + "data_b3raw=load_data(path_b3raw)\n", + "data_c1raw=load_data(path_c1raw)\n", + "data_c3raw=load_data(path_c3raw)\n", + "test=load_data(path_test)\n" + ], + "metadata": { + "id": "3oOZi4n0YtyQ" + }, + "execution_count": 15, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "print(data_a1raw['phase'].unique())\n", + "print(data_a2raw['phase'].unique())\n", + "print(data_a3raw['phase'].unique())\n", + "print(data_b1raw['phase'].unique())\n", + "print(data_b3raw['phase'].unique())\n", + "print(data_c1raw['phase'].unique())\n", + "print(data_c3raw['phase'].unique())\n", + "\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "wwsMc--NV7nr", + "outputId": "64bc90ef-367a-491f-bc18-0fa5680da663" + }, + "execution_count": 16, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "['Rest' 'Preparation' 'Stroke' 'Hold' 'Retraction']\n", + "['Rest' 'Preparation' 'Stroke' 'Retraction' 
'Hold']\n", + "['Rest' 'Preparation' 'Stroke' 'Retraction' 'Hold']\n", + "['Rest' 'Preparation' 'Hold' 'Stroke' 'Retraction' 'Preparação']\n", + "['Rest' 'Preparation' 'Hold' 'Stroke' 'Retraction']\n", + "['Rest' 'Preparation' 'Stroke' 'Hold' 'Retraction']\n", + "['Rest' 'Preparation' 'Stroke' 'Hold' 'Retraction']\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "data_b1raw['phase']=data_b1raw['phase'].replace(['Preparação'],['Preparation'])\n", + "data_b1raw['phase'].unique()" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "6wXLPeIeYoF2", + "outputId": "8085c572-51a8-42eb-da6e-3e723b132527" + }, + "execution_count": 17, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "array(['Rest', 'Preparation', 'Hold', 'Stroke', 'Retraction'],\n", + " dtype=object)" + ] + }, + "metadata": {}, + "execution_count": 17 + } + ] + }, + { + "cell_type": "code", + "source": [ + "framesprocessed=[data_a1,data_a3,data_b1,data_b3,data_c1,data_c3]\n", + "framesraw=[data_a1raw,data_a3raw,data_b1raw,data_b3raw,data_c1raw,data_c3raw]\n", + "\n", + "for i in framesraw:\n", + " i.drop(range(0,4),inplace=True)\n", + " i.reset_index()\n", + "\n", + "final=pd.concat(framesraw)\n", + "final.reset_index(inplace=True)\n", + "finalv=pd.concat(framesprocessed)\n", + "finalv.reset_index(inplace=True)\n", + "final_df=pd.concat([finalv,final],axis=1)\n", + "final_df" + ], + "metadata": { + "id": "0hnNF188SYzD", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 443 + }, + "outputId": "1c43b604-b0de-424c-b509-7facd10b885c" + }, + "execution_count": 18, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + " index 1 2 3 4 5 6 \\\n", + "0 0 -0.005009 -0.000964 0.000573 0.008623 0.005667 0.001302 \n", + "1 1 0.004905 0.001209 -0.000649 0.004737 0.003166 0.000819 \n", + "2 2 -0.002393 -0.000216 0.000136 0.003028 0.001212 0.000336 \n", + "3 3 -0.001394 -0.000242 0.000056 
0.001182 0.000575 0.000225 \n", + "4 4 -0.000156 -0.000004 0.000023 0.001585 0.000630 0.000094 \n", + "... ... ... ... ... ... ... ... \n", + "8608 1439 -0.003709 -0.006168 0.000786 -0.000155 0.001088 -0.000144 \n", + "8609 1440 -0.000727 0.001536 -0.000211 0.000700 -0.000975 0.000067 \n", + "8610 1441 0.003074 0.007870 -0.000962 0.000526 -0.000779 0.000090 \n", + "8611 1442 0.003297 0.008467 -0.001035 0.000578 -0.000740 0.000101 \n", + "8612 1443 0.000204 -0.000040 0.000058 0.000586 -0.000619 0.000087 \n", + "\n", + " 7 8 9 ... sy sz lwx \\\n", + "0 -0.000631 0.000130 -0.000048 ... 4.225485 1.775536 4.983912 \n", + "1 -0.000572 -0.000015 0.000023 ... 4.223284 1.777401 5.000410 \n", + "2 -0.000449 0.000017 0.000047 ... 4.223690 1.777571 5.001656 \n", + "3 -0.000479 -0.000050 0.000104 ... 4.224827 1.777669 5.002672 \n", + "4 -0.000303 0.000097 0.000065 ... 4.223671 1.778054 5.012298 \n", + "... ... ... ... ... ... ... ... \n", + "8608 -0.003815 -0.004658 0.000656 ... 4.199645 1.939572 3.801623 \n", + "8609 -0.001147 0.000177 0.000008 ... 4.199096 1.939843 3.736713 \n", + "8610 0.002786 0.005035 -0.000606 ... 4.200613 1.940351 3.736855 \n", + "8611 0.002947 0.005385 -0.000652 ... 4.200203 1.940679 3.736708 \n", + "8612 0.000229 0.000003 0.000061 ... 4.198929 1.941195 3.736303 \n", + "\n", + " lwy lwz rwx rwy rwz timestamp phase \n", + "0 4.296833 1.569889 5.193762 4.335417 1.560144 5702167 Rest \n", + "1 4.301358 1.566544 5.164159 4.313107 1.552097 5702307 Rest \n", + "2 4.299812 1.566537 5.136817 4.307087 1.551576 5702338 Rest \n", + "3 4.298810 1.566489 5.125220 4.300282 1.550805 5702370 Rest \n", + "4 4.298582 1.565061 5.114789 4.292008 1.549765 5702432 Rest \n", + "... ... ... ... ... ... ... ... 
\n", + "8608 5.192412 1.812156 5.206748 5.086565 1.837070 5432739 Rest \n", + "8609 5.067120 1.828599 5.205452 5.085346 1.837165 5432771 Rest \n", + "8610 5.068608 1.828367 5.202618 5.090534 1.836787 5432808 Rest \n", + "8611 5.067500 1.828450 5.196628 5.095811 1.836236 5432836 Rest \n", + "8612 5.066618 1.828540 5.194844 5.096206 1.836543 5432869 Rest \n", + "\n", + "[8613 rows x 55 columns]" + ], + "text/html": [ + "\n", + "
\n", + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
index123456789...syszlwxlwylwzrwxrwyrwztimestampphase
00-0.005009-0.0009640.0005730.0086230.0056670.001302-0.0006310.000130-0.000048...4.2254851.7755364.9839124.2968331.5698895.1937624.3354171.5601445702167Rest
110.0049050.001209-0.0006490.0047370.0031660.000819-0.000572-0.0000150.000023...4.2232841.7774015.0004104.3013581.5665445.1641594.3131071.5520975702307Rest
22-0.002393-0.0002160.0001360.0030280.0012120.000336-0.0004490.0000170.000047...4.2236901.7775715.0016564.2998121.5665375.1368174.3070871.5515765702338Rest
33-0.001394-0.0002420.0000560.0011820.0005750.000225-0.000479-0.0000500.000104...4.2248271.7776695.0026724.2988101.5664895.1252204.3002821.5508055702370Rest
44-0.000156-0.0000040.0000230.0015850.0006300.000094-0.0003030.0000970.000065...4.2236711.7780545.0122984.2985821.5650615.1147894.2920081.5497655702432Rest
..................................................................
86081439-0.003709-0.0061680.000786-0.0001550.001088-0.000144-0.003815-0.0046580.000656...4.1996451.9395723.8016235.1924121.8121565.2067485.0865651.8370705432739Rest
86091440-0.0007270.001536-0.0002110.000700-0.0009750.000067-0.0011470.0001770.000008...4.1990961.9398433.7367135.0671201.8285995.2054525.0853461.8371655432771Rest
861014410.0030740.007870-0.0009620.000526-0.0007790.0000900.0027860.005035-0.000606...4.2006131.9403513.7368555.0686081.8283675.2026185.0905341.8367875432808Rest
861114420.0032970.008467-0.0010350.000578-0.0007400.0001010.0029470.005385-0.000652...4.2002031.9406793.7367085.0675001.8284505.1966285.0958111.8362365432836Rest
861214430.000204-0.0000400.0000580.000586-0.0006190.0000870.0002290.0000030.000061...4.1989291.9411953.7363035.0666181.8285405.1948445.0962061.8365435432869Rest
\n", + "

8613 rows × 55 columns

\n", + "
\n", + "
\n", + "\n", + "
\n", + " \n", + "\n", + " \n", + "\n", + " \n", + "
\n", + "\n", + "\n", + "
\n", + " \n", + "\n", + "\n", + "\n", + " \n", + "
\n", + "
\n", + "
\n" + ] + }, + "metadata": {}, + "execution_count": 18 + } + ] + }, + { + "cell_type": "code", + "source": [ + "final_df.drop(['Phase'],axis=1,inplace=True)\n", + "list1=list(range(0,5))\n", + "print(final_df['phase'].unique())\n", + "final_df['phase'].replace(final_df['phase'].unique(),list1,inplace=True)" + ], + "metadata": { + "id": "lAQuSOMcXhZo", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "d40de825-89bb-45e5-cc5b-8cf8246325b0" + }, + "execution_count": 19, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "['Rest' 'Preparation' 'Stroke' 'Hold' 'Retraction']\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "\n", + "y=final_df['phase'].values\n", + "final_df['phase'].unique()\n", + "y" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "DmFlaaLBcGSW", + "outputId": "eed7aec0-4d15-4e77-b849-f49823461ba4" + }, + "execution_count": 20, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "array([0, 0, 0, ..., 0, 0, 0])" + ] + }, + "metadata": {}, + "execution_count": 20 + } + ] + }, + { + "cell_type": "code", + "source": [ + "X=final_df.drop(['phase'],axis=1)" + ], + "metadata": { + "id": "cLrh-HwpZQw2" + }, + "execution_count": 21, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "X.drop(['index'],axis=1,inplace=True)" + ], + "metadata": { + "id": "O8glM7I1YIQb" + }, + "execution_count": 22, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import lightgbm as lgbm\n", + "\n", + "lgbm=lgbm.LGBMClassifier()\n", + "\n", + "X" + ], + "metadata": { + "id": "zf3ja_z4ZzGS", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 443 + }, + "outputId": "ee4f18e3-958e-4db5-c9e1-952088693cef" + }, + "execution_count": 23, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + " 1 2 3 4 5 6 7 \\\n", + "0 -0.005009 -0.000964 0.000573 0.008623 0.005667 0.001302 
-0.000631 \n", + "1 0.004905 0.001209 -0.000649 0.004737 0.003166 0.000819 -0.000572 \n", + "2 -0.002393 -0.000216 0.000136 0.003028 0.001212 0.000336 -0.000449 \n", + "3 -0.001394 -0.000242 0.000056 0.001182 0.000575 0.000225 -0.000479 \n", + "4 -0.000156 -0.000004 0.000023 0.001585 0.000630 0.000094 -0.000303 \n", + "... ... ... ... ... ... ... ... \n", + "8608 -0.003709 -0.006168 0.000786 -0.000155 0.001088 -0.000144 -0.003815 \n", + "8609 -0.000727 0.001536 -0.000211 0.000700 -0.000975 0.000067 -0.001147 \n", + "8610 0.003074 0.007870 -0.000962 0.000526 -0.000779 0.000090 0.002786 \n", + "8611 0.003297 0.008467 -0.001035 0.000578 -0.000740 0.000101 0.002947 \n", + "8612 0.000204 -0.000040 0.000058 0.000586 -0.000619 0.000087 0.000229 \n", + "\n", + " 8 9 10 ... sx sy sz \\\n", + "0 0.000130 -0.000048 0.007762 ... 5.052367 4.225485 1.775536 \n", + "1 -0.000015 0.000023 0.002706 ... 5.045395 4.223284 1.777401 \n", + "2 0.000017 0.000047 0.002868 ... 5.045374 4.223690 1.777571 \n", + "3 -0.000050 0.000104 0.001171 ... 5.045767 4.224827 1.777669 \n", + "4 0.000097 0.000065 0.001579 ... 5.047422 4.223671 1.778054 \n", + "... ... ... ... ... ... ... ... \n", + "8608 -0.004658 0.000656 0.000060 ... 4.473687 4.199645 1.939572 \n", + "8609 0.000177 0.000008 0.000423 ... 4.474245 4.199096 1.939843 \n", + "8610 0.005035 -0.000606 0.000413 ... 4.476590 4.200613 1.940351 \n", + "8611 0.005385 -0.000652 0.000580 ... 4.477201 4.200203 1.940679 \n", + "8612 0.000003 0.000061 0.000644 ... 4.478990 4.198929 1.941195 \n", + "\n", + " lwx lwy lwz rwx rwy rwz timestamp \n", + "0 4.983912 4.296833 1.569889 5.193762 4.335417 1.560144 5702167 \n", + "1 5.000410 4.301358 1.566544 5.164159 4.313107 1.552097 5702307 \n", + "2 5.001656 4.299812 1.566537 5.136817 4.307087 1.551576 5702338 \n", + "3 5.002672 4.298810 1.566489 5.125220 4.300282 1.550805 5702370 \n", + "4 5.012298 4.298582 1.565061 5.114789 4.292008 1.549765 5702432 \n", + "... ... ... ... ... ... ... ... 
\n", + "8608 3.801623 5.192412 1.812156 5.206748 5.086565 1.837070 5432739 \n", + "8609 3.736713 5.067120 1.828599 5.205452 5.085346 1.837165 5432771 \n", + "8610 3.736855 5.068608 1.828367 5.202618 5.090534 1.836787 5432808 \n", + "8611 3.736708 5.067500 1.828450 5.196628 5.095811 1.836236 5432836 \n", + "8612 3.736303 5.066618 1.828540 5.194844 5.096206 1.836543 5432869 \n", + "\n", + "[8613 rows x 51 columns]" + ], + "text/html": [ + "\n", + "
\n", + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
12345678910...sxsyszlwxlwylwzrwxrwyrwztimestamp
0-0.005009-0.0009640.0005730.0086230.0056670.001302-0.0006310.000130-0.0000480.007762...5.0523674.2254851.7755364.9839124.2968331.5698895.1937624.3354171.5601445702167
10.0049050.001209-0.0006490.0047370.0031660.000819-0.000572-0.0000150.0000230.002706...5.0453954.2232841.7774015.0004104.3013581.5665445.1641594.3131071.5520975702307
2-0.002393-0.0002160.0001360.0030280.0012120.000336-0.0004490.0000170.0000470.002868...5.0453744.2236901.7775715.0016564.2998121.5665375.1368174.3070871.5515765702338
3-0.001394-0.0002420.0000560.0011820.0005750.000225-0.000479-0.0000500.0001040.001171...5.0457674.2248271.7776695.0026724.2988101.5664895.1252204.3002821.5508055702370
4-0.000156-0.0000040.0000230.0015850.0006300.000094-0.0003030.0000970.0000650.001579...5.0474224.2236711.7780545.0122984.2985821.5650615.1147894.2920081.5497655702432
..................................................................
8608-0.003709-0.0061680.000786-0.0001550.001088-0.000144-0.003815-0.0046580.0006560.000060...4.4736874.1996451.9395723.8016235.1924121.8121565.2067485.0865651.8370705432739
8609-0.0007270.001536-0.0002110.000700-0.0009750.000067-0.0011470.0001770.0000080.000423...4.4742454.1990961.9398433.7367135.0671201.8285995.2054525.0853461.8371655432771
86100.0030740.007870-0.0009620.000526-0.0007790.0000900.0027860.005035-0.0006060.000413...4.4765904.2006131.9403513.7368555.0686081.8283675.2026185.0905341.8367875432808
86110.0032970.008467-0.0010350.000578-0.0007400.0001010.0029470.005385-0.0006520.000580...4.4772014.2002031.9406793.7367085.0675001.8284505.1966285.0958111.8362365432836
86120.000204-0.0000400.0000580.000586-0.0006190.0000870.0002290.0000030.0000610.000644...4.4789904.1989291.9411953.7363035.0666181.8285405.1948445.0962061.8365435432869
\n", + "

8613 rows × 51 columns

\n", + "
\n", + "
\n", + "\n", + "
\n", + " \n", + "\n", + " \n", + "\n", + " \n", + "
\n", + "\n", + "\n", + "
\n", + " \n", + "\n", + "\n", + "\n", + " \n", + "
\n", + "
\n", + "
\n" + ] + }, + "metadata": {}, + "execution_count": 23 + } + ] + }, + { + "cell_type": "code", + "source": [ + "y" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "otA0Zm92fw0h", + "outputId": "b9c83261-686b-41a2-af8a-ebf22cc3f986" + }, + "execution_count": 24, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "array([0, 0, 0, ..., 0, 0, 0])" + ] + }, + "metadata": {}, + "execution_count": 24 + } + ] + }, + { + "cell_type": "code", + "source": [ + "lgbm.fit(X,y)\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 248 + }, + "id": "aqDhvd1g4b-z", + "outputId": "eba62194-ea74-4a98-bbee-f7531a491e02" + }, + "execution_count": 25, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001060 seconds.\n", + "You can set `force_row_wise=true` to remove the overhead.\n", + "And if memory is not enough, you can set `force_col_wise=true`.\n", + "[LightGBM] [Info] Total Bins 13005\n", + "[LightGBM] [Info] Number of data points in the train set: 8613, number of used features: 51\n", + "[LightGBM] [Info] Start training from score -1.341454\n", + "[LightGBM] [Info] Start training from score -1.512999\n", + "[LightGBM] [Info] Start training from score -1.229411\n", + "[LightGBM] [Info] Start training from score -2.210902\n", + "[LightGBM] [Info] Start training from score -2.152273\n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "LGBMClassifier()" + ], + "text/html": [ + "
LGBMClassifier()
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
" + ] + }, + "metadata": {}, + "execution_count": 25 + } + ] + }, + { + "cell_type": "code", + "source": [ + "Y_a2=data_a2raw.drop([0,1,2,3])\n", + "Y_a2.reset_index(inplace=True)\n", + "Y_a2.drop(['index'],axis=1,inplace=True)\n", + "Y_a2\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 443 + }, + "id": "7j0HqnPSdL_1", + "outputId": "eebe6a42-4471-4ced-e3b5-47fe7e5b8d3f" + }, + "execution_count": 26, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + " lhx lhy lhz rhx rhy rhz hx \\\n", + "0 4.497225 5.598363 1.684676 6.932706 5.420004 1.648139 5.532781 \n", + "1 4.504442 5.563369 1.682441 6.984883 5.338807 1.662477 5.535068 \n", + "2 4.501790 5.587282 1.681250 6.936526 5.400179 1.650723 5.536529 \n", + "3 4.481035 5.558424 1.678168 6.935569 5.403949 1.650177 5.536117 \n", + "4 4.490471 5.547697 1.677370 6.954857 5.384894 1.654895 5.537160 \n", + "... ... ... ... ... ... ... ... \n", + "1255 3.766681 5.047060 1.599805 6.787716 4.922377 1.483261 5.494840 \n", + "1256 3.772569 5.135340 1.607126 6.771120 4.998858 1.492831 5.457356 \n", + "1257 3.781345 5.191569 1.616638 6.748940 5.098884 1.488763 5.463538 \n", + "1258 3.794636 5.245894 1.628859 6.735619 5.142869 1.507751 5.469743 \n", + "1259 3.815556 5.279714 1.638650 6.699938 5.184754 1.514433 5.480086 \n", + "\n", + " hy hz sx sy sz lwx lwy \\\n", + "0 1.472957 1.781428 5.581297 4.110899 1.776406 4.550096 5.212202 \n", + "1 1.473257 1.780948 5.581542 4.111409 1.776078 4.534203 5.175910 \n", + "2 1.473684 1.780335 5.581291 4.111289 1.775740 4.530342 5.199273 \n", + "3 1.472946 1.780279 5.581693 4.109772 1.775356 4.523950 5.174112 \n", + "4 1.473327 1.779768 5.582080 4.108705 1.775060 4.521791 5.162915 \n", + "... ... ... ... ... ... ... ... 
\n", + "1255 1.408275 1.695259 5.391037 4.153896 1.700377 4.035254 4.780551 \n", + "1256 1.410738 1.691791 5.375273 4.153625 1.699941 4.155283 5.046596 \n", + "1257 1.409960 1.692598 5.366023 4.152654 1.699411 4.154757 5.052544 \n", + "1258 1.409930 1.692958 5.356151 4.152353 1.699148 4.155412 5.076689 \n", + "1259 1.410949 1.694152 5.343333 4.151536 1.698735 4.169959 5.114348 \n", + "\n", + " lwz rwx rwy rwz timestamp phase \n", + "0 1.688152 6.621651 5.184755 1.650331 5103827 Rest \n", + "1 1.689498 6.619938 5.200892 1.661059 5103859 Rest \n", + "2 1.687336 6.613071 5.181889 1.651599 5103893 Rest \n", + "3 1.688738 6.613035 5.184223 1.651119 5103916 Rest \n", + "4 1.688910 6.594951 5.234004 1.653260 5103947 Rest \n", + "... ... ... ... ... ... ... \n", + "1255 1.625402 6.644851 4.683230 1.522554 5155770 Retraction \n", + "1256 1.621549 6.672435 4.837710 1.516715 5155833 Retraction \n", + "1257 1.622160 6.684550 4.894862 1.529447 5155902 Retraction \n", + "1258 1.623406 6.632552 4.916888 1.539666 5155939 Retraction \n", + "1259 1.622405 6.615832 4.942080 1.543551 5155956 Retraction \n", + "\n", + "[1260 rows x 20 columns]" + ], + "text/html": [ + "\n", + "
\n", + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
lhxlhylhzrhxrhyrhzhxhyhzsxsyszlwxlwylwzrwxrwyrwztimestampphase
04.4972255.5983631.6846766.9327065.4200041.6481395.5327811.4729571.7814285.5812974.1108991.7764064.5500965.2122021.6881526.6216515.1847551.6503315103827Rest
14.5044425.5633691.6824416.9848835.3388071.6624775.5350681.4732571.7809485.5815424.1114091.7760784.5342035.1759101.6894986.6199385.2008921.6610595103859Rest
24.5017905.5872821.6812506.9365265.4001791.6507235.5365291.4736841.7803355.5812914.1112891.7757404.5303425.1992731.6873366.6130715.1818891.6515995103893Rest
34.4810355.5584241.6781686.9355695.4039491.6501775.5361171.4729461.7802795.5816934.1097721.7753564.5239505.1741121.6887386.6130355.1842231.6511195103916Rest
44.4904715.5476971.6773706.9548575.3848941.6548955.5371601.4733271.7797685.5820804.1087051.7750604.5217915.1629151.6889106.5949515.2340041.6532605103947Rest
...............................................................
12553.7666815.0470601.5998056.7877164.9223771.4832615.4948401.4082751.6952595.3910374.1538961.7003774.0352544.7805511.6254026.6448514.6832301.5225545155770Retraction
12563.7725695.1353401.6071266.7711204.9988581.4928315.4573561.4107381.6917915.3752734.1536251.6999414.1552835.0465961.6215496.6724354.8377101.5167155155833Retraction
12573.7813455.1915691.6166386.7489405.0988841.4887635.4635381.4099601.6925985.3660234.1526541.6994114.1547575.0525441.6221606.6845504.8948621.5294475155902Retraction
12583.7946365.2458941.6288596.7356195.1428691.5077515.4697431.4099301.6929585.3561514.1523531.6991484.1554125.0766891.6234066.6325524.9168881.5396665155939Retraction
12593.8155565.2797141.6386506.6999385.1847541.5144335.4800861.4109491.6941525.3433334.1515361.6987354.1699595.1143481.6224056.6158324.9420801.5435515155956Retraction
\n", + "

1260 rows × 20 columns

\n", + "
\n", + "
\n", + "\n", + "
\n", + " \n", + "\n", + " \n", + "\n", + " \n", + "
\n", + "\n", + "\n", + "
\n", + " \n", + "\n", + "\n", + "\n", + " \n", + "
\n", + "
\n", + "
\n" + ] + }, + "metadata": {}, + "execution_count": 26 + } + ] + }, + { + "cell_type": "code", + "source": [ + "list1=list(range(0,5))\n", + "Y_a2['phase'].replace(Y_a2['phase'].unique(),list1,inplace=True)\n", + "y_a2=Y_a2['phase'].values\n", + "\n", + "y_a2_nophase=Y_a2.drop(['phase'],axis=1)\n", + "xa2test=pd.concat([test,y_a2_nophase],axis=1)\n", + "xa2test\n", + "print(y_a2)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "QmhA-NzDdHo0", + "outputId": "d61aa073-e8c1-46aa-c936-293ca81071c9" + }, + "execution_count": 27, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[0 0 0 ... 3 3 3]\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "y_pred_lgbm_a2=lgbm.predict(xa2test)" + ], + "metadata": { + "id": "dWIHqNAvfJH4" + }, + "execution_count": 28, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from sklearn.metrics import accuracy_score" + ], + "metadata": { + "id": "G19SoNqwaP2g" + }, + "execution_count": 29, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "accuracy_score(y_pred_lgbm_a2,y_a2)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "A3Ti4_f1fOdg", + "outputId": "0429dc66-511c-46c8-ebec-82580947f10f" + }, + "execution_count": 30, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "0.6753968253968254" + ] + }, + "metadata": {}, + "execution_count": 30 + } + ] + }, + { + "cell_type": "code", + "source": [ + "y_pred_lgbm_a2" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "mvemRLtGf_h0", + "outputId": "dfdc029f-f006-4002-fcc9-9f8627b21e53" + }, + "execution_count": 31, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "array([1, 1, 1, ..., 4, 4, 4])" + ] + }, + "metadata": {}, + "execution_count": 31 + } + ] + }, + { + "cell_type": "code", + "source": [ + "list1= ['D','P','S','H','R']\n", + 
"final=[]\n", + "for i in y_pred_lgbm_a2:\n", + " final.append(list[i])\n" + ], + "metadata": { + "id": "6FWw6GRUSwuK" + }, + "execution_count": 61, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "ID=[]\n", + "for i in range(1260):\n", + " ID.append(i+1)\n", + "print(ID)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "dCfoKs8xUwqR", + "outputId": "bb6bff50-7504-43ca-aabd-a8565fc168fa" + }, + "execution_count": 62, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 
311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 
711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 869, 870, 871, 872, 873, 874, 875, 876, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 898, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 910, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925, 926, 927, 928, 929, 930, 931, 932, 933, 934, 935, 936, 937, 938, 939, 940, 941, 942, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 
1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260]\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "my_submission = pd.DataFrame({'ID': ID, 'Phase': final})\n", + "my_submission.to_csv('/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/submission_lgbm.csv', index=False)\n" + ], + "metadata": { + "id": "_LLzDj0wUNrj" + }, + "execution_count": 63, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "XGBoost" + ], + "metadata": { + "id": "BbRewsePXEiC" + } + }, + { + "cell_type": "code", + "source": [ + "import xgboost as xgb\n", + "from xgboost import XGBClassifier\n", + "from xgboost import cv" + ], + "metadata": { + "id": "4LHiV6iM6byg" + }, + "execution_count": 35, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from sklearn.model_selection import train_test_split\n", + "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.1, random_state = 123)\n" + ], + "metadata": { + "id": "7Gizx_mONMmn" + }, + "execution_count": 36, + "outputs": [] + 
}, + { + "cell_type": "code", + "source": [ + "params = {\n", + " 'min_child_weight': [1, 5, 10],\n", + " 'gamma': [0.5, 1, 1.5, 2, 5],\n", + " 'subsample': [0.6, 0.8, 1.0],\n", + " 'colsample_bytree': [0.6, 0.8, 1.0],\n", + " 'max_depth': [3, 4, 5],\n", + " 'learning_rate':[0.1,0.01,0.02,1]\n", + " }" + ], + "metadata": { + "id": "hD78jJscLDQ3" + }, + "execution_count": 37, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "xgb_model=XGBClassifier(n_estimators=400,nthread=1,objective='multi:softmax')\n" + ], + "metadata": { + "id": "cb_3PVqTPA_I" + }, + "execution_count": 38, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "xgb_model.fit(X,y)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 248 + }, + "id": "PMX7ePXBK8RD", + "outputId": "82409f99-1741-4659-e601-233fc6aa13e5" + }, + "execution_count": 39, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "XGBClassifier(base_score=None, booster=None, callbacks=None,\n", + " colsample_bylevel=None, colsample_bynode=None,\n", + " colsample_bytree=None, early_stopping_rounds=None,\n", + " enable_categorical=False, eval_metric=None, feature_types=None,\n", + " gamma=None, gpu_id=None, grow_policy=None, importance_type=None,\n", + " interaction_constraints=None, learning_rate=None, max_bin=None,\n", + " max_cat_threshold=None, max_cat_to_onehot=None,\n", + " max_delta_step=None, max_depth=None, max_leaves=None,\n", + " min_child_weight=None, missing=nan, monotone_constraints=None,\n", + " n_estimators=400, n_jobs=None, nthread=1, num_parallel_tree=None,\n", + " objective='multi:softmax', ...)" + ], + "text/html": [ + "
XGBClassifier(base_score=None, booster=None, callbacks=None,\n",
+              "              colsample_bylevel=None, colsample_bynode=None,\n",
+              "              colsample_bytree=None, early_stopping_rounds=None,\n",
+              "              enable_categorical=False, eval_metric=None, feature_types=None,\n",
+              "              gamma=None, gpu_id=None, grow_policy=None, importance_type=None,\n",
+              "              interaction_constraints=None, learning_rate=None, max_bin=None,\n",
+              "              max_cat_threshold=None, max_cat_to_onehot=None,\n",
+              "              max_delta_step=None, max_depth=None, max_leaves=None,\n",
+              "              min_child_weight=None, missing=nan, monotone_constraints=None,\n",
+              "              n_estimators=400, n_jobs=None, nthread=1, num_parallel_tree=None,\n",
+              "              objective='multi:softmax', ...)
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
" + ] + }, + "metadata": {}, + "execution_count": 39 + } + ] + }, + { + "cell_type": "code", + "source": [ + "y_pred_xgb_a2=xgb_model.predict(xa2test)\n" + ], + "metadata": { + "id": "wQ9NjC6q7Lj_" + }, + "execution_count": 55, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "accuracy_score(y_pred_xgb_a2,y_a2)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "8dDJw9AU7nXs", + "outputId": "02b6208b-255a-4d3b-ccb1-ed78f33026e3" + }, + "execution_count": 56, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "0.6952380952380952" + ] + }, + "metadata": {}, + "execution_count": 56 + } + ] + }, + { + "cell_type": "code", + "source": [ + "list2= ['D','P','S','H','R']\n", + "final=[]\n", + "for i in y_pred_xgb_a2:\n", + " final.append(list[i])\n" + ], + "metadata": { + "id": "MsysV7bIYvwU" + }, + "execution_count": 58, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "ID=[]\n", + "for i in range(1260):\n", + " ID.append(i+1)\n", + "print(ID)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "H5C-3gMqYx38", + "outputId": "7b9cd305-723f-4b9b-a5b9-5ec4996bb05a" + }, + "execution_count": 59, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 
158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 
558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 869, 870, 871, 872, 873, 874, 875, 876, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 898, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 910, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925, 926, 927, 928, 929, 930, 931, 932, 933, 934, 935, 936, 937, 938, 939, 940, 941, 942, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 
958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260]\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "my_submission_xgboost = pd.DataFrame({'ID': ID, 'Phase': final})\n", + 
"my_submission_xgboost.to_csv('/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/submission_xgboost.csv', index=False)\n" + ], + "metadata": { + "id": "LTZHXRLdY8sd" + }, + "execution_count": 60, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "from sklearn.model_selection import RandomizedSearchCV, GridSearchCV\n", + "from sklearn.metrics import roc_auc_score\n", + "from sklearn.model_selection import StratifiedKFold" + ], + "metadata": { + "id": "9C5MT8mYX3GP" + }, + "execution_count": 45, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "folds = 5\n", + "param_comb = 5\n", + "\n", + "skf = StratifiedKFold(n_splits=folds, shuffle = True, random_state = 1001)\n", + "\n", + "randomsearch = RandomizedSearchCV(xgb_model, param_distributions=params, n_iter=param_comb, scoring='roc_auc', n_jobs=4, cv=skf.split(X,y), verbose=3, random_state=1001 )\n", + "randomsearch.fit(X, y)\n" + ], + "metadata": { + "id": "huEFEA_1XvQ9", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 169 + }, + "outputId": "a24767ab-1467-441c-89d5-331074980a0b" + }, + "execution_count": 46, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Fitting 5 folds for each of 5 candidates, totalling 25 fits\n" + ] + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "/usr/local/lib/python3.10/dist-packages/sklearn/model_selection/_search.py:952: UserWarning: One or more of the test scores are non-finite: [nan nan nan nan nan]\n", + " warnings.warn(\n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "RandomizedSearchCV(cv=,\n", + " estimator=XGBClassifier(base_score=None, booster=None,\n", + " callbacks=None,\n", + " colsample_bylevel=None,\n", + " colsample_bynode=None,\n", + " colsample_bytree=None,\n", + " early_stopping_rounds=None,\n", + " enable_categorical=False,\n", + " eval_metric=None, feature_types=None,\n", + " gamma=None, gpu_id=None,\n", + " 
grow_policy=None,\n", + " importance_type...\n", + " monotone_constraints=None,\n", + " n_estimators=400, n_jobs=None,\n", + " nthread=1, num_parallel_tree=None,\n", + " objective='multi:softmax', ...),\n", + " n_iter=5, n_jobs=4,\n", + " param_distributions={'colsample_bytree': [0.6, 0.8, 1.0],\n", + " 'gamma': [0.5, 1, 1.5, 2, 5],\n", + " 'learning_rate': [0.1, 0.01, 0.02, 1],\n", + " 'max_depth': [3, 4, 5],\n", + " 'min_child_weight': [1, 5, 10],\n", + " 'subsample': [0.6, 0.8, 1.0]},\n", + " random_state=1001, scoring='roc_auc', verbose=3)" + ], + "text/html": [ + "
RandomizedSearchCV(cv=<generator object _BaseKFold.split at 0x7f07ad022960>,\n",
+              "                   estimator=XGBClassifier(base_score=None, booster=None,\n",
+              "                                           callbacks=None,\n",
+              "                                           colsample_bylevel=None,\n",
+              "                                           colsample_bynode=None,\n",
+              "                                           colsample_bytree=None,\n",
+              "                                           early_stopping_rounds=None,\n",
+              "                                           enable_categorical=False,\n",
+              "                                           eval_metric=None, feature_types=None,\n",
+              "                                           gamma=None, gpu_id=None,\n",
+              "                                           grow_policy=None,\n",
+              "                                           importance_type...\n",
+              "                                           monotone_constraints=None,\n",
+              "                                           n_estimators=400, n_jobs=None,\n",
+              "                                           nthread=1, num_parallel_tree=None,\n",
+              "                                           objective='multi:softmax', ...),\n",
+              "                   n_iter=5, n_jobs=4,\n",
+              "                   param_distributions={'colsample_bytree': [0.6, 0.8, 1.0],\n",
+              "                                        'gamma': [0.5, 1, 1.5, 2, 5],\n",
+              "                                        'learning_rate': [0.1, 0.01, 0.02, 1],\n",
+              "                                        'max_depth': [3, 4, 5],\n",
+              "                                        'min_child_weight': [1, 5, 10],\n",
+              "                                        'subsample': [0.6, 0.8, 1.0]},\n",
+              "                   random_state=1001, scoring='roc_auc', verbose=3)
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
" + ] + }, + "metadata": {}, + "execution_count": 46 + } + ] + }, + { + "cell_type": "code", + "source": [ + "print('\\n All results:')\n", + "print(randomsearch.cv_results_)\n", + "print('\\n Best estimator:')\n", + "print(randomsearch.best_estimator_)\n", + "print('\\n Best normalized gini score for %d-fold search with %d parameter combinations:' % (folds, param_comb))\n", + "print(randomsearch.best_score_ * 2 - 1)\n", + "print('\\n Best hyperparameters:')\n", + "print(randomsearch.best_params_)\n", + "results = pd.DataFrame(randomsearch.cv_results_)\n", + "results.to_csv('xgb-random-grid-search-results-01.csv', index=False)" + ], + "metadata": { + "id": "YkegXZwm22_7", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "74a4404f-7cd8-4176-93c9-a57bc2526273" + }, + "execution_count": 47, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\n", + " All results:\n", + "{'mean_fit_time': array([197.39904776, 227.97758846, 258.94723792, 139.75200572,\n", + " 244.76948318]), 'std_fit_time': array([ 4.87418783, 2.79416663, 2.5347546 , 1.46544292, 58.12875989]), 'mean_score_time': array([0.00361195, 0.00266466, 0.00352955, 0.00089574, 0.00095153]), 'std_score_time': array([0.00391645, 0.00138396, 0.00286 , 0.00091305, 0.00095201]), 'param_subsample': masked_array(data=[0.8, 1.0, 0.8, 0.8, 1.0],\n", + " mask=[False, False, False, False, False],\n", + " fill_value='?',\n", + " dtype=object), 'param_min_child_weight': masked_array(data=[5, 5, 10, 10, 1],\n", + " mask=[False, False, False, False, False],\n", + " fill_value='?',\n", + " dtype=object), 'param_max_depth': masked_array(data=[4, 4, 5, 4, 4],\n", + " mask=[False, False, False, False, False],\n", + " fill_value='?',\n", + " dtype=object), 'param_learning_rate': masked_array(data=[0.1, 0.01, 0.1, 0.02, 0.01],\n", + " mask=[False, False, False, False, False],\n", + " fill_value='?',\n", + " dtype=object), 'param_gamma': masked_array(data=[2, 2, 1, 1.5, 5],\n", + " 
mask=[False, False, False, False, False],\n", + " fill_value='?',\n", + " dtype=object), 'param_colsample_bytree': masked_array(data=[0.8, 0.8, 1.0, 0.6, 1.0],\n", + " mask=[False, False, False, False, False],\n", + " fill_value='?',\n", + " dtype=object), 'params': [{'subsample': 0.8, 'min_child_weight': 5, 'max_depth': 4, 'learning_rate': 0.1, 'gamma': 2, 'colsample_bytree': 0.8}, {'subsample': 1.0, 'min_child_weight': 5, 'max_depth': 4, 'learning_rate': 0.01, 'gamma': 2, 'colsample_bytree': 0.8}, {'subsample': 0.8, 'min_child_weight': 10, 'max_depth': 5, 'learning_rate': 0.1, 'gamma': 1, 'colsample_bytree': 1.0}, {'subsample': 0.8, 'min_child_weight': 10, 'max_depth': 4, 'learning_rate': 0.02, 'gamma': 1.5, 'colsample_bytree': 0.6}, {'subsample': 1.0, 'min_child_weight': 1, 'max_depth': 4, 'learning_rate': 0.01, 'gamma': 5, 'colsample_bytree': 1.0}], 'split0_test_score': array([nan, nan, nan, nan, nan]), 'split1_test_score': array([nan, nan, nan, nan, nan]), 'split2_test_score': array([nan, nan, nan, nan, nan]), 'split3_test_score': array([nan, nan, nan, nan, nan]), 'split4_test_score': array([nan, nan, nan, nan, nan]), 'mean_test_score': array([nan, nan, nan, nan, nan]), 'std_test_score': array([nan, nan, nan, nan, nan]), 'rank_test_score': array([1, 1, 1, 1, 1], dtype=int32)}\n", + "\n", + " Best estimator:\n", + "XGBClassifier(base_score=None, booster=None, callbacks=None,\n", + " colsample_bylevel=None, colsample_bynode=None,\n", + " colsample_bytree=0.8, early_stopping_rounds=None,\n", + " enable_categorical=False, eval_metric=None, feature_types=None,\n", + " gamma=2, gpu_id=None, grow_policy=None, importance_type=None,\n", + " interaction_constraints=None, learning_rate=0.1, max_bin=None,\n", + " max_cat_threshold=None, max_cat_to_onehot=None,\n", + " max_delta_step=None, max_depth=4, max_leaves=None,\n", + " min_child_weight=5, missing=nan, monotone_constraints=None,\n", + " n_estimators=400, n_jobs=None, nthread=1, num_parallel_tree=None,\n", + " 
objective='multi:softmax', ...)\n", + "\n", + " Best normalized gini score for 5-fold search with 5 parameter combinations:\n", + "nan\n", + "\n", + " Best hyperparameters:\n", + "{'subsample': 0.8, 'min_child_weight': 5, 'max_depth': 4, 'learning_rate': 0.1, 'gamma': 2, 'colsample_bytree': 0.8}\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "best_model = randomsearch.best_estimator_" + ], + "metadata": { + "id": "1smwxidb4Z2H" + }, + "execution_count": 48, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "y_a2_pred_rs=best_model.predict(xa2test)" + ], + "metadata": { + "id": "JF4VHNBfT3eZ" + }, + "execution_count": 49, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "accuracy_score(y_a2_pred_rs,y_a2)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "PcVBGk2mWINc", + "outputId": "0378a583-9cdf-4a48-b90b-cee4b842d31c" + }, + "execution_count": 50, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "0.6682539682539682" + ] + }, + "metadata": {}, + "execution_count": 50 + } + ] + }, + { + "cell_type": "code", + "source": [ + "list= ['D','P','S','H','R']\n", + "final=[]\n", + "for i in y_a2_pred_rs:\n", + " final.append(list[i])\n" + ], + "metadata": { + "id": "GCAlfjHXZXNU" + }, + "execution_count": 51, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "ID=[]\n", + "for i in range(1260):\n", + " ID.append(i+1)\n", + "print(ID)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "zZxtHIykZlcv", + "outputId": "c8c14832-418e-460c-c938-dd1da57c43f8" + }, + "execution_count": 52, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 
65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 
472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 869, 870, 871, 
872, 873, 874, 875, 876, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 898, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 910, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925, 926, 927, 928, 929, 930, 931, 932, 933, 934, 935, 936, 937, 938, 939, 940, 941, 942, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1223, 1224, 1225, 
1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260]\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "my_submission_rs = pd.DataFrame({'ID': ID, 'Phase': final})\n", + "my_submission_rs.to_csv('/content/drive/MyDrive/kaggle/dsg-challenge-1-supervised-learning/submission_rs.csv', index=False)\n" + ], + "metadata": { + "id": "5i6cj2-wZn12" + }, + "execution_count": 53, + "outputs": [] + } + ] +} \ No newline at end of file diff --git a/assMath/ass2B/2B_Anupriya.ipynb b/assMath/ass2B/2B_Anupriya.ipynb new file mode 100644 index 00000000..f3843d94 --- /dev/null +++ b/assMath/ass2B/2B_Anupriya.ipynb @@ -0,0 +1,697 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "xdivz28Z6L02" + }, + "source": [ + "### __Assigment 2B__\n", + "#### __Question 1(a)__\n", + "\n", + "For a given distribution $p(y|x)$ that is the distribution of the outcome $y$ given the data $x$, we usually estimate mean because mean gives us the expected value of $y$ given a certain $x$. It helps us realise the central value around which we can expect $y$ to be." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "SqIZ6ebklw1G" + }, + "source": [ + "####__Question 1(b)__\n", + "So, for vanilla linear regression, we have the following if $\\hat{y}$ is the predicted outcome by the model and $textbf{w}$ is the vector of parameters (weights) then,\n", + "\n", + "$\\hat{y}= \\textbf{w}^\\textbf{T}.\\textbf{x}$\n", + "\n", + "So our task is to be able to define such a $\\hat{y}$, or in other words, find the required $\\textbf{w}$. 
Since this is vanilla linear regression, there is just one independent variable $x$, hence just one parameter (weight), $w$.\n", + "To do so, we'll be using MSE here, so,\n", + "\n", + "$\nabla_wMSE_{train}=0 $, which ultimately gives,\n", + "\n", + "$w=(X^TX)^{-1}X^Ty$\n", + "\n", + "after some linear algebra jazz that is too much to type here (from Goodfellow page 108,109)\n", + "\n", + "where, for us, $X$ is the design matrix (which, in our simple case is just the transpose of the vector $\textbf{x}$ itself since there is just one value of x per training dataset pair, $X=[-x^T-]$) and $y$ is a vector of all target values from the training set. So, one thing to note is that $X^TX$ is just a single value and its inverse is simply $\frac{1}{X^TX}$.\n", + "\n", + "Now, we've been given that $p(y|x) \sim N(y:\mu,\sigma)$ where $\mu=w*x$ and $\sigma=I$.\n", + "\n", + "So, assuming we generate a dataset that follows such a distribution, we need $x$, which will just be a bunch of single values from a distribution (not specified, could be anything we want so let's take $N(0,1)$), we need a true value of $w$ that we will decide randomly and we need to find the corresponding $y$ which has a mean vector $w*x$ and covariance matrix (size n*n) equal to $I$, which will give us our required dataset. After this, we can use the above relation to estimate $w$ and that should be close to the true value of $w$.\n", + "\n", + "We could, like before, just define the loss function and minimise it using Scipy. So, we'll also try that." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "CpnDWH9DLg6K", + "outputId": "54aa8b31-dd50-450f-8e72-912bd98cc6b5" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True value of parameter: 0.39942218451390976\n", + "Estimated value of parameter, thanks to MSE: 0.38803774521035544\n", + "Estimated value of parameter, thanks to Scipy: 0.388037742515314\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "from numpy import random\n", + "\n", + "n=1000\n", + "x_mean=0\n", + "x_sd=1\n", + "X=np.random.normal(x_mean,x_sd,size=n)\n", + "\"\"\"\n", + "shape = 1000\n", + "value = 1\n", + "x_b = np.empty(shape, dtype=np.int)\n", + "x_b.fill(value)\n", + "X=np.column_stack((x, x_b))\n", + "\n", + "#print(X.shape) # matrix\n", + "\"\"\"\n", + "true_w=random.rand() # could be whatever we want technically\n", + "print(\"True value of parameter: \",true_w)\n", + "\n", + "y_mean=true_w*X\n", + "y_covariance=np.eye(n)\n", + "y = np.random.multivariate_normal(y_mean, y_covariance)\n", + "#print(y.shape) # also a vector\n", + "\n", + "#estimated_w=(np.linalg.inv(X.dot(X.T))).dot((X.T).dot(y))\n", + "\n", + "estimated_w=(np.power(X.dot(X.T),-1))*((X.T).dot(y))\n", + "print(\"Estimated value of parameter, thanks to MSE: \",estimated_w)\n", + "\n", + "# hence we can now use the line y=w*x for our vanilla linear regression model\n", + "\n", + "# however, we should be able to solve for w without doing all the math, so let's try that\n", + "\n", + "from scipy.optimize import minimize\n", + "\n", + "def loss(w,X,y):\n", + " J=0\n", + " for i in range(n):\n", + " J+=0.5*(np.power((w*X[i]-y[i]),2))\n", + " return J\n", + "initialparameters=1\n", + "result = minimize(loss, initialparameters, args=(X,y), method='L-BFGS-B')\n", + "estimated__w=result.x\n", + "print(\"Estimated value of parameter, thanks to Scipy: \",estimated__w[0])\n", + "\n" + ] + }, + { 
+ "cell_type": "markdown", + "metadata": { + "id": "zJ1STkI_RQfc" + }, + "source": [ + "The choice of sigma here, that is the covariance, is basically telling us how varied our measurements are. So, in our final objective, which was minimising the squared error term, sigma is not exactly doing anything. The given value of sigma is just for generating the dataset we need. While applying MSE and minimising the function to obtain the corresponding $w$, we didn't really need sigma. So, parameterising sigma is not exactly going to do anything.\n", + "\n", + "Yes however, when we observe our predictions, then calculating its sigma and comparing it to the true sigma can tell us how varied our predictions are." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "eEvyl1JBREZf" + }, + "source": [ + "#### __Question 1(c)__\n", + "Having an exponential family distribution means the superpower of being able to apply GLMs (generalised linear models). Exponential families have distribution functions of the form,\n", + "\n", + "$f(x)=b(y)e^{\eta^TT(y)-a(\eta)}$\n", + "\n", + "So, let's choose Poisson distribution. We wanna obtain $b(y), T(y), a(\eta)$ and $\eta$ itself by observing the poisson distribution function,\n", + "\n", + "$f(x)=\frac{\lambda^y.e^{(-\lambda)}}{y!}$, which can be rewritten as\n", + "\n", + "$f(x)=e^{y.ln(\lambda)-\lambda-ln(y!)}$\n", + "\n", + "So,\n", + "\n", + "$\eta=ln(\lambda)$ and $T(y)=y$, at least that is easy to see.\n", + "\n", + "Further for poisson distribution, $\mu=\lambda$ and $\sigma^2=\lambda$\n", + "\n", + "Now, to construct a GLM, we want to predict the expected value of $T(y)$ given $x$. So, we are looking for a prediction $h(x)=E(y|x)$. 
Further $\\eta$ and $x$ are related by $\\eta=w^Tx$, where $w$ is our unknown parameter.\n", + "\n", + "Here, $h(x)=E(y|x)=\\mu=\\lambda=w^Tx$\n", + "\n", + "The problem is that, by definition and usage, $\\lambda$ is always greater than or equal to $0$ and a linear relationship like $\\lambda=w^Tx$ doesn't seem nice.So, instead, we can use the relationship,\n", + "\n", + "$ln(\\lambda_i)=w^Tx$\n", + "\n", + "$=\n", + "\\begin{pmatrix}\n", + "w_0\\;w_1\n", + "\\end{pmatrix}\n", + "\\times\n", + "\\begin{pmatrix}\n", + "1\\\\\n", + "x_i\n", + "\\end{pmatrix}\n", + "$\n", + "\n", + "Such that, $\\lambda_i=e^{w_0+w_1x_i}$\n", + "\n", + "So, what we are trying to estimate are these unknown parameters with the help of this relationship between $\\lambda$, $w$ and $x$.\n", + "We need to define a loss function, then minimise it to get the desired unknown parameters. Again, problem is that this is no more gaussian! But we do know that we get the same results by minimising loss function as we do by maximising the likelihood function. 
So, we'll have to go by that approach.\n", + "Ultimately, we'll be minimising the negative log-likelihood function, so in a way, that's going to be like our loss function.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "tUOV4XHosr0_", + "outputId": "c0b4aee4-a5df-44bb-a735-187641bc8f98" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True value of parameters: 0.5468524318685486 , 0.9830005256800832\n", + "Estimated value of parameter, thanks to Scipy: 0.49062509412517374 , 1.0114374335545537\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "from numpy import random\n", + "\n", + "def log_likelihood_poisson(w,X,y):\n", + " L=0\n", + " for i in range(1000):\n", + " c=np.exp(w[0]+w[1]*X[i])\n", + " p=c-y[i]*(np.log(c))\n", + " L+=p\n", + " return L\n", + "\n", + "# Let's generate a simple dataset corresponding to all the above stuff\n", + "n=1000\n", + "X=np.random.randn(n)\n", + "w_0=random.rand() # could be whatever we want technically\n", + "w_1=random.rand()\n", + "print(\"True value of parameters: \",w_0,\",\",w_1)\n", + "lambda_i=np.exp(w_0+w_1*X)\n", + "y=np.random.poisson(lambda_i)\n", + "\n", + "\n", + "initialparameters=np.array([1,1]) # let's say\n", + "result = minimize(log_likelihood_poisson, initialparameters, args=(X,y), method='L-BFGS-B')\n", + "estimated__w=result.x\n", + "print(\"Estimated value of parameter, thanks to Scipy: \",estimated__w[0], \",\",estimated__w[1])\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Zn0Xk9m938r5" + }, + "source": [ + "#### __Question 2__\n", + "\n", + "Firstly, the dataset is on my drive so path should be changed accordingly in order to run this code.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "WYDvdI2D3u9h", + "outputId": 
"c58e9675-6f83-470e-bec5-3b4a87391795" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Mounted at /content/drive\n" + ] + } + ], + "source": [ + "from google.colab import drive\n", + "drive.mount('/content/drive')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "ExqUDGVV-HO2", + "outputId": "60cbaf3a-0bc3-46a5-ca55-e2662f2564ea" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Mean Squared Error (MODEL A) (Test Set A): 22.25770734371434\n", + "Mean Squared Error (MODEL A) (Test Set B): 2.5616550522775925\n", + "Mean Squared Error (MODEL A) (Test Set C): 0.062457236803248974\n", + "Mean Squared Error (MODEL B) (Test Set A): 32.73219379378818\n", + "Mean Squared Error (MODEL B) (Test Set B): 1.9577195262268569e-25\n", + "Mean Squared Error (MODEL B) (Test Set C): 2.6140814129669656\n", + "Mean Squared Error (MODEL C) (Test Set A): 16.15674668223636\n", + "Mean Squared Error (MODEL C) (Test Set B): 15.774197648432851\n", + "Mean Squared Error (MODEL C) (Test Set C): 5.699794881410673\n", + "Minimum error: 1.9577195262268569e-25\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA1IAAAIjCAYAAAAJLyrXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABPjElEQVR4nO3de1wWZf7/8ffNWUBAPHDjEULKs61nStdMvoqamYfCQ4YtlpWsqWXmVp4qcXE10/KwZmDfKM0OVpYmqeHXFU0t0sRMDUUT1DJBUUBhfn/4895uRWUQuVFez8djHuvMXPdcn7nvye3dNXONxTAMQwAAAACAEnNydAEAAAAAcLMhSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAMrUPffco3vuuadc+rJYLJo8ebJtffLkybJYLPrtt9/Kpf+goCANGzasXPqqKC79zkvqwIEDslgsSkhIKPOaAMARCFIAUA4SEhJksViuuGzevNnRJRZr2LBhdnV6e3vrtttu04ABA/TRRx+pqKioTPrZtGmTJk+erJMnT5bJ8cpSRaztz9fTxo0bL9tvGIbq1asni8Wi++67zwEVlo0vv/xSFotFtWvXLrNrDQDKioujCwCAymTq1KkKDg6+bHvDhg0dUE3JuLu766233pIknT17VgcPHtTnn3+uAQMG6J577tGnn34qHx8fW/s1a9aY7mPTpk2aMmWKhg0bJj8/vxJ/7uzZs3JxubH/V3a12vbs2SMnJ8f9N0kPDw+999576tixo9325ORkHT58WO7u7g6qrGwkJiYqKChIBw4c0Lp16xQeHu7okgDAhiAFAOWoR48eatOmjanPnD9/XkVFRXJzc7tsX25urry8vEpdj2EYysvLU5UqVa7YxsXFRQ8//LDdtldeeUXTp0/XhAkT9Nhjj2nZsmW2fcXVWZaKiopUUFAgDw8PeXh43NC+rsXRQaVnz55avny55syZYxco33vvPbVu3brcbnG8EXJzc/Xpp58qNjZW8fHxSkxMJEgBqFC4tQ8AKpCLz5H861//0uzZsxUSEiJ3d3elpaXZnv9JS0vT4MGDVa1aNdtIxPnz5/Xyyy/b2gcFBekf//iH8vPz7Y4fFBSk++67T1999ZXatGmjKlWqaOHChaWq9fnnn1e3bt20fPly/fzzz7btxT0jNXfuXDVt2lSenp6qVq2a2rRpo/fee0/Sheeaxo0bJ0kKDg623bJ24MABSReeyYmJiVFiYqKaNm0qd3d3rV692ravuOd1fvvtNz300EPy8fFR9erV9fTTTysvL++y77m453X+fMxr1VbcM1K//PKLHnzwQfn7+8vT01MdOnTQF198Ydfmm2++kcVi0QcffKBXX31VdevWlYeHh7p27ap9+/Zd8Tu/1KBBg/T7778rKSnJtq2goEAffvihBg8eXOxncnNz9cwzz6hevXpyd3fXHXfcoX/9618yDMOuXX5+vsaMGaOaNWuqatWquv/++3X48OFij/nrr7/qb3/7mwICAuTu7q6mTZvq7bffLvF5FOeTTz7R2bNn9eCDD2rgwIH6+OOP7X5DAHA0RqQAoBxlZ2dfNkpgsVhUvXp1u23x8fHKy8vT448/Lnd3d/n7+9v2PfjggwoNDdW0adNs//I7fPhwLVmyRAMGDNAzzzyjLVu2KDY2Vrt379Ynn3xid+w9e/Zo0KBBGjFihB577DHdcccdpT6foUOHas2aNUpKStLtt99ebJtFixZp1KhRGjBggC3Q7NixQ1u2bNHgwYPVr18//fzzz3r//ff12muvqUaNGpKkmjVr2o6xbt06ffDBB4qJiVGNGjUUFBR01boeeughBQUFKTY2Vps3b9acOXP0xx9/6J133jF1fiWp7c+OHj2qu+66S2fOnNGoUaNUvXp1LVmyRPfff78+/PBD9e3b16799OnT5eTkpGeffVbZ2dmKi4v
TkCFDtGXLlhLVFxQUpLCwML3//vvq0aOHJGnVqlXKzs7WwIEDNWfOHLv2hmHo/vvv1/r16xUdHa0777xTX331lcaNG6dff/1Vr732mq3t8OHD9e6772rw4MG66667tG7dOvXq1avYc+7QoYMt8NasWVOrVq1SdHS0cnJyNHr06BKdy6USExPVpUsXWa1WDRw4UM8//7w+//xzPfjgg6U6HgCUOQMAcMPFx8cbkopd3N3dbe3S09MNSYaPj49x7Ngxu2NMmjTJkGQMGjTIbntqaqohyRg+fLjd9meffdaQZKxbt862rUGDBoYkY/Xq1SWqOyoqyvDy8rri/u+//96QZIwZM8a2rXPnzkbnzp1t63369DGaNm161X5mzJhhSDLS09Mv2yfJcHJyMnbt2lXsvkmTJtnWL35H999/v127p556ypBk/PDDD4Zh/Pd7jo+Pv+Yxr1ZbgwYNjKioKNv66NGjDUnG//3f/9m2nTp1yggODjaCgoKMwsJCwzAMY/369YYko3HjxkZ+fr6t7euvv25IMnbu3HlZX3928XraunWr8cYbbxhVq1Y1zpw5YxiGYTz44INGly5dbPX16tXL9rkVK1YYkoxXXnnF7ngDBgwwLBaLsW/fPsMw/ntNPfXUU3btBg8efNn3Ex0dbQQGBhq//fabXduBAwcavr6+trqu9p1f6ujRo4aLi4uxaNEi27a77rrL6NOnzzU/CwDlhVv7AKAcvfnmm0pKSrJbVq1adVm7/v37X3HU44knnrBb//LLLyVJY8eOtdv+zDPPSNJlt5UFBwere/fupT6HP/P29pYknTp16opt/Pz8dPjwYW3durXU/XTu3FlNmjQpcfuRI0farf/973+X9N/v6kb58ssv1a5dO7vJH7y9vfX444/rwIEDSktLs2v/6KOP2j1T1qlTJ0kXbg8sqYceekhnz57VypUrderUKa1cufKKt/V9+eWXcnZ21qhRo+y2P/PMMzIMw3YtXvyeLm136eiSYRj66KOP1Lt3bxmGod9++822dO/eXdnZ2fruu+9KfC4XLV26VE5OTurfv79t26BBg7Rq1Sr98ccfpo8HADcCt/YBQDlq165diSabKG5mvyvtO3jwoJycnC6b+c9qtcrPz08HDx4s8bHNOn36tCSpatWqV2wzfvx4ff3112rXrp0aNmyobt26afDgwbr77rtL3I/ZmkNDQ+3WQ0JC5OTkZHu26UY5ePCg2rdvf9n2xo0b2/Y3a9bMtr1+/fp27apVqyZJpsJCzZo1FR4ervfee09nzpxRYWGhBgwYcMX6ateufdnv9ef6Lv6vk5OTQkJC7Npdehvo8ePHdfLkSf373//Wv//972L7PHbsWInP5aJ3331X7dq10++//67ff/9dkvSXv/xFBQUFWr58uR5//HHTxwSAskaQAoAK6Gqz6F1pn8Viue5jm/Xjjz9Kuvr07Y0bN9aePXu0cuVKrV69Wh999JHmzZuniRMnasqUKSXq53prvvS7udJ3VVhYeF39mOXs7FzsduOSiR+uZfDgwXrssceUlZWlHj16mJpC/npcfLfTww8/rKioqGLbtGjRwtQx9+7daxu9vDQQSxeenSJIAagICFIAcJNr0KCBioqKtHfvXtvIgnRhEoCTJ0+qQYMGN6zv//3f/5XFYtH//M//XLWdl5eXIiMjFRkZqYKCAvXr10+vvvqqJkyYIA8PjxKHwJLau3ev3SjWvn37VFRUZJuk4uLIz6Uv2b109E4qeUCVLvwWe/bsuWz7Tz/9ZNt/I/Tt21cjRozQ5s2b7aaiL66+r7/+WqdOnbIblbq0vovX1P79++1GoS49t4sz+hUWFpbZ1OSJiYlydXXV//7v/14WNDdu3Kg5c+YoIyPjstE8AChvPCMFADe5nj17SpJmz55tt33WrFmSVOxMa2Vh+vTpWrNmjSIjI4sdObjo4q1ZF7m5ualJkyYyDEPnzp2TJNu7sC4NNqX15ptv2q3PnTtXkmwz2/n
4+KhGjRrasGGDXbt58+ZddiwztfXs2VPffvutUlJSbNtyc3P173//W0FBQaae8zLD29tb8+fP1+TJk9W7d++r1ldYWKg33njDbvtrr70mi8Vi+34u/u+ls/5deo05Ozurf//++uijj2yjk392/Phx0+eSmJioTp06KTIyUgMGDLBbLk5F//7775s+LgCUNUakAKAcrVq1yvZf///srrvu0m233VaqY7Zs2VJRUVH697//rZMnT6pz58769ttvtWTJEj3wwAPq0qXLddV8/vx5vfvuu5KkvLw8HTx4UJ999pl27NihLl26XPHZmIu6desmq9Wqu+++WwEBAdq9e7feeOMN9erVyzYq0rp1a0nSCy+8oIEDB8rV1VW9e/cu9cuG09PTdf/99ysiIkIpKSm2abxbtmxpazN8+HBNnz5dw4cPV5s2bbRhwwa792FdZKa2559/3jYV+ahRo+Tv768lS5YoPT1dH330kZycbtx/v7zSrXV/1rt3b3Xp0kUvvPCCDhw4oJYtW2rNmjX69NNPNXr0aNszUXfeeacGDRqkefPmKTs7W3fddZfWrl1b7Duupk+frvXr16t9+/Z67LHH1KRJE504cULfffedvv76a504caLE57Blyxbt27dPMTExxe6vU6eOWrVqpcTERI0fP77ExwWAG4EgBQDlaOLEicVuj4+PL3WQkqS33npLt912mxISEvTJJ5/IarVqwoQJmjRpUqmPeVF+fr6GDh0qSfL09FStWrXUunVrTZw4UX379r1mOBgxYoQSExM1a9YsnT59WnXr1tWoUaP04osv2tq0bdtWL7/8shYsWKDVq1erqKhI6enppQ5Sy5Yt08SJE/X888/LxcVFMTExmjFjhl2biRMn6vjx4/rwww/1wQcfqEePHlq1apVq1apl185MbQEBAdq0aZPGjx+vuXPnKi8vTy1atNDnn39+w0YGzXByctJnn32miRMnatmyZYqPj1dQUJBmzJhhm+Xxorfffls1a9ZUYmKiVqxYoXvvvVdffPGF6tWrZ9cuICBA3377raZOnaqPP/5Y8+bNU/Xq1dW0aVP985//NFVfYmKiJF11VK13796aPHmyduzYYfr5KwAoSxbD7BOtAAAAAFDJ8YwUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIn3SEkqKirSkSNHVLVqVVksFkeXAwAAAMBBDMPQqVOnVLt27au+K5EgJenIkSOXvWAQAAAAQOV16NAh1a1b94r7CVKSqlatKunCl+Xj4+PgagAAAAA4Sk5OjurVq2fLCFdCkJJst/P5+PgQpAAAAABc85EfJpsAAAAAAJMIUgAAAABgEkEKAAAAAEziGSkAAADgKgzD0Pnz51VYWOjoUlAGnJ2d5eLict2vPSJIAQAAAFdQUFCgzMxMnTlzxtGloAx5enoqMDBQbm5upT4GQQoAAAAoRlFRkdLT0+Xs7KzatWvLzc3tukcx4FiGYaigoEDHjx9Xenq6QkNDr/rS3ashSAEAAADFKCgoUFFRkerVqydPT09Hl4MyUqVKFbm6uurgwYMqKCiQh4dHqY7DZBMAAADAVZR2xAIVV1n8plwVAAAAAGASQQoAAAAATOIZKQAAAMCk6ISt5drf4mFty7U/XBsjUgAAAMAtxGKxXHWZPHnydR17xYoV12yXnJyse++9V/7+/vL09FRoaKiioqJUUFBQ4r6CgoI0e/bsErePjY2Vs7OzZsyYUeLPXA+CFAAAAHALyczMtC2zZ8+Wj4+P3bZnn332hvaflpamiIgItWnTRhs2bNDOnTs1d+5cubm53dCXGr/99tt67rnn9Pbbb9+wPv6MIAUAAADcQqxWq23x9fWVxWKx27Z06VI1btxYHh4eatSokebNm2f7bEFBgWJiYhQYGCgPDw81aNBAsbGxki6MEElS3759ZbFYbOuXWrNmjaxWq+Li4tSsWTOFhIQoIiJCixYtUpUqVWz
tNm7cqE6dOqlKlSqqV6+eRo0apdzcXEnSPffco4MHD2rMmDG2kbSrSU5O1tmzZzV16lTl5ORo06ZN1/ENlgxBCgAAAKgkEhMTNXHiRL366qvavXu3pk2bppdeeklLliyRJM2ZM0efffaZPvjgA+3Zs0eJiYm2wLR164XnwuLj45WZmWlbv5TValVmZqY2bNhwxTr279+viIgI9e/fXzt27NCyZcu0ceNGxcTESJI+/vhj1a1bV1OnTrWNpF3N4sWLNWjQILm6umrQoEFavHix2a/GNCabAAAAACqJSZMmaebMmerXr58kKTg4WGlpaVq4cKGioqKUkZGh0NBQdezYURaLRQ0aNLB9tmbNmpIkPz8/Wa3WK/bx4IMP6quvvlLnzp1ltVrVoUMHde3aVY888oh8fHwkXXieaciQIRo9erQkKTQ0VHPmzFHnzp01f/58+fv7y9nZWVWrVr1qX5KUk5OjDz/8UCkpKZKkhx9+WJ06ddLrr78ub2/vUn9X18KIFAAAAFAJ5Obmav/+/YqOjpa3t7dteeWVV7R//35J0rBhw5Samqo77rhDo0aN0po1a0z34+zsrPj4eB0+fFhxcXGqU6eOpk2bpqZNm9pGln744QclJCTY1dG9e3cVFRUpPT3dVH/vv/++QkJC1LJlS0nSnXfeqQYNGmjZsmWmazeDIAUAAABUAqdPn5YkLVq0SKmpqbblxx9/1ObNmyVJrVq1Unp6ul5++WWdPXtWDz30kAYMGFCq/urUqaOhQ4fqjTfe0K5du5SXl6cFCxbYahkxYoRdHT/88IP27t2rkJAQU/0sXrxYu3btkouLi21JS0u74ZNOcGsfAAAAUAkEBASodu3a+uWXXzRkyJArtvPx8VFkZKQiIyM1YMAARURE6MSJE/L395erq2upZt6rVq2aAgMDbZNJtGrVSmlpaWrYsOEVP1OSWf527typbdu26ZtvvpG/v79t+4kTJ3TPPffop59+UqNGjUzXWxIEqQro0he88QI2AAAAlIUpU6Zo1KhR8vX1VUREhPLz87Vt2zb98ccfGjt2rGbNmqXAwED95S9/kZOTk5YvXy6r1So/Pz9JF2buW7t2re6++265u7urWrVql/WxcOFCpaamqm/fvgoJCVFeXp7eeecd7dq1S3PnzpUkjR8/Xh06dFBMTIyGDx8uLy8vpaWlKSkpSW+88Yatrw0bNmjgwIFyd3dXjRo1Lutr8eLFateunf76179etq9t27ZavHjxDXuvFEEKAAAAMOlm/Q/dw4cPl6enp2bMmKFx48bJy8tLzZs3t036ULVqVcXFxWnv3r1ydnZW27Zt9eWXX8rJ6cITQTNnztTYsWO1aNEi1alTRwcOHLisj3bt2mnjxo164okndOTIEXl7e6tp06ZasWKFOnfuLElq0aKFkpOT9cILL6hTp04yDEMhISGKjIy0HWfq1KkaMWKEQkJClJ+fL8Mw7PopKCjQu+++q/Hjxxd7rv3799fMmTM1bdo0ubq6lsG3Z89iXFpRJZSTkyNfX19lZ2fbZhJxJEakAAAAHC8vL0/p6ekKDg6Wh4eHo8tBGbrab1vSbMBkEwAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYJKLowsAAAAAbjrvRZZvf4OXlW9/uCZGpAAAAIBbiMViueoyefLk6zr2ihUrrtkuOTlZ9957r/z9/eXp6anQ0FBFRUWpoKCgxH0FBQVp9uzZJWp38dycnZ1Vu3ZtRUdH648//ihxX6VBkAIAAABuIZmZmbZl9uzZ8vHxsdv27LPP3tD+09LSFBERoTZt2mjDhg3auXOn5s6dKzc3NxUWFt6QPqdOnarMzExlZGQoMTFRGzZs0KhRo25IXxcRpAAAAIBbiNVqtS2+vr6yWCx225YuXarGjRvLw8NDjRo10rx582yfLSgoUExMjAIDA+Xh4aEGDRo
oNjZW0oWRH0nq27evLBaLbf1Sa9askdVqVVxcnJo1a6aQkBBFRERo0aJFqlKliq3dxo0b1alTJ1WpUkX16tXTqFGjlJubK0m65557dPDgQY0ZM8Y22nQ1VatWldVqVZ06ddSlSxdFRUXpu+++u45v8doIUgAAAEAlkZiYqIkTJ+rVV1/V7t27NW3aNL300ktasmSJJGnOnDn67LPP9MEHH2jPnj1KTEy0BaatW7dKkuLj45WZmWlbv5TValVmZqY2bNhwxTr279+viIgI9e/fXzt27NCyZcu0ceNGxcTESJI+/vhj1a1b1zbSlJmZWeJz/PXXX/X555+rffv2Jf5MaTDZBAAAAFBJTJo0STNnzlS/fv0kScHBwUpLS9PChQsVFRWljIwMhYaGqmPHjrJYLGrQoIHtszVr1pQk+fn5yWq1XrGPBx98UF999ZU6d+4sq9WqDh06qGvXrnrkkUfk4+MjSYqNjdWQIUM0evRoSVJoaKjmzJmjzp07a/78+fL395ezs7NtpOlaxo8frxdffFGFhYXKy8tT+/btNWvWrNJ+TSXCiBQAAABQCeTm5mr//v2Kjo6Wt7e3bXnllVe0f/9+SdKwYcOUmpqqO+64Q6NGjdKaNWtM9+Ps7Kz4+HgdPnxYcXFxqlOnjqZNm6amTZvaRpZ++OEHJSQk2NXRvXt3FRUVKT093XSf48aNU2pqqnbs2KG1a9dKknr16nXDnsmSGJECAAAAKoXTp09LkhYtWnTZbW/Ozs6SpFatWik9PV2rVq3S119/rYceekjh4eH68MMPTfdXp04dDR06VEOHDtXLL7+s22+/XQsWLNCUKVN0+vRpjRgxotgJIerXr2+6rxo1aqhhw4aSLoxuzZ49W2FhYVq/fr3Cw8NNH68kCFIAAABAJRAQEKDatWvrl19+0ZAhQ67YzsfHR5GRkYqMjNSAAQMUERGhEydOyN/fX66urqUa5alWrZoCAwNtk0m0atVKaWlptvBTnOuZ5e9iMDx79mypPl8SBCkAAACgkpgyZYpGjRolX19fRUREKD8/X9u2bdMff/yhsWPHatasWQoMDNRf/vIXOTk5afny5bJarfLz85N0Yea+tWvX6u6775a7u7uqVat2WR8LFy5Uamqq+vbtq5CQEOXl5emdd97Rrl27NHfuXEkXnmnq0KGDYmJiNHz4cHl5eSktLU1JSUl64403bH1t2LBBAwcOlLu7u2rUqHHF8zp16pSysrJkGIYOHTqk5557TjVr1tRdd91V9l/i/0eQAgAAAMwavMzRFZTK8OHD5enpqRkzZmjcuHHy8vJS8+bNbZM+VK1aVXFxcdq7d6+cnZ3Vtm1bffnll3JyujC1wsyZMzV27FgtWrRIderU0YEDBy7ro127dtq4caOeeOIJHTlyRN7e3mratKlWrFihzp07S5JatGih5ORkvfDCC+rUqZMMw1BISIgiIyNtx5k6dapGjBihkJAQ5efnyzCMK57XxIkTNXHiREkXJsVo27at1qxZo+rVq5fRN3c5i3G1iiqJnJwc+fr6Kjs72zaTiCNFJ9hPJbl4WFsHVQIAAFB55eXlKT09XcHBwfLw8HB0OShDV/ttS5oNmLUPAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMcnF0AQAAAMDNJmZtTLn290bXN8q1P1wbI1IAAADALcRisVx1mTx58nUde8WKFddsl5ycrHvvvVf+/v7y9PRUaGiooqKiVFBQUOK+goKCNHv27BK1/f777/Xggw8qICBAHh4eCg0N1WOPPaaff/65xP2ZRZACAAAAbiGZmZm2Zfbs2fLx8bHb9uyzz97Q/tPS0hQREaE2bdpow4YN2rlzp+bOnSs3NzcVFhaWeX8rV65Uhw4dlJ+fr8TERO3evVvvvvuufH199dJLL5V5fxcRpAAAAIBbiNVqtS2+vr6yWCx225Y
uXarGjRvLw8NDjRo10rx582yfLSgoUExMjAIDA+Xh4aEGDRooNjZW0oURIknq27evLBaLbf1Sa9askdVqVVxcnJo1a6aQkBBFRERo0aJFqlKliq3dxo0b1alTJ1WpUkX16tXTqFGjlJubK0m65557dPDgQY0ZM8Y2klacM2fO6NFHH1XPnj312WefKTw8XMHBwWrfvr3+9a9/aeHChWXwjRaPIAUAAABUEomJiZo4caJeffVV7d69W9OmTdNLL72kJUuWSJLmzJmjzz77TB988IH27NmjxMREW2DaunWrJCk+Pl6ZmZm29UtZrVZlZmZqw4YNV6xj//79ioiIUP/+/bVjxw4tW7ZMGzduVEzMhWfPPv74Y9WtW1dTp061jaQV56uvvtJvv/2m5557rtj9fn5+JflaSoXJJgAAAIBKYtKkSZo5c6b69esnSQoODlZaWpoWLlyoqKgoZWRkKDQ0VB07dpTFYlGDBg1sn61Zs6akC+HEarVesY8HH3xQX331lTp37iyr1aoOHTqoa9eueuSRR+Tj4yNJio2N1ZAhQzR69GhJUmhoqObMmaPOnTtr/vz58vf3l7Ozs6pWrXrVvvbu3StJatSo0XV9L6XBiBQAAABQCeTm5mr//v2Kjo6Wt7e3bXnllVe0f/9+SdKwYcOUmpqqO+64Q6NGjdKaNWtM9+Ps7Kz4+HgdPnxYcXFxqlOnjqZNm6amTZvaRpZ++OEHJSQk2NXRvXt3FRUVKT09vcR9GYZhur6yQpACAAAAKoHTp09LkhYtWqTU1FTb8uOPP2rz5s2SpFatWik9PV0vv/yyzp49q4ceekgDBgwoVX916tTR0KFD9cYbb2jXrl3Ky8vTggULbLWMGDHCro4ffvhBe/fuVUhISIn7uP322yVJP/30U6lqvB7c2gcAAABUAgEBAapdu7Z++eUXDRky5IrtfHx8FBkZqcjISA0YMEARERE6ceKE/P395erqWqqZ96pVq6bAwEDbZBKtWrVSWlqaGjZseMXPlGSWv27duqlGjRqKi4vTJ598ctn+kydP3rDnpBw6IjV58uTL5rX/8/2NeXl5GjlypKpXry5vb2/1799fR48etTtGRkaGevXqJU9PT9WqVUvjxo3T+fPny/tUAAAAgApvypQpio2N1Zw5c/Tzzz9r586dio+P16xZsyRJs2bN0vvvv6+ffvpJP//8s5YvXy6r1WoLI0FBQVq7dq2ysrL0xx9/FNvHwoUL9eSTT2rNmjXav3+/du3apfHjx2vXrl3q3bu3JGn8+PHatGmTYmJilJqaqr179+rTTz+1TTZxsa8NGzbo119/1W+//VZsX15eXnrrrbf0xRdf6P7779fXX3+tAwcOaNu2bXruuef0xBNPlOG3Z8/hI1JNmzbV119/bVt3cflvSWPGjNEXX3yh5cuXy9fXVzExMerXr5/+85//SJIKCwvVq1cvWa1Wbdq0SZmZmXrkkUfk6uqqadOmlfu5AAAAoHJ4o+sbji6hVIYPHy5PT0/NmDFD48aNk5eXl5o3b26b9KFq1aqKi4vT3r175ezsrLZt2+rLL7+Uk9OF8ZeZM2dq7NixWrRokerUqaMDBw5c1ke7du20ceNGPfHEEzpy5Ii8vb3VtGlTrVixQp07d5YktWjRQsnJyXrhhRfUqVMnGYahkJAQRUZG2o4zdepUjRgxQiEhIcrPz7/i81B9+vTRpk2bFBsbq8GDBysnJ0f16tXTvffeq1deeaVsv8A/sRgOfEJr8uTJWrFihVJTUy/bl52drZo1a+q9996z3Zf5008/qXHjxkpJSVGHDh20atUq3XfffTpy5IgCAgIkSQsWLND48eN1/Phxubm5laiOnJwc+fr6Kjs72zaTiCNFJ9hPJbl4WFsHVQIAAFB55eXlKT09XcHBwfLw8HB0OShDV/ttS5oNHD7ZxN69e1W7dm3ddtttGjJkiDIyMiRJ27dv17lz5xQeHm5r26hRI9WvX18pKSmSpJSUFDV
v3twWoiSpe/fuysnJ0a5du67YZ35+vnJycuwWAAAAACgphwap9u3bKyEhQatXr9b8+fOVnp6uTp066dSpU8rKypKbm9tlD4cFBAQoKytLkpSVlWUXoi7uv7jvSmJjY+Xr62tb6tWrV7YnBgAAAOCW5tBnpHr06GH7c4sWLdS+fXs1aNBAH3zwgapUqXLD+p0wYYLGjh1rW794HyUAAAAAlITDb+37Mz8/P91+++3at2+frFarCgoKdPLkSbs2R48etb3d2Gq1XjaL38X1q70B2d3dXT4+PnYLAAAAAJRUhQpSp0+f1v79+xUYGKjWrVvL1dVVa9eute3fs2ePMjIyFBYWJkkKCwvTzp07dezYMVubpKQk+fj4qEmTJuVePwAAAG49DpybDTdIWfymDr2179lnn1Xv3r3VoEEDHTlyRJMmTZKzs7MGDRokX19fRUdHa+zYsfL395ePj4/+/ve/KywsTB06dJB04QVcTZo00dChQxUXF6esrCy9+OKLGjlypNzd3R15agAAALjJubq6SpLOnDlzQx87Qfk7c+aMpP/+xqXh0CB1+PBhDRo0SL///rtq1qypjh07avPmzapZs6Yk6bXXXpOTk5P69++v/Px8de/eXfPmzbN93tnZWStXrtSTTz6psLAweXl5KSoqSlOnTnXUKQEAAOAW4ezsLD8/P9vdT56enrJYLA6uCtfDMAydOXNGx44dk5+fn5ydnUt9LIe+R6qi4D1SAAAAKI5hGMrKyrrsuX3c3Pz8/GS1WosNxiXNBg4dkQIAAAAqMovFosDAQNWqVUvnzp1zdDkoA66urtc1EnURQQoAAAC4Bmdn5zL5l2/cOirUrH0AAAAAcDMgSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJFSZITZ8+XRaLRaNHj7Zty8vL08iRI1W9enV5e3urf//+Onr0qN3nMjIy1KtXL3l6eqpWrVoaN26czp8/X87VAwAAAKhMKkSQ2rp1qxYuXKgWLVrYbR8zZow+//xzLV++XMnJyTpy5Ij69etn219YWKhevXqpoKBAmzZt0pIlS5SQkKCJEyeW9ykAAAAAqEQcHqROnz6tIUOGaNGiRapWrZpte3Z2thYvXqxZs2bp3nvvVevWrRUfH69NmzZp8+bNkqQ1a9YoLS1N7777ru6880716NFDL7/8st58800VFBQ46pQAAAAA3OIcHqRGjhypXr16KTw83G779u3bde7cObvtjRo1Uv369ZWSkiJJSklJUfPmzRUQEGBr0717d+Xk5GjXrl1X7DM/P185OTl2CwAAAACUlIsjO1+6dKm+++47bd269bJ9WVlZcnNzk5+fn932gIAAZWVl2dr8OURd3H9x35XExsZ
qypQp11k9AAAAgMrKYSNShw4d0tNPP63ExER5eHiUa98TJkxQdna2bTl06FC59g8AAADg5uawILV9+3YdO3ZMrVq1kouLi1xcXJScnKw5c+bIxcVFAQEBKigo0MmTJ+0+d/ToUVmtVkmS1Wq9bBa/i+sX2xTH3d1dPj4+dgsAAAAAlJTDglTXrl21c+dOpaam2pY2bdpoyJAhtj+7urpq7dq1ts/s2bNHGRkZCgsLkySFhYVp586dOnbsmK1NUlKSfHx81KRJk3I/JwAAAACVg8OekapataqaNWtmt83Ly0vVq1e3bY+OjtbYsWPl7+8vHx8f/f3vf1dYWJg6dOggSerWrZuaNGmioUOHKi4uTllZWXrxxRc1cuRIubu7l/s5AQAAAKgcHDrZxLW89tprcnJyUv/+/ZWfn6/u3btr3rx5tv3Ozs5auXKlnnzySYWFhcnLy0tRUVGaOnWqA6sGAAAAcKuzGIZhOLoIR8vJyZGvr6+ys7MrxPNS0Qn2sxguHtbWQZUAAAAAlUtJs4HD3yMFAAAAADcbghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgUqmC1C+//FLWdQAAAADATaNUQaphw4bq0qWL3n33XeXl5ZV1TQAAAABQoZUqSH333Xdq0aKFxo4dK6vVqhEjRujbb78t69oAAAAAoEIqVZC688479frrr+vIkSN6++23lZmZqY4dO6pZs2aaNWuWjh8/XtZ1AgAAAECFcV2TTbi4uKhfv35avny5/vnPf2rfvn169tlnVa9ePT3yyCPKzMwsqzoBAAAAoMK4riC1bds2PfXUUwoMDNSsWbP07LPPav/+/UpKStKRI0fUp0+fsqoTAAAAACoMl9J8aNasWYqPj9eePXvUs2dPvfPOO+rZs6ecnC7ksuDgYCUkJCgoKKgsawUAAACACqFUQWr+/Pn629/+pmHDhikwMLDYNrVq1dLixYuvqzgAAAAAqIhKFaT27t17zTZubm6KiooqzeEBAAAAoEIr1TNS8fHxWr58+WXbly9friVLllx3UQAAAABQkZUqSMXGxqpGjRqXba9Vq5amTZt23UUBAAAAQEVWqiCVkZGh4ODgy7Y3aNBAGRkZ110UAAAAAFRkpQpStWrV0o4dOy7b/sMPP6h69erXXRQAAAAAVGSlClKDBg3SqFGjtH79ehUWFqqwsFDr1q3T008/rYEDB5Z1jQAAAABQoZRq1r6XX35ZBw4cUNeuXeXicuEQRUVFeuSRR3hGCgAAAMAtr1RBys3NTcuWLdPLL7+sH374QVWqVFHz5s3VoEGDsq4PAAAAACqcUgWpi26//XbdfvvtZVULAAAAANwUShWkCgsLlZCQoLVr1+rYsWMqKiqy279u3boyKQ4AAAAAKqJSBamnn35aCQkJ6tWrl5o1ayaLxVLWdQEAAABAhVWqILV06VJ98MEH6tmzZ1nXAwAAAAAVXqmmP3dzc1PDhg3LuhYAAAAAuCmUKkg988wzev3112UYRlnXAwAAAAAVXqlu7du4caPWr1+vVatWqWnTpnJ1dbXb//HHH5dJcQAAAABQEZUqSPn5+alv375lXQsAAAAA3BRKFaT
i4+PLpPP58+dr/vz5OnDggCSpadOmmjhxonr06CFJysvL0zPPPKOlS5cqPz9f3bt317x58xQQEGA7RkZGhp588kmtX79e3t7eioqKUmxsrFxcrusVWQAAAABwRaV6RkqSzp8/r6+//loLFy7UqVOnJElHjhzR6dOnS3yMunXravr06dq+fbu2bdume++9V3369NGuXbskSWPGjNHnn3+u5cuXKzk5WUeOHFG/fv1sny8sLFSvXr1UUFCgTZs2acmSJUpISNDEiRNLe1oAAAAAcE0WoxQzRhw8eFARERHKyMhQfn6+fv75Z9122216+umnlZ+frwULFpS6IH9/f82YMUMDBgxQzZo19d5772nAgAGSpJ9++kmNGzdWSkqKOnTooFWrVum+++7TkSNHbKNUCxYs0Pjx43X8+HG5ubmVqM+cnBz5+voqOztbPj4+pa69rEQnbLVbXzysrYMqAQAAACqXkmaDUo1IPf3002rTpo3++OMPValSxba9b9++Wrt2bWkOqcLCQi1dulS5ubkKCwvT9u3bde7cOYWHh9vaNGrUSPXr11dKSookKSUlRc2bN7e71a979+7KycmxjWoVJz8/Xzk5OXYLAAAAAJRUqR4k+r//+z9t2rTpshGfoKAg/frrr6aOtXPnToWFhSkvL0/e3t765JNP1KRJE6WmpsrNzU1+fn527QMCApSVlSVJysrKsgtRF/df3HclsbGxmjJliqk6AQAAAOCiUo1IFRUVqbCw8LLthw8fVtWqVU0d64477lBqaqq2bNmiJ598UlFRUUpLSytNWSU2YcIEZWdn25ZDhw7d0P4AAAAA3FpKFaS6deum2bNn29YtFotOnz6tSZMmqWfPnqaO5ebmpoYNG6p169aKjY1Vy5Yt9frrr8tqtaqgoEAnT560a3/06FFZrVZJktVq1dGjRy/bf3Hflbi7u8vHx8duAQAAAICSKlWQmjlzpv7zn/+oSZMmysvL0+DBg2239f3zn/+8roKKioqUn5+v1q1by9XV1e6Zqz179igjI0NhYWGSpLCwMO3cuVPHjh2ztUlKSpKPj4+aNGlyXXUAAAAAwJWU6hmpunXr6ocfftDSpUu1Y8cOnT59WtHR0RoyZIjd5BPXMmHCBPXo0UP169fXqVOn9N577+mbb77RV199JV9fX0VHR2vs2LHy9/eXj4+P/v73vyssLEwdOnSQdGFkrEmTJho6dKji4uKUlZWlF198USNHjpS7u3tpTg0AAAAArqnUb611cXHRww8/fF2dHzt2TI888ogyMzPl6+urFi1a6KuvvtL//M//SJJee+01OTk5qX///nYv5L3I2dlZK1eu1JNPPqmwsDB5eXkpKipKU6dOva66AAAAAOBqSvUeqXfeeeeq+x955JFSF+QIvEcKAAAAgFTybFCqEamnn37abv3cuXM6c+aM3Nzc5OnpedMFKQAAAAAwo1STTfzxxx92y+nTp7Vnzx517NhR77//flnXCAAAAAAVSqmCVHFCQ0M1ffr0y0arAAAAAOBWU2ZBSrowAcWRI0fK8pAAAAAAUOGU6hmpzz77zG7dMAxlZmbqjTfe0N13310mhQEAAABARVWqIPXAAw/YrVssFtWsWVP33nuvZs6cWRZ1AQAAAECFVaogVVRUVNZ1AAAAAMBNo0yfkQIAAACAyqBUI1Jjx44tcdtZs2aVpgsAAAAAqLBKFaS+//57ff/99zp37pzuuOMOSdLPP/8sZ2dntWrVytbOYrGUTZUAAAAAUIGUKkj17t1bVatW1ZIlS1StWjVJF17S++ijj6pTp0565plnyrRIAAAAAKhISvWM1MyZMxUbG2sLUZJUrVo1vfLKK8zaBwAAAOCWV6oglZOTo+PHj1+2/fjx4zp16tR1FwUAAAAAFVmpglTfvn316KOP6uOPP9bhw4d1+PBhffTRR4qOjla/fv3KukYAAAAAqFBK9YzUggUL9Oyzz2rw4ME6d+7chQO5uCg6OlozZswo0wI
BAAAAoKIpVZDy9PTUvHnzNGPGDO3fv1+SFBISIi8vrzItDgAAAAAqout6IW9mZqYyMzMVGhoqLy8vGYZRVnUBAAAAQIVVqiD1+++/q2vXrrr99tvVs2dPZWZmSpKio6OZ+hwAAADALa9UQWrMmDFydXVVRkaGPD09bdsjIyO1evXqMisOAAAAACqiUj0jtWbNGn311VeqW7eu3fbQ0FAdPHiwTAoDAAAAgIqqVCNSubm5diNRF504cULu7u7XXRQAAAAAVGSlClKdOnXSO++8Y1u3WCwqKipSXFycunTpUmbFAQAAAEBFVKpb++Li4tS1a1dt27ZNBQUFeu6557Rr1y6dOHFC//nPf8q6RgAAAACoUEo1ItWsWTP9/PPP6tixo/r06aPc3Fz169dP33//vUJCQsq6RgAAAACoUEyPSJ07d04RERFasGCBXnjhhRtREwAAAABUaKZHpFxdXbVjx44bUQsAAAAA3BRKdWvfww8/rMWLF5d1LQAAAABwUyjVZBPnz5/X22+/ra+//lqtW7eWl5eX3f5Zs2aVSXEAAAAAUBGZClK//PKLgoKC9OOPP6pVq1aSpJ9//tmujcViKbvqAAAAAKACMhWkQkNDlZmZqfXr10uSIiMjNWfOHAUEBNyQ4gAAAACgIjL1jJRhGHbrq1atUm5ubpkWBAAAAAAVXakmm7jo0mAFAAAAAJWBqSBlsVguewaKZ6IAAAAAVDamnpEyDEPDhg2Tu7u7JCkvL09PPPHEZbP2ffzxx2VXIQAAAABUMKaCVFRUlN36ww8/XKbFAAAAAMDNwFSQio+Pv1F1AAAAAMBN47ommwAAAACAyoggBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAExyaJCKjY1V27ZtVbVqVdWqVUsPPPCA9uzZY9cmLy9PI0eOVPXq1eXt7a3+/fvr6NGjdm0yMjLUq1cveXp6qlatWho3bpzOnz9fnqcCAAAAoBJxaJBKTk7WyJEjtXnzZiUlJencuXPq1q2bcnNzbW3GjBmjzz//XMuXL1dycrKOHDmifv362fYXFhaqV69eKigo0KZNm7RkyRIlJCRo4sSJjjglAAAAAJWAxTAMw9FFXHT8+HHVqlVLycnJ+utf/6rs7GzVrFlT7733ngYMGCBJ+umnn9S4cWOlpKSoQ4cOWrVqle677z4dOXJEAQEBkqQFCxZo/PjxOn78uNzc3K7Zb05Ojnx9fZWdnS0fH58beo4lEZ2w1W598bC2DqoEAAAAqFxKmg0q1DNS2dnZkiR/f39J0vbt23Xu3DmFh4fb2jRq1Ej169dXSkqKJCklJUXNmze3hShJ6t69u3JycrRr165i+8nPz1dOTo7dAgAAAAAlVWGCVFFRkUaPHq27775bzZo1kyRlZWXJzc1Nfn5+dm0DAgKUlZVla/PnEHVx/8V9xYmNjZWvr69tqVevXhmfDQAAAIBbWYUJUiNHjtSPP/6opUuX3vC+JkyYoOzsbNty6NChG94nAAAAgFuHi6MLkKSYmBitXLlSGzZsUN26dW3brVarCgoKdPLkSbtRqaNHj8pqtdrafPvtt3bHuzir38U2l3J3d5e7u3sZnwUAAACAysKhI1KGYSgmJkaffPKJ1q1bp+DgYLv9rVu3lqurq9auXWvbtmfPHmVkZCgsLEySFBYWpp07d+rYsWO2NklJSfLx8VGTJk3K50QAAAAAVCoOHZEaOXKk3nvvPX366ae
qWrWq7ZkmX19fValSRb6+voqOjtbYsWPl7+8vHx8f/f3vf1dYWJg6dOggSerWrZuaNGmioUOHKi4uTllZWXrxxRc1cuRIRp0AAAAA3BAODVLz58+XJN1zzz122+Pj4zVs2DBJ0muvvSYnJyf1799f+fn56t69u+bNm2dr6+zsrJUrV+rJJ59UWFiYvLy8FBUVpalTp5bXaQAAAACoZCrUe6QchfdIAQAAAJBu0vdIAQAAAMDNgCAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmOTQILVhwwb17t1btWvXlsVi0YoVK+z2G4ahiRMnKjAwUFWqVFF4eLj27t1r1+bEiRMaMmSIfHx85Ofnp+joaJ0+fboczwIAAABAZePQIJWbm6uWLVvqzTffLHZ/XFyc5syZowULFmjLli3y8vJS9+7dlZeXZ2szZMgQ7dq1S0lJSVq5cqU2bNigxx9/vLxOAQAAAEAl5OLIznv06KEePXoUu88wDM2ePVsvvvii+vTpI0l65513FBAQoBUrVmjgwIHavXu3Vq9era1bt6pNmzaSpLlz56pnz57617/+pdq1a5fbuQAAAACoPCrsM1Lp6enKyspSeHi4bZuvr6/at2+vlJQUSVJKSor8/PxsIUqSwsPD5eTkpC1btlzx2Pn5+crJybFbAAAAAKCkKmyQysrKkiQFBATYbQ8ICLDty8rKUq1atez2u7i4yN/f39amOLGxsfL19bUt9erVK+PqAQAAANzKKmyQupEmTJig7Oxs23Lo0CFHlwQAAADgJlJhg5TVapUkHT161G770aNHbfusVquOHTtmt//8+fM6ceKErU1x3N3d5ePjY7cAAAAAQElV2CAVHBwsq9WqtWvX2rbl5ORoy5YtCgsLkySFhYXp5MmT2r59u63NunXrVFRUpPbt25d7zQAAAAAqB4fO2nf69Gnt27fPtp6enq7U1FT5+/urfv36Gj16tF555RWFhoYqODhYL730kmrXrq0HHnhAktS4cWNFREToscce04IFC3Tu3DnFxMRo4MCBzNgHAAAA4IZxaJDatm2bunTpYlsfO3asJCkqKkoJCQl67rnnlJubq8cff1wnT55Ux44dtXr1anl4eNg+k5iYqJiYGHXt2lVOTk7q37+/5syZU+7nAgAAAKDysBiGYTi6CEfLycmRr6+vsrOzK8TzUtEJW+3WFw9r66BKAAAAgMqlpNmgwj4jBQAAAAAVFUEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAA
AAADAJBdHF4Bri07Yavvz4mFtHVgJAAAAAIkRKQAAAAAwjSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTXBxdAMyJTthqt754WFsHVQIAAABUXoxIAQAAAIBJBCkAAAAAMIlb+25y3OoHAAAAlD9GpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJNcHF0AylZ0wla79cXD2jqoEgAAAODWxYgUAAAAAJhEkAIAAAAAkwhSAAAAAGASz0hVIjw/BQAAAJQNRqQAAAAAwCRGpG5xl45CAQAAALh+jEgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJCabwBX9eaIKpkoHAAAA/osgVYldz3ulboV3Ut0K5wAAAADHIEihRJhGHQAAAPgvghRsyjIsmTnW1UaCGDUCAABARcRkEwAAAABgEiNSKBPc+gcAAIDK5JYZkXrzzTcVFBQkDw8PtW/fXt9++62jS0IpRSdstS0AAABARWQxDMNwdBHXa9myZXrkkUe0YMECtW/fXrNnz9by5cu1Z88e1apV65qfz8nJka+vr7Kzs+Xj41MOFV8dAaJsXPo81fV8r2X5bJaZ576u1fZqU9Rfz2dvNdf67W/18wcAACVX0mxwS9zaN2vWLD322GN69NFHJUkLFizQF198obffflvPP/+8g6vDraCsJs9wpKudg5nQdalrne+N+u6up+aK4kZNpmL2u6mo1ywAlBf+XkRp3PRBqqCgQNu3b9eECRNs25ycnBQeHq6UlJRiP5Ofn6/8/HzbenZ2tqQL6bMiKDh72tEl3BIu/T3L63sdOn+9qfZ/rnNk4vZSH9tsvyWtSbr6d3dp22udw9Vc6xzeHNL6ijVdz+99Pd/dn2uSrn7+l7a9tMY/13Gt4166/2rHvdZ3U1b9Xupan/3zfjPHLe7YV+unorie873VXc91dqPq4DeqXK719+atzlHXfkX9Z+7i73+tG/du+lv7jhw5ojp16mjTpk0KCwuzbX/uueeUnJysLVu2XPaZyZMna8qUKeVZJgAAAICbyKFDh1S3bt0r7r/pR6RKY8KECRo7dqxtvaioSCdOnFD16tVlsVgcWNmFBFyvXj0dOnSoQjyvBcfgOoDEdYALuA4gcR3gAq6D8mEYhk6dOqXatWtftd1NH6Rq1KghZ2dnHT161G770aNHZbVai/2Mu7u73N3d7bb5+fndqBJLxcfHh39AwHUASVwHuIDrABLXAS7gOrjxfH19r9nmpp/+3M3NTa1bt9batWtt24qKirR27Vq7W/0AAAAAoKzc9CNSkjR27FhFRUWpTZs2ateunWbPnq3c3FzbLH4AAAAAUJZuiSAVGRmp48ePa+LEicrKytKdd96p1atXKyAgwNGlmebu7q5JkyZddushKheuA0hcB7iA6wAS1wEu4DqoWG76WfsAAAAAoLzd9M9IAQAAAEB5I0gBAAAAgEkEKQAAAAAwiSAFAAAAACYRpCqQN998U0FBQfLw8FD79u317bffOroklKENGzaod+/eql27tiwWi1asWGG33zAMTZw4UYGBgapSpYrCw8O1d+9euzYnTpzQkCFD5OPjIz8/P0VHR+v06dPleBa4XrGxsWrbtq2qVq2qWrVq6YEHHtCePXvs2uTl5WnkyJGqXr26vL291b9//8teOp6RkaFevXrJ09NTtWrV0rhx43T+/PnyPBVch/nz56tFixa2l2qGhYVp1apVtv1cA5XP9OnTZbFYNHr0aNs2roPKYfL
kybJYLHZLo0aNbPu5DiouglQFsWzZMo0dO1aTJk3Sd999p5YtW6p79+46duyYo0tDGcnNzVXLli315ptvFrs/Li5Oc+bM0YIFC7RlyxZ5eXmpe/fuysvLs7UZMmSIdu3apaSkJK1cuVIbNmzQ448/Xl6ngDKQnJyskSNHavPmzUpKStK5c+fUrVs35ebm2tqMGTNGn3/+uZYvX67k5GQdOXJE/fr1s+0vLCxUr169VFBQoE2bNmnJkiVKSEjQxIkTHXFKKIW6detq+vTp2r59u7Zt26Z7771Xffr00a5duyRxDVQ2W7du1cKFC9WiRQu77VwHlUfTpk2VmZlpWzZu3Gjbx3VQgRmoENq1a2eMHDnStl5YWGjUrl3biI2NdWBVuFEkGZ988oltvaioyLBarcaMGTNs206ePGm4u7sb77//vmEYhpGWlmZIMrZu3Wprs2rVKsNisRi//vprudWOsnXs2DFDkpGcnGwYxoXf3dXV1Vi+fLmtze7duw1JRkpKimEYhvHll18aTk5ORlZWlq3N/PnzDR8fHyM/P798TwBlplq1asZbb73FNVDJnDp1yggNDTWSkpKMzp07G08//bRhGPxdUJlMmjTJaNmyZbH7uA4qNkakKoCCggJt375d4eHhtm1OTk4KDw9XSkqKAytDeUlPT1dWVpbdNeDr66v27dvbroGUlBT5+fmpTZs2tjbh4eFycnLSli1byr1mlI3s7GxJkr+/vyRp+/btOnfunN210KhRI9WvX9/uWmjevLndS8e7d++unJwc24gGbh6FhYVaunSpcnNzFRYWxjVQyYwcOVK9evWy+70l/i6obPbu3avatWvrtttu05AhQ5SRkSGJ66Cic3F0AZB+++03FRYW2v0DIEkBAQH66aefHFQVylNWVpYkFXsNXNyXlZWlWrVq2e13cXGRv7+/rQ1uLkVFRRo9erTuvvtuNWvWTNKF39nNzU1+fn52bS+9Foq7Vi7uw81h586dCgsLU15enry9vfXJJ5+oSZMmSk1N5RqoJJYuXarvvvtOW7duvWwffxdUHu3bt1dCQoLuuOMOZWZmasqUKerUqZN+/PFHroMKjiAFAA4ycuRI/fjjj3b3wqPyuOOOO5Samqrs7Gx9+OGHioqKUnJysqPLQjk5dOiQnn76aSUlJcnDw8PR5cCBevToYftzixYt1L59ezVo0EAffPCBqlSp4sDKcC3c2lcB1KhRQ87OzpfNwHL06FFZrVYHVYXydPF3vto1YLVaL5t85Pz58zpx4gTXyU0oJiZGK1eu1Pr161W3bl3bdqvVqoKCAp08edKu/aXXQnHXysV9uDm4ubmpYcOGat26tWJjY9WyZUu9/vrrXAOVxPbt23Xs2DG1atVKLi4ucnFxUXJysubMmSMXFxcFBARwHVRSfn5+uv3227Vv3z7+PqjgCFIVgJubm1q3bq21a9fathUVFWnt2rUKCwtzYGUoL8HBwbJarXbXQE5OjrZs2WK7BsLCwnTy5Elt377d1mbdunUqKipS+/bty71mlI5hGIqJidEnn3yidevWKTg42G5/69at5erqanct7NmzRxkZGXbXws6dO+2CdVJSknx8fNSkSZPyORGUuaKiIuXn53MNVBJdu3bVzp07lZqaalvatGmjIUOG2P7MdVA5nT59Wvv371dgYCB/H1R0jp7tAhcsXbrUcHd3NxISEoy0tDTj8ccfN/z8/OxmYMHN7dSpU8b3339vfP/994YkY9asWcb3339vHDx40DAMw5g+fbrh5+dnfPrpp8aOHTuMPn36GMHBwcbZs2dtx4iIiDD+8pe/GFu2bDE2btxohIaGGoMGDXLUKaEUnnzyScPX19f45ptvjMzMTNty5swZW5snnnjCqF+/vrFu3Tpj27ZtRlhYmBEWFmbbf/78eaNZs2ZGt27djNTUVGP16tVGzZo1jQkTJjjilFAKzz//vJGcnGykp6cbO3bsMJ5//nnDYrEYa9asMQyDa6Cy+vO
sfYbBdVBZPPPMM8Y333xjpKenG//5z3+M8PBwo0aNGsaxY8cMw+A6qMgIUhXI3Llzjfr16xtubm5Gu3btjM2bNzu6JJSh9evXG5IuW6KiogzDuDAF+ksvvWQEBAQY7u7uRteuXY09e/bYHeP33383Bg0aZHh7exs+Pj7Go48+apw6dcoBZ4PSKu4akGTEx8fb2pw9e9Z46qmnjGrVqhmenp5G3759jczMTLvjHDhwwOjRo4dRpUoVo0aNGsYzzzxjnDt3rpzPBqX1t7/9zWjQoIHh5uZm1KxZ0+jatastRBkG10BldWmQ4jqoHCIjI43AwEDDzc3NqFOnjhEZGWns27fPtp/roOKyGIZhOGYsDAAAAABuTjwjBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAC3lGHDhslisVy2REREOLo0AMAtxMXRBQAAUNYiIiIUHx9vt83d3b3YtufOnZOrq6vdtoKCArm5uZnut7SfAwDcfBiRAgDcctzd3WW1Wu2WatWqSZIsFovmz5+v+++/X15eXnr11Vc1efJk3XnnnXrrrbcUHBwsDw8PSVJGRob69Okjb29v+fj46KGHHtLRo0dt/VzpcwCAWx9BCgBQ6UyePFl9+/bVzp079be//U2StG/fPn300Uf6+OOPlZqaqqKiIvXp00cnTpxQcnKykpKS9MsvvygyMtLuWJd+DgBQOXBrHwDglrNy5Up5e3vbbfvHP/6hf/zjH5KkwYMH69FHH7XbX1BQoHfeeUc1a9aUJCUlJWnnzp1KT09XvXr1JEnvvPOOmjZtqq1bt6pt27bFfg4AUDkQpAAAt5wuXbpo/vz5dtv8/f1tf27Tps1ln2nQoIFdGNq9e7fq1atnC1GS1KRJE/n5+Wn37t22IHXp5wAAlQNBCgBwy/Hy8lLDhg2vur8k20raFwCg8uEZKQAAitG4cWMdOnRIhw4dsm1LS0vTyZMn1aRJEwdWBgCoCBiRAgDccvLz85WVlWW3zcXFRTVq1CjxMcLDw9W8eXMNGTJEs2fP1vnz5/XUU0+pc+fOxd4aCACoXBiRAgDcclavXq3AwEC7pWPHjqaOYbFY9Omnn6patWr661//qvDwcN12221atmzZDaoaAHAzsRiGYTi6CAAAAAC4mTAiBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmPT/AHxRBCAhSBV3AAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error Distribution (MODEL A) (Test Set A): Mean = 22.25770734371434 Standard Deviation = 53.96589716508258\n", + "Error Distribution (MODEL A) (Test Set B): Mean = 2.5616550522775925 Standard Deviation = 0.0022966488099699024\n", + "Error Distribution (MODEL A) (Test Set C): Mean = 0.062457236803248974 Standard Deviation = 0.08652385376675549\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1IAAAIjCAYAAAAJLyrXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABNwklEQVR4nO3deVhV5f7//9dmFhAQBzbOhJSz5UzqsZSjqKfMoXDIsINlJVFaVp5KzUo8muaUQ2ao3zTNBitPTmnh8WhOZZqYmZFYglomKAUorN8f/lyftoKxtsgGeT6ua12173Wvdb/XXli8vPe6t80wDEMAAAAAgGJzc3UBAAAAAFDeEKQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAl6rbbbtNtt91WKmPZbDaNHz/efD1+/HjZbDb98ssvpTJ+/fr1NXTo0FIZq6y49D0vrh9//FE2m02LFi0q8ZoAwBUIUgBQChYtWiSbzVbk9sUXX7i6xEINHTrUoU5/f3/dcMMN6t+/v9577z0VFBSUyDhbt27V+PHjdfr06RI5X0kqi7X9+edpy5Ytl+03DEN16tSRzWbTP/7xDxdU6LzPP//8sj8fwcHBat++vZYuXerq8gDA5OHqAgCgIpkwYYLCwsIua2/QoIELqikeb29vvfHGG5KkP/74Q0eOHNHHH3+s/v3767bbbtOHH36ogIAAs//69estj7F161a98MILGjp0qIKCgop93B9//CEPj2v7v7Ir1Xbw4EG5ubnu7yR9fHy0bNkydezY0aE9OTlZP/30k7y9vV1U2dVLSEhQmzZtJEm//vqrVqxYoXvvvVenT5/WiBEjXFwdABCkAKBU9ejRQ61bt7Z0zPnz51VQUCAvL6/L9mVnZ8vPz8/pegzDUE5OjipVqlRkHw8PD917770ObS+99JImTZqkMWPG6IEHHtCKFSvMfYXVWZIKCgqUl5cnHx8f+fj4XNOx/oqrg0rPnj21cuVKzZw50yFQLlu2TK1atSq1jzheC506dVL//v3N1w8//LBuuOEGLVu2jCAFoEzgo30AUIZcfI7klVde0fTp0xUeHi5vb2+lpKSYz/+kpKRo0KBBqlKlijkTcf78eb344otm//r16+tf//qXcnNzHc5fv359/eMf/9C6devUunVrVapUSfPnz3eq1meeeUbdunXTypUr9d1335nthT0jNWvWLDVp0kS+vr6qUqWKWrdurWXLlkm68FzT6NGjJUlhYWHmx7l+/PFHSReeyYmPj9fSpUvVpEkTeXt7a+3atea+wp7X+eWXX3TPPfcoICBAVatW1WOPPaacnJzL3ufCntf58zn/qrbCnpH64YcfdPfddys4OFi+vr5q3769/vOf/zj0ufjxtXfeeUcvv/yyateuLR8fH3Xt2lXff/99ke/5pQYOHKhff/1VGzZ
sMNvy8vL07rvvatCgQYUek52drSeeeEJ16tSRt7e3brrpJr3yyisyDMOhX25urkaOHKnq1aurcuXKuvPOO/XTTz8Ves6ff/5Z//znPxUSEiJvb281adJEb775ZrGvozi8vLxUpUqVaz4DCQDFxX+NAKAUZWZmXjZLYLPZVLVqVYe2pKQk5eTk6MEHH5S3t7eCg4PNfXfffbciIiI0ceJE85ffYcOGafHixerfv7+eeOIJbd++XYmJiTpw4IA++OADh3MfPHhQAwcO1PDhw/XAAw/opptucvp6hgwZovXr12vDhg268cYbC+2zYMECJSQkqH///mag2bt3r7Zv365Bgwapb9+++u677/T222/r1VdfVbVq1SRJ1atXN8+xadMmvfPOO4qPj1e1atVUv379K9Z1zz33qH79+kpMTNQXX3yhmTNn6rffftOSJUssXV9xavuz48eP69Zbb9Xvv/+uhIQEVa1aVYsXL9add96pd999V3369HHoP2nSJLm5uenJJ59UZmamJk+erMGDB2v79u3Fqq9+/fqKjIzU22+/rR49ekiS1qxZo8zMTA0YMEAzZ8506G8Yhu6880599tlniouL080336x169Zp9OjR+vnnn/Xqq6+afYcNG6a33npLgwYN0q233qpNmzapV69ehV5z+/btzcBbvXp1rVmzRnFxccrKytLjjz9erGu51JkzZ8w/K6dOndKyZcv0zTffaOHChU6dDwBKnAEAuOaSkpIMSYVu3t7eZr/U1FRDkhEQEGCcOHHC4Rzjxo0zJBkDBw50aN+zZ48hyRg2bJhD+5NPPmlIMjZt2mS21atXz5BkrF27tlh1x8bGGn5+fkXu/+qrrwxJxsiRI822zp07G507dzZf9+7d22jSpMkVx5kyZYohyUhNTb1snyTDzc3N2L9/f6H7xo0bZ76++B7deeedDv0eeeQRQ5Lx9ddfG4bxf+9zUlLSX57zSrXVq1fPiI2NNV8//vjjhiTjv//9r9l25swZIywszKhfv76Rn59vGIZhfPbZZ4Yko1GjRkZubq7Zd8aMGYYkY9++fZeN9WcXf5527txpzJ4926hcubLx+++/G4ZhGHfffbdx++23m/X16tXLPG7VqlWGJOOll15yOF///v0Nm81mfP/994Zh/N/P1COPPOLQb9CgQZe9P3FxcUZoaKjxyy+/OPQdMGCAERgYaNZ1pff8zy6+N5dubm5uxssvv3zFYwGgNPHRPgAoRa+99po2bNjgsK1Zs+ayfv369Sty1uOhhx5yeP3JJ59IkkaNGuXQ/sQTT0jSZR8rCwsLU/fu3Z2+hj/z9/eXdGH2oChBQUH66aeftHPnTqfH6dy5sxo3blzs/pc+Q/Poo49K+r/36lr55JNP1LZtW4fFH/z9/fXggw/qxx9/VEpKikP/+++/3+GZsk6dOkm68PHA4rrnnnv0xx9/aPXq1Tpz5oxWr15d5Mf6PvnkE7m7uyshIcGh/YknnpBhGObP4sX36dJ+l84uGYah9957T3fccYcMw9Avv/xibt27d1dmZqa+/PLLYl/Ln40dO9b8M7JixQoNHDhQzz77rGbMmOHU+QCgpPHRPgAoRW3bti3WYhOFrexX1L4jR47Izc3tspX/7Ha7goKCdOTIkWKf26qzZ89KkipXrlxkn6efflqffvqp2rZtqwYNGqhbt24aNGiQOnToUOxxrNYcERHh8Do8PFxubm7ms03XypEjR9SuXbvL2hs1amTub9q0qdlet25dh35VqlSRJP3222/FHrN69eqKiorSsmXL9Pvvvys/P99hkYZL66tZs+Zl9+vP9V38p5ubm8LDwx36Xfox0JMnT+r06dN6/fXX9frrrxc65okTJ4p9LX/WrFkzRUVFma/vueceZWZm6plnntGgQYOK/IsGACgtBCkAKIOutIpeUftsNttVn9uqb775RtKVl29v1KiRDh48qNWrV2vt2rV67733NGfOHI0dO1YvvPBCsca52povfW+
Keq/y8/Ovahyr3N3dC203Lln44a8MGjRIDzzwgDIyMtSjRw9LS8hfjYvfI3bvvfcqNja20D7NmzcvsfG6du2q1atXa8eOHYU+rwUApYkgBQDlXL169VRQUKBDhw6ZMwvShUUATp8+rXr16l2zsf/f//t/stls+vvf/37Ffn5+foqJiVFMTIzy8vLUt29fvfzyyxozZox8fHyKHQKL69ChQw6zWN9//70KCgrMRSouzvxc+iW7l87eScUPqNKFe3Hw4MHL2r/99ltz/7XQp08fDR8+XF988YXDUvSF1ffpp5/qzJkzDrNSl9Z38Wfq8OHDDrNQl17bxRX98vPzHWaPrpXz589L+r+ZUABwJZ6RAoByrmfPnpKk6dOnO7RPmzZNkq7Z39xPmjRJ69evV0xMzGUfpfuzX3/91eG1l5eXGjduLMMwdO7cOUkyvwvr0mDjrNdee83h9axZsyTJXNkuICBA1apV0+bNmx36zZkz57JzWamtZ8+e2rFjh7Zt22a2ZWdn6/XXX1f9+vUtPedlhb+/v+bOnavx48frjjvuuGJ9+fn5mj17tkP7q6++KpvNZr4/F/956ap/l/6Mubu7q1+/fnrvvffM2ck/O3nypDOXU6TVq1dLklq0aFGi5wUAZzAjBQClaM2aNebf/v/ZrbfeqhtuuMGpc7Zo0UKxsbF6/fXXdfr0aXXu3Fk7duzQ4sWLddddd+n222+/qprPnz+vt956S5KUk5OjI0eO6KOPPtLevXt1++23F/lszEXdunWT3W5Xhw4dFBISogMHDmj27Nnq1auXOSvSqlUrSdKzzz6rAQMGyNPTU3fccYfTXzacmpqqO++8U9HR0dq2bZu5jPeffwEfNmyYJk2apGHDhql169bavHmzw/dhXWSltmeeecZcijwhIUHBwcFavHixUlNT9d5778nN7dr9/WVRH637szvuuEO33367nn32Wf34449q0aKF1q9frw8//FCPP/64+UzUzTffrIEDB2rOnDnKzMzUrbfeqo0bNxb6HVeTJk3SZ599pnbt2umBBx5Q48aNderUKX355Zf69NNPderUKaeu57///a/53V+nTp3SRx99pOTkZA0YMEANGzZ06pwAUJIIUgBQisaOHVtoe1JSktNBSpLeeOMN3XDDDVq0aJE++OAD2e12jRkzRuPGjXP6nBfl5uZqyJAhkiRfX1/VqFFDrVq10tixY9WnT5+/DAfDhw/X0qVLNW3aNJ09e1a1a9dWQkKCnnvuObNPmzZt9OKLL2revHlau3atCgoKlJqa6nSQWrFihcaOHatnnnlGHh4eio+P15QpUxz6jB07VidPntS7776rd955Rz169NCaNWtUo0YNh35WagsJCdHWrVv19NNPa9asWcrJyVHz5s318ccfl4lnetzc3PTRRx9p7NixWrFihZKSklS/fn1NmTLFXOXxojfffFPVq1fX0qVLtWrVKnXp0kX/+c9/VKdOHYd+ISEh2rFjhyZMmKD3339fc+bMUdWqVdWkSRP9+9//drrWP8+GeXl56YYbbtDLL79sfkEyALiazbD6RCsAAAAAVHA8IwUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAs4nukJBUUFOjYsWOqXLmybDabq8sBAAAA4CKGYejMmTOqWbPmFb8rkSAl6dixY5d9wSAAAACAiuvo0aOqXbt2kfsJUpIqV64s6cKbFRAQ4OJqAAAAALhKVlaW6tSpY2aEohCkJPPjfAEBAQQpAAAAAH/5yA+LTQAAAACARQQpAAAAALCIIAUAAAAAFvGMFAAAAHAFhmHo/Pnzys/Pd3UpKAHu7u7y8PC46q89IkgBAAAARcjLy1N6erp+//13V5eCEuTr66vQ0FB5eXk5fQ6CFAAAAFCIgoICpaamyt3dXTVr1pSXl9dVz2LAtQzDUF5enk6ePKnU1FRFRERc8Ut3r4QgBQAAABQiLy9PBQUFqlOnjnx
9fV1dDkpIpUqV5OnpqSNHjigvL08+Pj5OnYfFJgAAAIArcHbGAmVXSdxTfioAAAAAwCKCFAAAAABYxDNSAAAAgEVxi3aW6ngLh7Yp1fHw15iRAgAAAK4jNpvtitv48eOv6tyrVq36y37Jycnq0qWLgoOD5evrq4iICMXGxiovL6/YY9WvX1/Tp08vdv/ExES5u7trypQpxT7mahCkAAAAgOtIenq6uU2fPl0BAQEObU8++eQ1HT8lJUXR0dFq3bq1Nm/erH379mnWrFny8vK6pl9q/Oabb+qpp57Sm2++ec3G+DOCFAAAAHAdsdvt5hYYGCibzebQtnz5cjVq1Eg+Pj5q2LCh5syZYx6bl5en+Ph4hYaGysfHR/Xq1VNiYqKkCzNEktSnTx/ZbDbz9aXWr18vu92uyZMnq2nTpgoPD1d0dLQWLFigSpUqmf22bNmiTp06qVKlSqpTp44SEhKUnZ0tSbrtttt05MgRjRw50pxJu5Lk5GT98ccfmjBhgrKysrR169areAeLhyAFAAAAVBBLly7V2LFj9fLLL+vAgQOaOHGinn/+eS1evFiSNHPmTH300Ud65513dPDgQS1dutQMTDt3XnguLCkpSenp6ebrS9ntdqWnp2vz5s1F1nH48GFFR0erX79+2rt3r1asWKEtW7YoPj5ekvT++++rdu3amjBhgjmTdiULFy7UwIED5enpqYEDB2rhwoVW3xrLWGwCAAAAqCDGjRunqVOnqm/fvpKksLAwpaSkaP78+YqNjVVaWpoiIiLUsWNH2Ww21atXzzy2evXqkqSgoCDZ7fYix7j77ru1bt06de7cWXa7Xe3bt1fXrl113333KSAgQNKF55kGDx6sxx9/XJIUERGhmTNnqnPnzpo7d66Cg4Pl7u6uypUrX3EsScrKytK7776rbdu2SZLuvfdederUSTNmzJC/v7/T79VfYUYKAAAAqACys7N1+PBhxcXFyd/f39xeeuklHT58WJI0dOhQ7dmzRzfddJMSEhK0fv16y+O4u7srKSlJP/30kyZPnqxatWpp4sSJatKkiTmz9PXXX2vRokUOdXTv3l0FBQVKTU21NN7bb7+t8PBwtWjRQpJ08803q169elqxYoXl2q0gSAEAAAAVwNmzZyVJCxYs0J49e8ztm2++0RdffCFJatmypVJTU/Xiiy/qjz/+0D333KP+/fs7NV6tWrU0ZMgQzZ49W/v371dOTo7mzZtn1jJ8+HCHOr7++msdOnRI4eHhlsZZuHCh9u/fLw8PD3NLSUm55otO8NE+AAAAoAIICQlRzZo19cMPP2jw4MFF9gsICFBMTIxiYmLUv39/RUdH69SpUwoODpanp6dTK+9VqVJFoaGh5mISLVu2VEpKiho0aFDkMcVZ5W/fvn3atWuXPv/8cwUHB5vtp06d0m233aZvv/1WDRs2tFxvcRCkyqCivuCNL2IDAADA1XjhhReUkJCgwMBARUdHKzc3V7t27dJvv/2mUaNGadq0aQoNDdUtt9wiNzc3rVy5Una7XUFBQZIurNy3ceNGdejQQd7e3qpSpcplY8yfP1979uxRnz59FB4erpycHC1ZskT79+/XrFmzJElPP/202rdvr/j4eA0bNkx+fn5KSUnRhg0bNHv2bHOszZs3a8CAAfL29la1atUuG2vhwoVq27at/va3v122r02bNlq4cOE1+14pghQAAABgUXn9C+5hw4bJ19dXU6ZM0ejRo+Xn56dmzZqZiz5UrlxZkydP1qFDh+Tu7q42bdrok08+kZvbhSeCpk6dqlGjRmnBggWqVauWfvzxx8vGaNu2rbZs2aKHHnpIx44dk7+/v5o0aaJVq1apc+fOkqTmzZsrOTlZzz77rDp16iTDMBQeHq6YmBjzPBMmTNDw4cMVHh6u3NxcGYbhME5eXp7eeustPf3004Vea79+/TR16lRNnDhRnp6eJfDuObIZl1ZUAWVlZSkwMFCZmZnmSiKuxIwUAACA6+Xk5Cg
1NVVhYWHy8fFxdTkoQVe6t8XNBiw2AQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFnm4ugAAAACg3FkWU7rjDVpRuuPhLzEjBQAAAFxHbDbbFbfx48df1blXrVr1l/2Sk5PVpUsXBQcHy9fXVxEREYqNjVVeXl6xx6pfv76mT59erH4Xr83d3V01a9ZUXFycfvvtt2KP5QyCFAAAAHAdSU9PN7fp06crICDAoe3JJ5+8puOnpKQoOjparVu31ubNm7Vv3z7NmjVLXl5eys/PvyZjTpgwQenp6UpLS9PSpUu1efNmJSQkXJOxLiJIAQAAANcRu91uboGBgbLZbA5ty5cvV6NGjeTj46OGDRtqzpw55rF5eXmKj49XaGiofHx8VK9ePSUmJkq6MPMjSX369JHNZjNfX2r9+vWy2+2aPHmymjZtqvDwcEVHR2vBggWqVKmS2W/Lli3q1KmTKlWqpDp16ighIUHZ2dmSpNtuu01HjhzRyJEjzdmmK6lcubLsdrtq1aql22+/XbGxsfryyy+v4l38awQpAAAAoIJYunSpxo4dq5dfflkHDhzQxIkT9fzzz2vx4sWSpJkzZ+qjjz7SO++8o4MHD2rp0qVmYNq5c6ckKSkpSenp6ebrS9ntdqWnp2vz5s1F1nH48GFFR0erX79+2rt3r1asWKEtW7YoPj5ekvT++++rdu3a5kxTenp6sa/x559/1scff6x27doV+xhnsNgEAAAAUEGMGzdOU6dOVd++fSVJYWFhSklJ0fz58xUbG6u0tDRFRESoY8eOstlsqlevnnls9erVJUlBQUGy2+1FjnH33Xdr3bp16ty5s+x2u9q3b6+uXbvqvvvuU0BAgCQpMTFRgwcP1uOPPy5JioiI0MyZM9W5c2fNnTtXwcHBcnd3N2ea/srTTz+t5557Tvn5+crJyVG7du00bdo0Z9+mYmFGCgAAAKgAsrOzdfjwYcXFxcnf39/cXnrpJR0+fFiSNHToUO3Zs0c33XSTEhIStH79esvjuLu7KykpST/99JMmT56sWrVqaeLEiWrSpIk5s/T1119r0aJFDnV0795dBQUFSk1NtTzm6NGjtWfPHu3du1cbN26UJPXq1euaPZMlMSMFAAAAVAhnz56VJC1YsOCyj725u7tLklq2bKnU1FStWbNGn376qe655x5FRUXp3XfftTxerVq1NGTIEA0ZMkQvvviibrzxRs2bN08vvPCCzp49q+HDhxe6IETdunUtj1WtWjU1aNBA0oXZrenTpysyMlKfffaZoqKiLJ+vOAhSAAAAQAUQEhKimjVr6ocfftDgwYOL7BcQEKCYmBjFxMSof//+io6O1qlTpxQcHCxPT0+nZnmqVKmi0NBQczGJli1bKiUlxQw/hbmaVf4uBsM//vjDqeOLgyAFAAAAVBAvvPCCEhISFBgYqOjoaOXm5mrXrl367bffNGrUKE2bNk2hoaG65ZZb5ObmppUrV8putysoKEjShZX7Nm7cqA4dOsjb21tVqlS5bIz58+drz5496tOnj8LDw5WTk6MlS5Zo//79mjVrlqQLzzS1b99e8fHxGjZsmPz8/JSSkqINGzZo9uzZ5libN2/WgAED5O3trWrVqhV5XWfOnFFGRoYMw9DRo0f11FNPqXr16rr11ltL/k38/xGkAAAAAKsGrXB1BU4ZNmyYfH19NWXKFI0ePVp+fn5q1qyZuehD5cqVNXnyZB06dEju7u5q06aNPvnkE7m5XVhaYerUqRo1apQWLFigWrVq6ccff7xsjLZt22rLli166KGHdOzYMfn7+6tJkyZatWqVOnfuLElq3ry5kpOT9eyzz6pTp04yDEPh4eGKiYkxzzNhwgQNHz5c4eHhys3NlWEYRV7X2LFjNXbsWEkXFsVo06aN1q9fr6pVq5bQO3c5m3GliiqIrKwsBQYGKjMz01xJxJXiFhW
+lOTCoW1KuRIAAICKKycnR6mpqQoLC5OPj4+ry0EJutK9LW42YNU+AAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEVlJkhNmjRJNpvN/FZl6cIXZY0YMUJVq1aVv7+/+vXrp+PHjzscl5aWpl69esnX11c1atTQ6NGjdf78+VKuHgAAAEBF4uHqAiRp586dmj9/vpo3b+7QPnLkSP3nP//RypUrFRgYqPj4ePXt21f/+9//JEn5+fnq1auX7Ha7tm7dqvT0dN13333y9PTUxIkTXXEpAAAAqADiN8aX6nizu84u1fHw11w+I3X27FkNHjxYCxYsUJUqVcz2zMxMLVy4UNOmTVOXLl3UqlUrJSUlaevWrfriiy8kSevXr1dKSoreeust3XzzzerRo4defPFFvfbaa8rLy3PVJQEAAAAuY7PZrriNHz/+qs69atWqv+yXnJysLl26KDg4WL6+voqIiFBsbKyl39Hr16+v6dOnF6vvV199pbvvvlshISHy8fFRRESEHnjgAX333XfFHs8qlwepESNGqFevXoqKinJo3717t86dO+fQ3rBhQ9WtW1fbtm2TJG3btk3NmjVTSEiI2ad79+7KysrS/v37ixwzNzdXWVlZDhsAAABwPUhPTze36dOnKyAgwKHtySefvKbjp6SkKDo6Wq1bt9bmzZu1b98+zZo1S15eXsrPzy/x8VavXq327dsrNzdXS5cu1YEDB/TWW28pMDBQzz//fImPd5FLg9Ty5cv15ZdfKjEx8bJ9GRkZ8vLyUlBQkEN7SEiIMjIyzD5/DlEX91/cV5TExEQFBgaaW506da7ySgAAAICywW63m1tgYKBsNptD2/Lly9WoUSP5+PioYcOGmjNnjnlsXl6e4uPjFRoaKh8fH9WrV8/8Xb1+/fqSpD59+shms5mvL7V+/XrZ7XZNnjxZTZs2VXh4uKKjo7VgwQJVqlTJ7LdlyxZ16tRJlSpVUp06dZSQkKDs7GxJ0m233aYjR45o5MiR5kxaYX7//Xfdf//96tmzpz766CNFRUUpLCxM7dq10yuvvKL58+eXwDtaOJcFqaNHj+qxxx7T0qVL5ePjU6pjjxkzRpmZmeZ29OjRUh0fAAAAcIWlS5dq7Nixevnll3XgwAFNnDhRzz//vBYvXixJmjlzpj766CO98847OnjwoJYuXWoGpp07d0qSkpKSlJ6ebr6+lN1uV3p6ujZv3lxkHYcPH1Z0dLT69eunvXv3asWKFdqyZYvi4y88e/b++++rdu3amjBhgjmTVph169bpl19+0VNPPVXo/ksnZUqSyxab2L17t06cOKGWLVuabfn5+dq8ebNmz56tdevWKS8vT6dPn3Z4A44fPy673S7pwk3asWOHw3kvrup3sU9hvL295e3tXYJXAwAAAJR948aN09SpU9W3b19JUlhYmFJSUjR//nzFxsYqLS1NERER6tixo2w2m+rVq2ceW716dUkXwsmVfte+++67tW7dOnXu3Fl2u13t27dX165ddd999ykgIEDShU+IDR482FyxOyIiQjNnzlTnzp01d+5cBQcHy93dXZUrV77iWIcOHZJ04RGg0uayGamuXbtq37592rNnj7m1bt1agwcPNv/d09NTGzduNI85ePCg0tLSFBkZKUmKjIzUvn37dOLECbPPhg0bFBAQoMaNG5f6NQEAAABlVXZ2tg4fPqy4uDj5+/ub20svvaTDhw9LkoYOHao9e/bopptuUkJCgtavX295HHd3dyUlJemnn37S5MmTVatWLU2cOFFNmjQxZ5a+/vprLVq0yKGO7t27q6CgQKmpqcUeyzAMy/WVFJfNSFWuXFlNmzZ1aPPz81PVqlXN9ri4OI0aNUrBwcEKCAjQo48+qsjISLVv316S1K1bNzVu3FhDhgzR5MmTlZGRoeeee04jRoxgxgkAAAD4k7Nnz0qSFixYoHbt2jnsc3d
3lyS1bNlSqampWrNmjT799FPdc889ioqK0rvvvmt5vFq1amnIkCEaMmSIXnzxRd14442aN2+eXnjhBZ09e1bDhw9XQkLCZcfVrVu32GPceOONkqRvv/3WnGwpLWXie6SK8uqrr8rNzU39+vVTbm6uunfv7vAwnLu7u1avXq2HH35YkZGR8vPzU2xsrCZMmODCqgEAAICyJyQkRDVr1tQPP/ygwYMHF9kvICBAMTExiomJUf/+/RUdHa1Tp04pODhYnp6eTq28V6VKFYWGhpqLSbRs2VIpKSlq0KBBkccUZ5W/bt26qVq1apo8ebI++OCDy/Zf+phQSSpTQerzzz93eO3j46PXXntNr732WpHH1KtXT5988sk1rgwAAAAo/1544QUlJCQoMDBQ0dHRys3N1a5du/Tbb79p1KhRmjZtmkJDQ3XLLbfIzc1NK1eulN1uN8NI/fr1tXHjRnXo0EHe3t4O3wN70fz587Vnzx716dNH4eHhysnJ0ZIlS7R//37NmjVLkvT000+rffv2io+P17Bhw+Tn56eUlBRt2LBBs2fPNsfavHmzBgwYIG9vb1WrVu2ysfz8/PTGG2/o7rvv1p133qmEhAQ1aNBAv/zyi9555x2lpaVp+fLl1+S9LFNBCgAAACgPZned7eoSnDJs2DD5+vpqypQpGj16tPz8/NSsWTNz0YfKlStr8uTJOnTokNzd3dWmTRt98skncnO7sLTC1KlTNWrUKC1YsEC1atXSjz/+eNkYbdu21ZYtW/TQQw/p2LFj8vf3V5MmTbRq1Sp17txZktS8eXMlJyfr2WefVadOnWQYhsLDwxUTE2OeZ8KECRo+fLjCw8OVm5tb5PNQvXv31tatW5WYmKhBgwYpKytLderUUZcuXfTSSy+V7Bv4JzbDlU9olRFZWVkKDAxUZmamuZKIK8UtKnwpyYVD25RyJQAAABVXTk6OUlNTFRYWVupf14Nr60r3trjZwKVfyAsAAAAA5RFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAXAFrs11/SuKeEqQAAACAQnh6ekqSfv/9dxdXgpJ28Z5evMfO4HukAAAAgEK4u7srKChIJ06ckCT5+vrKZrO5uCpcDcMw9Pvvv+vEiRMKCgqSu7u70+ciSAEAAABFsNvtkmSGKVwfgoKCzHvrLIIUAAAAUASbzabQ0FDVqFFD586dc3U5KAGenp5XNRN1EUEKAAAA+Avu7u4l8ss3rh8sNgEAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQ
AAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAscmmQmjt3rpo3b66AgAAFBAQoMjJSa9asMffn5ORoxIgRqlq1qvz9/dWvXz8dP37c4RxpaWnq1auXfH19VaNGDY0ePVrnz58v7UsBAAAAUIG4NEjVrl1bkyZN0u7du7Vr1y516dJFvXv31v79+yVJI0eO1Mcff6yVK1cqOTlZx44dU9++fc3j8/Pz1atXL+Xl5Wnr1q1avHixFi1apLFjx7rqkgAAAABUADbDMAxXF/FnwcHBmjJlivr376/q1atr2bJl6t+/vyTp22+/VaNGjbRt2za1b99ea9as0T/+8Q8dO3ZMISEhkqR58+bp6aef1smTJ+Xl5VWsMbOyshQYGKjMzEwFBARcs2srrrhFOwttXzi0TSlXAgAAAFQsxc0GZeYZqfz8fC1fvlzZ2dmKjIzU7t27de7cOUVFRZl9GjZsqLp162rbtm2SpG3btqlZs2ZmiJKk7t27Kysry5zVKkxubq6ysrIcNgAAAAAoLpcHqX379snf31/e3t566KGH9MEHH6hx48bKyMiQl5eXgoKCHPqHhIQoIyNDkpSRkeEQoi7uv7ivKImJiQoMDDS3OnXqlOxFAQAAALiuuTxI3XTTTdqzZ4+2b9+uhx9+WLGxsUpJSbmmY44ZM0aZmZnmdvTo0Ws6HgAAAIDri4erC/Dy8lKDBg0kSa1atdLOnTs1Y8YMxcTEKC8vT6dPn3aYlTp+/LjsdrskyW63a8eOHQ7nu7iq38U+hfH29pa3t3cJXwkAAACAisLlM1KXKigoUG5urlq1aiVPT09t3LjR3Hfw4EGlpaUpMjJSkhQZGal9+/bpxIkTZp8NGzYoICBAjRs3LvXaAQAAAFQMLp2RGjNmjHr06KG6devqzJkzWrZsmT7//HOtW7dOgYGBiouL06hRoxQcHKyAgAA9+uijioyMVPv27SVJ3bp1U+PGjTVkyBBNnjxZGRkZeu655zRixAhmnAAAAABcMy4NUidOnNB9992n9PR0BQYGqnnz5lq3bp3+/ve/S5JeffVVubm5qV+/fsrNzVX37t01Z84c83h3d3etXr1aDz/8sCIjI+Xn56fY2FhNmDDBVZcEAAAAoAIoc98j5Qp8jxQAAAAAqRx+jxQAAAAAlBcEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALHIqSP3www8lXQcAAAAAlBtOBakGDRro9ttv11tvvaWcnJySrgkAAAAAyjSngtSXX36p5s2ba9SoUbLb7Ro+fLh27NhR0rUBAAAAQJnkVJC6+eabNWPGDB07dkxvvvmm0tPT1bFjRzVt2lTTpk3TyZMnS7pOAAAAACgzrmqxCQ8PD/Xt21crV67Uv//9b33//fd68sknVadOHd13331KT08vqToBAAAAoMy4qiC1a9cuPfLIIwoNDdW0adP05JNP6vDhw9qwYYOOHTum3r17l1SdAAAAAFBmeDhz0LRp05SUlKSDBw+qZ8+eWrJkiXr27Ck3twu5LCwsTIsWLVL9+vVLslYAAAAAKBOcClJz587VP//5Tw0dOlShoaGF9qlRo4YWLlx4VcUBAAAAQFnkVJA6dOjQX/bx8vJSbGysM6cHAAAAgDLNqWekkpKStHLlysvaV65cqcWLF191UQAAAABQljkVpBITE1WtWrXL2mvUqKGJEydedVEAAAAAUJY5FaTS0tIUFhZ2WXu
9evWUlpZ21UUBAAAAQFnmVJCqUaOG9u7de1n7119/rapVq151UQAAAABQljkVpAYOHKiEhAR99tlnys/PV35+vjZt2qTHHntMAwYMKOkaAQAAAKBMcWrVvhdffFE//vijunbtKg+PC6coKCjQfffdxzNSAAAAAK57TgUpLy8vrVixQi+++KK+/vprVapUSc2aNVO9evVKuj4AAAAAKHOcClIX3XjjjbrxxhtLqhYAAAAAKBecClL5+flatGiRNm7cqBMnTqigoMBh/6ZNm0qkOAAAAAAoi5wKUo899pgWLVqkXr16qWnTprLZbCVdFwAAAACUWU4FqeXLl+udd95Rz549S7oeAAAAACjznFr+3MvLSw0aNCjpWgAAAACgXHAqSD3xxBOaMWOGDMMo6XoAAAAAoMxz6qN9W7Zs0WeffaY1a9aoSZMm8vT0dNj//vvvl0hxAAAAAFAWORWkgoKC1KdPn5KuBQAAAADKBaeCVFJSUknXAQAAAADlhlPPSEnS+fPn9emnn2r+/Pk6c+aMJOnYsWM6e/ZsiRUHAAAAAGWRUzNSR44cUXR0tNLS0pSbm6u///3vqly5sv79738rNzdX8+bNK+k6AQAAAKDMcGpG6rHHHlPr1q3122+/qVKlSmZ7nz59tHHjxhIrDgAAAADKIqdmpP773/9q69at8vLycmivX7++fv755xIpDAAAAADKKqdmpAoKCpSfn39Z+08//aTKlStfdVEAAAAAUJY5FaS6deum6dOnm69tNpvOnj2rcePGqWfPniVVGwAAAACUSU59tG/q1Knq3r27GjdurJycHA0aNEiHDh1StWrV9Pbbb5d0jQAAAABQpjgVpGrXrq2vv/5ay5cv1969e3X27FnFxcVp8ODBDotPAAAAAMD1yKkgJUkeHh669957S7IWAAAAACgXnApSS5YsueL+++67z6liAAAAAKA8cCpIPfbYYw6vz507p99//11eXl7y9fUlSAEAAAC4rjm1at9vv/3msJ09e1YHDx5Ux44dWWwCAAAAwHXPqSBVmIiICE2aNOmy2SoAAAAAuN6UWJCSLixAcezYsZI8JQAAAACUOU49I/XRRx85vDYMQ+np6Zo9e7Y6dOhQIoUBAAAAQFnlVJC66667HF7bbDZVr15dXbp00dSpU0uiLgAAAAAos5wKUgUFBSVdBwAAAACUGyX6jBQAAAAAVAROzUiNGjWq2H2nTZvmzBAAAAAAUGY5FaS++uorffXVVzp37pxuuukmSdJ3330nd3d3tWzZ0uxns9lKpkoAAAAAKEOcClJ33HGHKleurMWLF6tKlSqSLnxJ7/33369OnTrpiSeeKNEiAQAAAKAsceoZqalTpyoxMdEMUZJUpUoVvfTSS6zaBwAAAOC651SQysrK0smTJy9rP3nypM6cOXPVRQEAAABAWeZUkOrTp4/uv/9+vf/++/rpp5/0008/6b333lNcXJz69u1b0jUCAAAAQJni1DNS8+bN05NPPqlBgwbp3LlzF07k4aG4uDhNmTKlRAsEAAAAgLLGqSDl6+urOXPmaMqUKTp8+LAkKTw8XH5+fiVaHAAAAACURVf1hbzp6elKT09XRESE/Pz8ZBhGSdUFAAAAAGWWU0Hq119/VdeuXXXjjTeqZ8+eSk9PlyTFxcWx9DkAAACA655TQWrkyJHy9PRUWlqafH19zfaYmBitXbu2xIoDAAAAgLLIqWek1q9fr3Xr1ql27doO7RERETpy5EiJFAYAAAAAZZVTM1LZ2dkOM1EXnTp1St7e3lddFAAAAACUZU4FqU6dOmnJkiXma5vNpoKCAk2ePFm33357iRUHAAAAAGWRUx/tmzx5srp27apdu3YpLy9PTz31lPbv369Tp07pf//7X0nXCAAAAABlilMzUk2bNtV3332njh07qnfv3srOzlbfvn311VdfKTw8vNjnSUxMVJs2bVS5cmXVqFFDd911lw4ePOjQJycnRyNGjFDVqlXl7++vfv3
66fjx4w590tLS1KtXL/n6+qpGjRoaPXq0zp8/78ylAQAAAMBfsjwjde7cOUVHR2vevHl69tlnr2rw5ORkjRgxQm3atNH58+f1r3/9S926dVNKSor55b4jR47Uf/7zH61cuVKBgYGKj49X3759zZmv/Px89erVS3a7XVu3blV6erruu+8+eXp6auLEiVdVHwAAAAAUxmY48S261atX19atWxUREVGixZw8eVI1atRQcnKy/va3vykzM1PVq1fXsmXL1L9/f0nSt99+q0aNGmnbtm1q37691qxZo3/84x86duyYQkJCJEnz5s3T008/rZMnT8rLy+svx83KylJgYKAyMzMVEBBQotfkjLhFOwttXzi0TSlXAgAAAFQsxc0GTn20795779XChQudLq4omZmZkqTg4GBJ0u7du3Xu3DlFRUWZfRo2bKi6detq27ZtkqRt27apWbNmZoiSpO7duysrK0v79+8vdJzc3FxlZWU5bAAAAABQXE4tNnH+/Hm9+eab+vTTT9WqVSvzY3gXTZs2zfI5CwoK9Pjjj6tDhw5q2rSpJCkjI0NeXl4KCgpy6BsSEqKMjAyzz59D1MX9F/cVJjExUS+88ILlGgEAAABAshikfvjhB9WvX1/ffPONWrZsKUn67rvvHPrYbDanChkxYoS++eYbbdmyxanjrRgzZoxGjRplvs7KylKdOnWu+bgAAAAArg+WglRERITS09P12WefSZJiYmI0c+bMy2aErIqPj9fq1au1efNm1a5d22y32+3Ky8vT6dOnHWaljh8/LrvdbvbZsWOHw/kurup3sc+lvL29+eJgAAAAAE6z9IzUpetSrFmzRtnZ2U4PbhiG4uPj9cEHH2jTpk0KCwtz2N+qVSt5enpq48aNZtvBgweVlpamyMhISVJkZKT27dunEydOmH02bNiggIAANW7c2OnaAAAAAKAoTj0jdZETC/45GDFihJYtW6YPP/xQlStXNp9pCgwMVKVKlRQYGKi4uDiNGjVKwcHBCggI0KOPPqrIyEi1b99ektStWzc1btxYQ4YM0eTJk5WRkaHnnntOI0aMYNYJAAAAwDVhKUjZbLbLnoFy9pkoSZo7d64k6bbbbnNoT0pK0tChQyVJr776qtzc3NSvXz/l5uaqe/fumjNnjtnX3d1dq1ev1sMPP6zIyEj5+fkpNjZWEyZMcLouAAAAALgSS98j5ebmph49epgzPR9//LG6dOly2ap977//fslWeY3xPVIAAAAApOJnA0szUrGxsQ6v7733XueqAwAAAIByzFKQSkpKulZ1AAAAAEC5YWnVPgAAAAAAQQoAAAAALCNIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgkYerC0DxxS3aWeS+hUPblGIlAAAAQMXGjBQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoI
UAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCVBn2nWa4ugQAAAAAhSBIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWOTSILV582bdcccdqlmzpmw2m1atWuWw3zAMjR07VqGhoapUqZKioqJ06NAhhz6nTp3S4MGDFRAQoKCgIMXFxens2bOleBUAAAAAKhqXBqns7Gy1aNFCr732WqH7J0+erJkzZ2revHnavn27/Pz81L17d+Xk5Jh9Bg8erP3792vDhg1avXq1Nm/erAcffLC0LgEAAABABeThysF79OihHj16FLrPMAxNnz5dzz33nHr37i1JWrJkiUJCQrRq1SoNGDBABw4c0Nq1a7Vz5061bt1akjRr1iz17NlTr7zyimrWrFlq1wIAAACg4iizz0ilpqYqIyNDUVFRZltgYKDatWunbdu2SZK2bdumoKAgM0RJUlRUlNzc3LR9+/Yiz52bm6usrCyHDQAAAACKq8wGqYyMDElSSEiIQ3tISIi5LyMjQzVq1HDY7+HhoeDgYLNPYRITExUYGGhuderUKeHqAQAAAFzPymyQupbGjBmjzMxMczt69KirSwIAAABQjpTZIGW32yVJx48fd2g/fvy4uc9ut+vEiRMO+8+fP69Tp06ZfQrj7e2tgIAAhw0AAAAAiqvMBqmwsDDZ7XZt3LjRbMvKytL27dsVGRkpSYqMjNTp06e1e/dus8+mTZtUUFCgdu3alXrNAAAAACoGl67ad/bsWX3//ffm69TUVO3Zs0fBwcGqW7euHn/8cb300kuKiIhQWFiYnn/+edWsWVN33XWXJKlRo0aKjo7WAw88oHnz5uncuXOKj4/XgAEDWLEPAAAAwDXj0iC1a9cu3X777ebrUaNGSZJiY2O1aNEiPfXUU8rOztaDDz6o06dPq2PHjlq7dq18fHzMY5YuXar4+Hh17dpVbm5u6tevn2bOnFnq1wIAAACg4rAZhmG4ughXy8rKUmBgoDIzM8vE81Jxi3ZKkr7TDN2ox4p1zMKhba5lSQAAAECFUNxsUGafkQIAAACAsoogBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIg9XF4CSEbdoZ6HtC4e2KeVKAAAAgOsfM1IAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCVBn3nWa4ugQAAAAAlyBIAQAAAIBFBCkAAAAAsMjD1QXg2opbtLPIfQuHtinFSgAAAIDrBzNSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAiD1cXANeJW7Sz0PaFQ9uUciUAAABA+cKMFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWOTh6gJQuO80w2Vjxy3aWeS+hUPblGIlAAAAQNnEjBQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAA
AAADAIoIUAAAAAFhEkAIAAAAAizxcXQDKl7hFOwttXzi0TSlXAgAAALgOM1IAAAAAYBEzUrjmmMUCAADA9YYZKQAAAACwiCBVDnynGa4uAQAAAMCfEKQAAAAAwCKCFAAAAABYxGITcJmiFqGQil6IgoUrAAAAUBYQpFAirhSKAAAAgOsNQQooIcyWAQAAVBw8IwUAAAAAFjEjhTKpND4q6MwzWgAAAIBEkMJ1gme0AAAAUJr4aB8AAAAAWMSMFHCN8RFCAACA6w9BCigEK/DBGYRmAAAqDoJUOfGdZuhGPebqMoAiESIAAEBFct0Eqddee01TpkxRRkaGWrRooVmzZqlt27auLgu4orI683W9haKSvh4WNwEAANdFkFqxYoVGjRqlefPmqV27dpo+fbq6d++ugwcPqkaNGq4uD7DMmV/8S/qYopRkiLjeAhsAAKg4bIZhGK4u4mq1a9dObdq00ezZsyVJBQUFqlOnjh599FE988wzf3l8VlaWAgMDlZmZqYCAgGtd7l+KW7RT32nGZe18tA8oeSUZMktyfGeUVjB15r0py8G4rM4MAyg9/MUe/qy42aDcz0jl5eVp9+7dGjNmjNnm5uamqKgobdu2rdBjcnNzlZuba77OzMyUdOFNKwvy/jir8zp3WXuKXlEDPeSCioDrV1F/7vP+OFsq4w+Z+9l1NY4z4782uFWh7SOW7i6xY4rqLxV9r8vK/xMK48x7c71x5l6XxvilWQNKzpX+m1+W/1tQkkrrZ9rVf3aL4+I9/6v5pnI/I3Xs2DHVqlVLW7duVWRkpNn+1FNPKTk5Wdu3b7/smPHjx+uFF14ozTIBAAAAlCNHjx5V7dq1i9xf7meknDFmzBiNGjXKfF1QUKBTp06patWqstlsLqzsQgKuU6eOjh49WiY+ZgjncB/LP+7h9YH7eH3gPl4fuI/lX0W5h4Zh6MyZM6pZs+YV+5X7IFWtWjW5u7vr+PHjDu3Hjx+X3W4v9Bhvb295e3s7tAUFBV2rEp0SEBBwXf+AVhTcx/KPe3h94D5eH7iP1wfuY/lXEe5hYGDgX/ZxK4U6rikvLy+1atVKGzduNNsKCgq0ceNGh4/6AQAAAEBJKfczUpI0atQoxcbGqnXr1mrbtq2mT5+u7Oxs3X///a4uDQAAAMB16LoIUjExMTp58qTGjh2rjIwM3XzzzVq7dq1CQkJcXZpl3t7eGjdu3GUfPUT5wn0s/7iH1wfu4/WB+3h94D6Wf9xDR+V+1T4AAAAAKG3l/hkpAAAAAChtBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCVBny2muvqX79+vLx8VG7du20Y8cOV5eEP9m8ebPuuOMO1axZUzabTatWrXLYbxiGxo4dq9DQUFWqVElRUVE6dOiQQ59Tp05p8ODBCggIUFBQkOLi4nT27NlSvIqKLTExUW3atFHlypVVo0YN3XXXXTp48KBDn5ycHI0YMUJVq1aVv7+/+vXrd9kXfqelpalXr17y9fVVjRo1NHr0aJ0/f740L6VCmzt3rpo3b25+IWRkZKTWrFlj7ucelk+TJk2SzWbT448/brZxL8u+8ePHy2azOWwNGzY093MPy4eff/5Z9957r6pWrapKlSqpWbNm2rVrl7mf33EKR5AqI1asWKFRo0Zp3Lhx+vLLL9WiRQt1795dJ06ccHVp+P9lZ2erRYsWeu211wrdP3nyZM2cOVPz5s3T9u3b5efnp+7duysnJ8fsM3jwYO3fv18bNmzQ6tWrtXnzZj344IOldQkVXnJyskaMGKEvvvhCGzZs0Llz59StWzdlZ2ebfUaOHKmPP/5YK1euVHJyso4dO6a+ffua+/Pz89WrVy/l5eVp69atWrx4sRYtWqSxY8e64pIqpNq1a2vSpEnavXu3du3apS5duqh3797av3+/JO5hebRz507Nnz9fzZs3d2jnXpYPTZo0UXp6urlt2bLF3Mc9LPt+++03dejQQZ6enlqzZo1SUlI0dep
UValSxezD7zhFMFAmtG3b1hgxYoT5Oj8/36hZs6aRmJjowqpQFEnGBx98YL4uKCgw7Ha7MWXKFLPt9OnThre3t/H2228bhmEYKSkphiRj586dZp81a9YYNpvN+Pnnn0utdvyfEydOGJKM5ORkwzAu3DNPT09j5cqVZp8DBw4Ykoxt27YZhmEYn3zyieHm5mZkZGSYfebOnWsEBAQYubm5pXsBMFWpUsV44403uIfl0JkzZ4yIiAhjw4YNRufOnY3HHnvMMAz+PJYX48aNM1q0aFHoPu5h+fD0008bHTt2LHI/v+MUjRmpMiAvL0+7d+9WVFSU2ebm5qaoqCht27bNhZWhuFJTU5WRkeFwDwMDA9WuXTvzHm7btk1BQUFq3bq12ScqKkpubm7avn17qdcMKTMzU5IUHBwsSdq9e7fOnTvncB8bNmyounXrOtzHZs2aOXzhd/fu3ZWVlWXOiKD05Ofna/ny5crOzlZkZCT3sBwaMWKEevXq5XDPJP48lieHDh1SzZo1dcMNN2jw4MFKS0uTxD0sLz766CO1bt1ad999t2rUqKFbbrlFCxYsMPfzO07RCFJlwC+//KL8/HyH/4hIUkhIiDIyMlxUFay4eJ+udA8zMjJUo0YNh/0eHh4KDg7mPrtAQUGBHn/8cXXo0EFNmzaVdOEeeXl5KSgoyKHvpfexsPt8cR9Kx759++Tv7y9vb2899NBD+uCDD9S4cWPuYTmzfPlyffnll0pMTLxsH/eyfGjXrp0WLVqktWvXau7cuUpNTVWnTp105swZ7mE58cMPP2ju3LmKiIjQunXr9PDDDyshIUGLFy+WxO84V+Lh6gIAwBVGjBihb775xuGz/Cg/brrpJu3Zs0eZmZl69913FRsbq+TkZFeXBQuOHj2qxx57TBs2bJCPj4+ry4GTevToYf578+bN1a5dO9WrV0/vvPOOKlWq5MLKUFwFBQVq3bq1Jk6cKEm65ZZb9M0332jevHmKjY11cXVlGzNSZUC1atXk7u5+2So2x48fl91ud1FVsOLifbrSPbTb7ZctHnL+/HmdOnWK+1zK4uPjtXr1an322WeqXbu22W6325WXl6fTp0879L/0PhZ2ny/uQ+nw8vJSgwYN1KpVKyUmJqpFixaaMWMG97Ac2b17t06cOKGWLVvKw8NDHh4eSk5O1syZM+Xh4aGQkBDuZTkUFBSkG2+8Ud9//z1/HsuJ0NBQNW7c2KGtUaNG5kc0+R2naASpMsDLy0utWrXSxo0bzbaCggJt3LhRkZGRLqwMxRUWFia73e5wD7OysrR9+3bzHkZGRur06dPavXu32WfTpk0qKChQu3btSr3misgwDMXHx+uDDz7Qpk2bFBYW5rC/VatW8vT0dLiPBw8eVFpamsN93Ldvn8P/MDZs2KCAgIDL/keE0lNQUKDc3FzuYTnStWtX7du3T3v27DG31q1ba/Dgwea/cy/Ln7Nnz+rw4cMKDQ3lz2M50aFDh8u+CuS7775TvXr1JPE7zhW5erULXLB8+XLD29vbWLRokZGSkmI8+OCDRlBQkMMqNnCtM2fOGF999ZXx1VdfGZKMadOmGV999ZVx5MgRwzAMY9KkSUZQUJDx4YcfGnv37jV69+5thIWFGX/88Yd5jujoaOOWW24xtm/fbmzZssWIiIgwBg4c6KpLqnAefvhhIzAw0Pj888+N9PR0c/v999/NPg899JBRt25dY9OmTcauXbuMyMhIIzIy0tx//vx5o2nTpka3bt2MPXv2GGvXrjWqV69ujBkzxhWXVCE988wzRnJyspGammrs3bvXeOaZZwybzWasX7/eMAzuYXn251X7DIN7WR488cQTxueff26kpqYa//vf/4yoqCijWrVqxokTJwzD4B6WBzt27DA8PDyMl19+2Th06JCxdOlSw9fX13jrrbfMPvyOUziCVBkya9Yso27duoaXl5fRtm1b44svvnB1SfiTzz77zJB02RYbG2sYxoXlQZ9//nkjJCT
E8Pb2Nrp27WocPHjQ4Ry//vqrMXDgQMPf398ICAgw7r//fuPMmTMuuJqKqbD7J8lISkoy+/zxxx/GI488YlSpUsXw9fU1+vTpY6Snpzuc58cffzR69OhhVKpUyahWrZrxxBNPGOfOnSvlq6m4/vnPfxr16tUzvLy8jOrVqxtdu3Y1Q5RhcA/Ls0uDFPey7IuJiTFCQ0MNLy8vo1atWkZMTIzx/fffm/u5h+XDxx9/bDRt2tTw9vY2GjZsaLz++usO+/kdp3A2wzAM18yFAQAAAED5xDNSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAHBdGTp0qGw222VbdHS0q0sDAFxHPFxdAAAAJS06OlpJSUkObd7e3oX2PXfunDw9PR3a8vLy5OXlZXlcZ48DAJQ/zEgBAK473t7estvtDluVKlUkSTabTXPnztWdd94pPz8/vfzyyxo/frxuvvlmvfHGGwoLC5OPj48kKS0tTb1795a/v78CAgJ0zz336Pjx4+Y4RR0HALj+EaQAABXO+PHj1adPH+3bt0///Oc/JUnff/+93nvvPb3//vvas2ePCgoK1Lt3b506dUrJycnasGGDfvjhB8XExDic69LjAAAVAx/tAwBcd1avXi1/f3+Htn/961/617/+JUkaNGiQ7r//fof9eXl5WrJkiapXry5J2rBhg/bt26fU1FTVqVNHkrRkyRI1adJEO3fuVJs2bQo9DgBQMRCkAADXndtvv11z5851aAsODjb/vXXr1pcdU69ePYcwdODAAdWpU8cMUZLUuHFjBQUF6cCBA2aQuvQ4AEDFQJACAFx3/Pz81KBBgyvuL05bcccCAFQ8PCMFAEAhGjVqpKNHj+ro0aNmW0pKik6fPq3GjRu7sDIAQFnAjBQA4LqTm5urjIwMhzYPDw9Vq1at2OeIiopSs2bNNHjwYE2fPl3nz5/XI488os6dOxf60UAAQMXCjBQA4Lqzdu1ahYaGOmwdO3a0dA6bzaYPP/xQVapU0d/+9jdFRUXphhtu0IoVK65R1QCA8sRmGIbh6iIAAAAAoDxhRgoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALDo/wMmJqyyR4ZraAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error Distribution (MODEL B) (Test Set A): Mean = 32.73219379378818 Standard Deviation = 65.61751153849882\n", + "Error Distribution (MODEL B) (Test Set B): Mean = 1.9577195262268569e-25 Standard Deviation = 1.892899978318431e-25\n", + "Error Distribution (MODEL B) (Test Set C): Mean = 2.6140814129669656 Standard Deviation = 0.8034601498911891\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1IAAAIjCAYAAAAJLyrXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABQ/0lEQVR4nO3deVwW9f7//+cFCAgIiAsXKiEh5W4nV0qPmh5FrcylXAs7aJsccyuzxS0Tj6a5L8cM9ZOmWWnlyYXU4OPXJbVQEzM1FE1QywRFAYX5/eHP69PlyiBwgTzut9vcDjPznnm/5mL0+Ow9874shmEYAgAAAADkmZOjCwAAAACAkoYgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAKFCtWrVSq1atiqQvi8WiMWPG2NbHjBkji8Wi33//vUj6r169uvr161ckfRUX13/meXX06FFZLBYtWrSowGsCAEcgSAFAEVi0aJEsFsstl+3btzu6xJvq16+fXZ1eXl66//771b17d33++efKzc0tkH62bt2qMWPG6Ny5cwVyvoJUHGv76/20ZcuWG/YbhqHAwEBZLBY9/vjjDqjw7p06dUrDhw9XzZo15eHhIU9PTzVs2FDjx48vVr8LAKWXi6MLAIDSZNy4cQoODr5he40aNRxQTd64ubnpww8/lCRdunRJx44d09dff63u3burVatW+vLLL+Xt7W1rv2HDBtN9bN26VWPHjlW/fv3k6+ub5+MuXbokF5fC/b+y29V28OBBOTk57r9Juru7a9myZWrevLnd9ri4OJ04cUJubm4Oquzu7Ny5Ux07dtSFCxfUt29fNWzYUJK0a9cuTZw4UfHx8fm6zwCgIBGkAKAIdejQQY0aNTJ1zJUrV5SbmytXV9cb9mVkZMjT0zPf9RiGoczMTJUtW/aWbVxcXNS3b1+7bePHj9fEiRM1cuRIDRgwQCtWrLDtu1mdBSk3N1fZ2dlyd3eXu7t7ofZ1J44OKh07dtTKlSs1Y8YMu0C5bNkyNWzYsMgecSxI586dU5cuXeTs7Kwff/xRNWvWtNv/3nvvacGCBQ6qDgD+D4/2AUAxcu09kvfff1/Tpk1TSEiI3NzclJiYaHv/JzExUb1791b58uVtIxFXrlzRu+++a2tfvXp1vfnmm8rKyrI7f/Xq1fX4449r/fr1atSokcqWLav58+fnq9Y33nhD7dq108qVK/XLL7/Ytt/sHamZM2eqTp068vDwUPny5dWoUSMtW7ZM0tX3ml577TVJUnBwsO2RtaNHj0q6+k5OVFSUli5dqjp16sjNzU3r1q2z7bvZ+zq///67nnnmGXl7e6tChQp69dVXlZmZecPnfLP3df56zjvVdrN3pH799Vc9/fTT8vPzk4eHh5o1a6b//ve/dm2+++47WSwWffrpp3rvvfdUrVo1ub
u7q02bNjp8+PAtP/Pr9erVS3/88YdiY2Nt27Kzs/XZZ5+pd+/eNz0mIyNDw4YNU2BgoNzc3PTggw/q/fffl2EYdu2ysrI0ZMgQVapUSeXKldOTTz6pEydO3PScv/32m/75z3/K399fbm5uqlOnjj766KM8X8dfzZ8/X7/99pumTp16Q4iSJH9/f7399tv5OjcAFCRGpACgCKWlpd0wSmCxWFShQgW7bTExMcrMzNQLL7wgNzc3+fn52fY9/fTTCg0N1YQJE2z/+O3fv78WL16s7t27a9iwYdqxY4eio6N14MABrVq1yu7cBw8eVK9evfTiiy9qwIABevDBB/N9Pc8++6w2bNig2NhYPfDAAzdts2DBAg0aNEjdu3e3BZq9e/dqx44d6t27t7p27apffvlFn3zyiT744ANVrFhRklSpUiXbOTZt2qRPP/1UUVFRqlixoqpXr37bup555hlVr15d0dHR2r59u2bMmKE///xTS5YsMXV9eantr06dOqVHHnlEFy9e1KBBg1ShQgUtXrxYTz75pD777DN16dLFrv3EiRPl5OSk4cOHKy0tTZMmTVKfPn20Y8eOPNVXvXp1hYWF6ZNPPlGHDh0kSWvXrlVaWpp69uypGTNm2LU3DENPPvmkNm/erMjISD300ENav369XnvtNf3222/64IMPbG379++vjz/+WL1799YjjzyiTZs2qVOnTje95mbNmtkCb6VKlbR27VpFRkYqPT1dgwcPztO1XPPVV1+pbNmy6t69u6njAKDIGQCAQhcTE2NIuuni5uZma5eUlGRIMry9vY3Tp0/bnWP06NGGJKNXr1522xMSEgxJRv/+/e22Dx8+3JBkbNq0ybYtKCjIkGSsW7cuT3VHREQYnp6et9z/448/GpKMIUOG2La1bNnSaNmypW29c+fORp06dW7bz+TJkw1JRlJS0g37JBlOTk7G/v37b7pv9OjRtvVrn9GTTz5p1+6VV14xJBl79uwxDOP/PueYmJg7nvN2tQUFBRkRERG29cGDBxuSjP/93/+1bTt//rwRHBxsVK9e3cjJyTEMwzA2b95sSDJq1aplZGVl2dpOnz7dkGTs27fvhr7+6tr9tHPnTmPWrFlGuXLljIsXLxqGYRhPP/200bp1a1t9nTp1sh23evVqQ5Ixfvx4u/N1797dsFgsxuHDhw3D+L976pVXXrFr17t37xs+n8jISCMgIMD4/fff7dr27NnT8PHxsdV1u8/8r8qXL280aNDgtm0AoDjg0T4AKEKzZ89WbGys3bJ27dob2nXr1u2Wox4vvfSS3fo333wjSRo6dKjd9mHDhknSDY+VBQcHq3379vm+hr/y8vKSJJ0/f/6WbXx9fXXixAnt3Lkz3/20bNlStWvXznP7gQMH2q3/61//kvR/n1Vh+eabb9SkSRO7yR+8vLz0wgsv6OjRo0pMTLRr//zzz9u9U9aiRQtJVx8PzKtnnnlGly5d0po1a3T+/HmtWbPmlo/1ffPNN3J2dtagQYPstg8bNkyGYdjuxWuf0/Xtrh9dMgxDn3/+uZ544gkZhqHff//dtrRv315paWn64Ycf8nwtkpSenq5y5cqZOgYAHIFH+wCgCDVp0iRPk03cbGa/W+07duyYnJycbpj5z2q1ytfXV8eOHcvzuc26cOGCJN32H74jRozQt99+qyZNmqhGjRpq166devfurUcffTTP/ZitOTQ01G49JCRETk5OtnebCsuxY8fUtGnTG7bXqlXLtr9u3bq27ffdd59du/Lly0uS/vzzzzz3WalSJbVt21bLli3TxYsXlZOTc8vH4o4dO6YqVarc8Pv6a33X/tfJyUkhISF27a5/DPTMmTM6d+6c/vOf/+g///nPTfs8ffp0nq9Fkry9vW8bzAGguCBIAUAxdLtZ9G61z2Kx3PW5zfrpp58k3X769lq1aungwYNas2aN1q1bp88//1xz5szRqFGjNHbs2Dz1c7c1X//Z3OqzysnJuat+zHJ2dr
7pduO6iR/upHfv3howYIBSU1PVoUMHU1PI341r3yPWt29fRURE3LRN/fr1TZ2zZs2aSkhIUHZ2dqHPAAkAd4NH+wCghAsKClJubq4OHTpkt/3UqVM6d+6cgoKCCq3v//mf/5HFYtE//vGP27bz9PRUjx49FBMTo+TkZHXq1EnvvfeebSa9vIbAvLr+szh8+LByc3Ntk1RcG/m5/otdrx+9M1tbUFCQDh48eMP2n3/+2ba/MHTp0kVOTk7avn37LR/ru9b/yZMnbxjxub6+a/fUkSNH7Npdf23XZvTLyclR27Ztb7pUrlzZ1LU88cQTunTpkj7//HNTxwFAUSNIAUAJ17FjR0nStGnT7LZPnTpVkm4601pBmDhxojZs2KAePXrc8CjdX/3xxx92666urqpdu7YMw9Dly5clyfZdWNcHm/yaPXu23frMmTMlyTaznbe3typWrKj4+Hi7dnPmzLnhXGZq69ixo77//ntt27bNti0jI0P/+c9/VL16dVPveZnh5eWluXPnasyYMXriiSduW19OTo5mzZplt/2DDz6QxWKxfT7X/vf6Wf+uv8ecnZ3VrVs3ff7557bRyb86c+aM6Wt56aWXFBAQoGHDhtlNq3/N6dOnNX78eNPnBYCCxqN9AFCE1q5da/uv/3/1yCOP6P7778/XORs0aKCIiAj95z//0blz59SyZUt9//33Wrx4sZ566im1bt36rmq+cuWKPv74Y0lSZmamjh07pq+++kp79+5V69atb/luzDXt2rWT1WrVo48+Kn9/fx04cECzZs1Sp06dbO/qNGzYUJL01ltvqWfPnipTpoyeeOKJfH/ZcFJSkp588kmFh4dr27Zttmm8GzRoYGvTv39/TZw4Uf3791ejRo0UHx9/03+4m6ntjTfesE1FPmjQIPn5+Wnx4sVKSkrS559/Lienwvvvl7d6tO6vnnjiCbVu3VpvvfWWjh49qgYNGmjDhg368ssvNXjwYNs7UQ899JB69eqlOXPmKC0tTY888og2btx40++4mjhxojZv3qymTZtqwIABql27ts6ePasffvhB3377rc6ePWvqOsqXL69Vq1apY8eOeuihh9S3b1/b7+CHH37QJ598orCwMFPnBIDCQJACgCI0atSom26PiYnJd5CSpA8//FD333+/Fi1apFWrVslqtWrkyJEaPXp0vs95TVZWlp599llJkoeHhypXrqyGDRtq1KhRtkfKbufFF1/U0qVLNXXqVF24cEHVqlXToEGD7L5UtXHjxnr33Xc1b948rVu3Trm5uUpKSsp3kFqxYoVGjRqlN954Qy4uLoqKitLkyZPt2owaNUpnzpzRZ599pk8//VQdOnTQ2rVrb3gUzUxt/v7+2rp1q0aMGKGZM2cqMzNT9evX19dff11oI4NmODk56auvvtKoUaO0YsUKxcTEqHr16po8ebJtlsdrPvroI1WqVElLly7V6tWr9dhjj+m///2vAgMD7dr5+/vr+++/17hx4/TFF19ozpw5qlChgurUqaN///vf+aqzadOm+umnnzR58mT997//1f/8z//IyclJtWrV0htvvKGoqKh8fwYAUFAshtk3WgEAAACglOMdKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGAS3yMlKTc3VydPnlS5cuVksVgcXQ4AAAAABzEMQ+fPn1eVKlVu+12JBClJJ0+evOELBgEAAACUXsePH1e1atVuuZ8gJalcuXKSrn5Y3t7eDq4GAAAAgKOkp6crMDDQlhFuhSAl2R7n8/b2JkgBAAAAuOMrP0w2AQAAAAAmEaQAAAAAwCSCFAAAAACYxDtSAAAAwG0YhqErV64oJyfH0aWgADg7O8vFxeWuv/aIIAUAAADcQnZ2tlJSUnTx4kVHl4IC5OHhoYCAALm6uub7HAQpAAAA4CZyc3OVlJQkZ2dnValSRa6urnc9igHHMgxD2dnZOnPmjJKSkhQaGnrbL9
29HYcGqblz52ru3Lk6evSoJKlOnToaNWqUOnToIEnKzMzUsGHDtHz5cmVlZal9+/aaM2eO/P39bedITk7Wyy+/rM2bN8vLy0sRERGKjo6WiwsZEQAAAPmXnZ2t3NxcBQYGysPDw9HloICULVtWZcqU0bFjx5SdnS13d/d8ncehk01Uq1ZNEydO1O7du7Vr1y499thj6ty5s/bv3y9JGjJkiL7++mutXLlScXFxOnnypLp27Wo7PicnR506dVJ2dra2bt2qxYsXa9GiRRo1apSjLgkAAAD3mPyOWKD4KojfqcUwDKMAaikwfn5+mjx5srp3765KlSpp2bJl6t69uyTp559/Vq1atbRt2zY1a9ZMa9eu1eOPP66TJ0/aRqnmzZunESNG6MyZM3l+5jE9PV0+Pj5KS0vjC3kBAAAg6erTUUlJSQoODs73qAWKp9v9bvOaDYpNvM7JydHy5cuVkZGhsLAw7d69W5cvX1bbtm1tbWrWrKn77rtP27ZtkyRt27ZN9erVs3vUr3379kpPT7eNat1MVlaW0tPT7RYAAAAAyCuHv0i0b98+hYWFKTMzU15eXlq1apVq166thIQEubq6ytfX1669v7+/UlNTJUmpqal2Iera/mv7biU6Olpjx44t2AsBAABAqRG5aGeR9rewX+Mi7Q935vARqQcffFAJCQnasWOHXn75ZUVERCgxMbFQ+xw5cqTS0tJsy/Hjxwu1PwAAAKCoWCyW2y5jxoy5q3OvXr36ju3i4uL02GOPyc/PTx4eHgoNDVVERISys7Pz3Ff16tU1bdq0PLePjo6Ws7OzJk+enOdj7obDg5Srq6tq1Kihhg0bKjo6Wg0aNND06dNltVqVnZ2tc+fO2bU/deqUrFarJMlqterUqVM37L+271bc3Nzk7e1ttwAAAAD3gpSUFNsybdo0eXt7220bPnx4ofafmJio8PBwNWrUSPHx8dq3b59mzpwpV1fXQv1S448++kivv/66Pvroo0Lr468cHqSul5ubq6ysLDVs2FBlypTRxo0bbfsOHjyo5ORkhYWFSZLCwsK0b98+nT592tYmNjZW3t7eql27dpHXDgAAADia1Wq1LT4+PrJYLHbbli9frlq1asnd3V01a9bUnDlzbMdmZ2crKipKAQEBcnd3V1BQkKKjoyVdHSGSpC5dushisdjWr7dhwwZZrVZNmjRJdevWVUhIiMLDw7VgwQKVLVvW1m7Lli1q0aKFypYtq8DAQA0aNEgZGRmSpFatWunYsWMaMmSIbSTtduLi4nTp0iWNGzdO6enp2rp16118gnnj0CA1cuRIxcfH6+jRo9q3b59Gjhyp7777Tn369JGPj48iIyM1dOhQbd68Wbt379bzzz+vsLAwNWvWTJLUrl071a5dW88++6z27Nmj9evX6+2339bAgQPl5ubmyEsDAAAAip2lS5dq1KhReu+993TgwAFNmDBB77zzjhYvXixJmjFjhr766it9+umnOnjwoJYuXWoLTDt3Xn0vLCYmRikpKbb161mtVqWkpCg+Pv6WdRw5ckTh4eHq1q2b9u7dqxUrVmjLli2KioqSJH3xxReqVq2axo0bZxtJu52FCxeqV69eKlOmjHr16qWFCxea/WhMc+hkE6dPn9Zzzz2nlJQU+fj4qH79+lq/fr3+8Y9/SJI++OADOTk5qVu3bnZfyHuNs7Oz1qxZo5dffllhYWHy9PRURESExo0b56hLAgAAAIqt0aNHa8qUKbbvZg0ODlZiYqLmz5+viIgIJScnKzQ0VM2bN5fFYlFQUJDt2EqVKkmSfH19b/sazdNPP63169erZcuWslqtatasmdq0aaPnnnvO9kpNdHS0+vTpo8GDB0uSQkNDNWPGDLVs2VJz586Vn5+fnJ2dVa5cudv2JV2drvyzzz6zzezdt29ftWjRQtOnT5eXl1e+P6s7cWiQulNSdHd31+zZszV79uxbtgkKCtI333xT0KUBAAAA95SMjAwdOXJEkZGRGj
BggG37lStX5OPjI0nq16+f/vGPf+jBBx9UeHi4Hn/8cbVr185UP87OzoqJidH48eO1adMm7dixQxMmTNC///1vff/99woICNCePXu0d+9eLV261HacYRjKzc1VUlKSatWqlef+PvnkE4WEhKhBgwaSpIceekhBQUFasWKFIiMjTdVuRrF7RwoAAABAwbtw4YIkacGCBUpISLAtP/30k7Zv3y5Jevjhh5WUlKR3331Xly5d0jPPPKPu3bvnq7+qVavq2Wef1axZs7R//35lZmZq3rx5tlpefPFFuzr27NmjQ4cOKSQkxFQ/Cxcu1P79++Xi4mJbEhMTC33SCYd/jxQAAACAwufv768qVaro119/VZ8+fW7ZztvbWz169FCPHj3UvXt3hYeH6+zZs/Lz81OZMmXyNfNe+fLlFRAQYJtM4uGHH1ZiYqJq1Khxy2PyMsvfvn37tGvXLn333Xfy8/OzbT979qxatWqln3/+WTVr1jRdb14QpIqhv37BG1++BgAAgIIyduxYDRo0SD4+PgoPD1dWVpZ27dqlP//8U0OHDtXUqVMVEBCgv/3tb3JyctLKlStltVrl6+sr6erMfRs3btSjjz4qNzc3lS9f/oY+5s+fr4SEBHXp0kUhISHKzMzUkiVLtH//fs2cOVOSNGLECDVr1kxRUVHq37+/PD09lZiYqNjYWM2aNcvWV3x8vHr27Ck3NzdVrFjxhr4WLlyoJk2a6O9///sN+xo3bqyFCxcW2vdKEaQAAAAAk0rqf+zu37+/PDw8NHnyZL322mvy9PRUvXr1bJM+lCtXTpMmTdKhQ4fk7Oysxo0b65tvvpGT09U3gqZMmaKhQ4dqwYIFqlq1qo4ePXpDH02aNNGWLVv00ksv6eTJk/Ly8lKdOnW0evVqtWzZUpJUv359xcXF6a233lKLFi1kGIZCQkLUo0cP23nGjRunF198USEhIcrKypJhGHb9ZGdn6+OPP9aIESNueq3dunXTlClTNGHCBJUpU6YAPj17FuP6ikqh9PR0+fj4KC0trVh8OS8jUgAAAI6XmZmppKQkBQcHy93d3dHloADd7neb12zAZBMAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGCSi6MLAAAAAEqcZT2Ktr/eK4q2P9wRI1IAAADAPcRisdx2GTNmzF2de/Xq1XdsFxcXp8cee0x+fn7y8PBQaGioIiIilJ2dnee+qlevrmnTpuWp3bVrc3Z2VpUqVRQZGak///wzz33lB0EKAAAAuIekpKTYlmnTpsnb29tu2/Dhwwu1/8TERIWHh6tRo0aKj4/Xvn37NHPmTLm6uionJ6dQ+hw3bpxSUlKUnJyspUuXKj4+XoMGDSqUvq4hSAEAAAD3EKvValt8fHxksVjsti1fvly1atWSu7u7atasqTlz5tiOzc7OVlRUlAICAuTu7q6goCBFR0dLujryI0ldunSRxWKxrV9vw4YNslqtmjRpkurWrauQkBCFh4drwYIFKlu2rK3dli1b1KJFC5UtW1aBgYEaNGiQMjIyJEmtWrXSsWPHNGTIENto0+2UK1dOVqtVVatWVevWrRUREaEffvjhLj7FOyNIAQAAAKXE0qVLNWrUKL333ns6cOCAJkyYoHfeeUeLFy+WJM2YMUNfffWVPv30Ux08eFBLly61BaadO3dKkmJiYpSSkmJbv57ValVKSori4+NvWceRI0cUHh6ubt26ae/evVqxYoW2bNmiqKgoSdIXX3yhatWq2UaaUlJS8nyNv/32m77++ms1bdo0z8fkB5NNAAAAAKXE6NGjNWXKFHXt2lWSFBwcrMTERM2fP18RERFKTk5WaGiomjdvLovFoqCgINuxlSpVkiT5+vrKarXeso+nn35a69evV8uWLWW1WtWsWTO1adNGzz33nLy9vSVJ0dHR6tOnjwYPHixJCg0N1YwZM9SyZU
vNnTtXfn5+cnZ2to003cmIESP09ttvKycnR5mZmWratKmmTp2a348pTxiRAgAAAEqBjIwMHTlyRJGRkfLy8rIt48eP15EjRyRJ/fr1U0JCgh588EENGjRIGzZsMN2Ps7OzYmJidOLECU2aNElVq1bVhAkTVKdOHdvI0p49e7Ro0SK7Otq3b6/c3FwlJSWZ7vO1115TQkKC9u7dq40bN0qSOnXqVGjvZEmMSAEAAAClwoULFyRJCxYsuOGxN2dnZ0nSww8/rKSkJK1du1bffvutnnnmGbVt21afffaZ6f6qVq2qZ599Vs8++6zeffddPfDAA5o3b57Gjh2rCxcu6MUXX7zphBD33Xef6b4qVqyoGjVqSLo6ujVt2jSFhYVp8+bNatu2renz5QVBCgAAACgF/P39VaVKFf3666/q06fPLdt5e3urR48e6tGjh7p3767w8HCdPXtWfn5+KlOmTL5GecqXL6+AgADbZBIPP/ywEhMTbeHnZu5mlr9rwfDSpUv5Oj4vCFIAAABAKTF27FgNGjRIPj4+Cg8PV1ZWlnbt2qU///xTQ4cO1dSpUxUQEKC//e1vcnJy0sqVK2W1WuXr6yvp6sx9Gzdu1KOPPio3NzeVL1/+hj7mz5+vhIQEdenSRSEhIcrMzNSSJUu0f/9+zZw5U9LVd5qaNWumqKgo9e/fX56enkpMTFRsbKxmzZpl6ys+Pl49e/aUm5ubKlaseMvrOn/+vFJTU2UYho4fP67XX39dlSpV0iOPPFLwH+L/jyAFAAAAmNV7haMryJf+/fvLw8NDkydP1muvvSZPT0/Vq1fPNulDuXLlNGnSJB06dEjOzs5q3LixvvnmGzk5XZ1aYcqUKRo6dKgWLFigqlWr6ujRozf00aRJE23ZskUvvfSSTp48KS8vL9WpU0erV69Wy5YtJUn169dXXFyc3nrrLbVo0UKGYSgkJEQ9evSwnWfcuHF68cUXFRISoqysLBmGccvrGjVqlEaNGiXp6qQYjRs31oYNG1ShQoUC+uRuZDFuV1EpkZ6eLh8fH6WlpdlmEnGkyEX/N5Xkwn6NHVgJAABA6ZWZmamkpCQFBwfL3d3d0eWgAN3ud5vXbMCsfQAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYJKLowsAAAAASpqojVFF2t+sNrOKtD/cGSNSAAAAwD3EYrHcdhkzZsxdnXv16tV3bBcXF6fHHntMfn5+8vDwUGhoqCIiIpSdnZ3nvqpXr65p06blqe2PP/6op59+Wv7+/nJ3d1doaKgGDBigX375Jc/9mUWQAgAAAO4hKSkptmXatGny9va22zZ8+PBC7T8xMVHh4eFq1KiR4uPjtW/fPs2cOVOurq7Kyckp8P7WrFmjZs2aKSsrS0uXLtWBAwf08ccfy8fHR++8806B93cNQQoAAAC4h1itVtvi4+Mji8Vit2358uWqVauW3N3dVbNmTc2ZM8d2bHZ2tqKiohQQECB3d3cFBQUpOjpa0tURIknq0qWLLBaLbf16GzZskNVq1aRJk1S3bl2FhIQoPDxcCxYsUNmyZW3ttmzZohYtWqhs2bIKDAzUoEGDlJGRIUlq1aqVjh07piFDhthG0m7m4sWLev7559WxY0d99dVXatu2rYKDg9W0aVO9//77mj9/fgF8ojdHkAIAAABKiaVLl2rUqFF67733dODAAU2YMEHvvPOOFi9eLEmaMWOGvvrqK3366ac6ePCgli5dagtMO3fulCTFxMQoJSXFtn49q9WqlJQUxcfH37KOI0eOKDw8XN26ddPevXu1YsUKbdmyRVFRV989++KLL1StWjWNGzfONpJ2M+vXr9fvv/+u119//ab7fX198/Kx5AuTTQAAAAClxOjRozVlyhR17dpVkhQcHKzExETNnz9fERERSk5OVmhoqJo3by6LxaKgoCDbsZUqVZJ0NZxYrd
Zb9vH0009r/fr1atmypaxWq5o1a6Y2bdroueeek7e3tyQpOjpaffr00eDBgyVJoaGhmjFjhlq2bKm5c+fKz89Pzs7OKleu3G37OnTokCSpZs2ad/W55AcjUgAAAEApkJGRoSNHjigyMlJeXl62Zfz48Tpy5IgkqV+/fkpISNCDDz6oQYMGacOGDab7cXZ2VkxMjE6cOKFJkyapatWqmjBhgurUqWMbWdqzZ48WLVpkV0f79u2Vm5urpKSkPPdlGIbp+goKQQoAAAAoBS5cuCBJWrBggRISEmzLTz/9pO3bt0uSHn74YSUlJendd9/VpUuX9Mwzz6h79+756q9q1ap69tlnNWvWLO3fv1+ZmZmaN2+erZYXX3zRro49e/bo0KFDCgkJyXMfDzzwgCTp559/zleNd4NH+wAAAIBSwN/fX1WqVNGvv/6qPn363LKdt7e3evTooR49eqh79+4KDw/X2bNn5efnpzJlyuRr5r3y5csrICDANpnEww8/rMTERNWoUeOWx+Rllr927dqpYsWKmjRpklatWnXD/nPnzhXae1IEKQAAAKCUGDt2rAYNGiQfHx+Fh4crKytLu3bt0p9//qmhQ4dq6tSpCggI0N/+9jc5OTlp5cqVslqttjBSvXp1bdy4UY8++qjc3NxUvnz5G/qYP3++EhIS1KVLF4WEhCgzM1NLlizR/v37NXPmTEnSiBEj1KxZM0VFRal///7y9PRUYmKiYmNjNWvWLFtf8fHx6tmzp9zc3FSxYsUb+vL09NSHH36op59+Wk8++aQGDRqkGjVq6Pfff9enn36q5ORkLV++vFA+S4IUAAAAYNKsNrMcXUK+9O/fXx4eHpo8ebJee+01eXp6ql69erZJH8qVK6dJkybp0KFDcnZ2VuPGjfXNN9/IyenqG0FTpkzR0KFDtWDBAlWtWlVHjx69oY8mTZpoy5Yteumll3Ty5El5eXmpTp06Wr16tVq2bClJql+/vuLi4vTWW2+pRYsWMgxDISEh6tGjh+0848aN04svvqiQkBBlZWXd8n2ozp07a+vWrYqOjlbv3r2Vnp6uwMBAPfbYYxo/fnzBfoB/YTEc+YZWMZGeni4fHx+lpaXZZhJxpMhF/zeV5MJ+jR1YCQAAQOmVmZmppKQkBQcHy93d3dHloADd7neb12zAZBMAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAA3AZzs917CuJ3SpACAAAAbqJMmTKSpIsXLzq4EhS0a7/Ta7/j/OB7pAAAAICbcHZ2lq+vr06fPi1J8vDwkMVicXBVuBuGYejixYs6ffq0fH195ezsnO9zEaQAAACAW7BarZJkC1O4N/j6+tp+t/lFkAIAAABuwWKxKCAgQJUrV9bly5cdXQ4KQJkyZe5qJOoaghQAAABwB87OzgXyj2/cO5hsAgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkxwapKKjo9W4cWOVK1dOlStX1lNPPaWDBw/atWnVqpUsFovd8tJLL9m1SU5OVqdOneTh4aHKlSvrtdde05UrV4ryUgAAAACUIg79Qt64uDgNHDhQjRs31pUrV/Tmm2+qXbt2SkxMlKenp63dgAEDNG7cONu6h4eH7eecnBx16tRJVqtVW7duVUpKip577jmVKVNGEyZMKNLrAQAAAFA6ODRIrVu3zm590aJFqly5snbv3q2///3vtu0eHh6yWq03PceGDRuUmJiob7/9Vv7+/nrooYf07rvvasSIERozZoxcXV0L9RoAAAAAlD7F6h2ptLQ0SZKfn5/d9qVLl6pixYqqW7euRo4cqYsXL9r2bdu2TfXq1ZO/v79tW/v27ZWenq79+/fftJ+srCylp6fbLQAAAACQVw4dkfqr3NxcDR48WI8++qjq1q1r2967d28FBQWpSpUq2rt3r0aMGKGDBw/qiy++kCSlpqbahShJtvXU1NSb9h
UdHa2xY8cW0pUAAAAAuNcVmyA1cOBA/fTTT9qyZYvd9hdeeMH2c7169RQQEKA2bdroyJEjCgkJyVdfI0eO1NChQ23r6enpCgwMzF/hAAAAAEqdYvFoX1RUlNasWaPNmzerWrVqt23btGlTSdLhw4clSVarVadOnbJrc239Vu9Vubm5ydvb224BAAAAgLxyaJAyDENRUVFatWqVNm3apODg4Dsek5CQIEkKCAiQJIWFhWnfvn06ffq0rU1sbKy8vb1Vu3btQqkbAAAAQOnm0Ef7Bg4cqGXLlunLL79UuXLlbO80+fj4qGzZsjpy5IiWLVumjh07qkKFCtq7d6+GDBmiv//976pfv74kqV27dqpdu7aeffZZTZo0SampqXr77bc1cOBAubm5OfLyAAAAANyjHDoiNXfuXKWlpalVq1YKCAiwLStWrJAkubq66ttvv1W7du1Us2ZNDRs2TN26ddPXX39tO4ezs7PWrFkjZ2dnhYWFqW/fvnruuefsvncKAAAAAAqSQ0ekDMO47f7AwEDFxcXd8TxBQUH65ptvCqosAAAAALitYjHZBAAAAACUJAQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATHJokIqOjlbjxo1Vrlw5Va5cWU899ZQOHjxo1yYzM1MDBw5UhQoV5OXlpW7duunUqVN2bZKTk9WpUyd5eHiocuXKeu2113TlypWivBQAAAAApYhDg1RcXJwGDhyo7du3KzY2VpcvX1a7du2UkZFhazNkyBB9/fXXWrlypeLi4nTy5El17drVtj8nJ0edOnVSdna2tm7dqsWLF2vRokUaNWqUIy4JAAAAQClgMQzDcHQR15w5c0aVK1dWXFyc/v73vystLU2VKlXSsmXL1L17d0nSzz//rFq1amnbtm1q1qyZ1q5dq8cff1wnT56Uv7+/JGnevHkaMWKEzpw5I1dX1xv6ycrKUlZWlm09PT1dgYGBSktLk7e3d9Fc7G1ELtpp+3lhv8YOrAQAAAAoXdLT0+Xj43PHbFCs3pFKS0uTJPn5+UmSdu/ercuXL6tt27a2NjVr1tR9992nbdu2SZK2bdumevXq2UKUJLVv317p6enav3//TfuJjo6Wj4+PbQkMDCysSwIAAABwDyo2QSo3N1eDBw/Wo48+qrp160qSUlNT5erqKl9fX7u2/v7+Sk1NtbX5a4i6tv/avpsZOXKk0tLSbMvx48cL+GoAAAAA3MtcHF3ANQMHDtRPP/2kLVu2FHpfbm5ucnNzK/R+AAAAANybisWIVFRUlNasWaPNmzerWrVqtu1Wq1XZ2dk6d+6cXftTp07JarXa2lw/i9+19WttAAAAAKAgOTRIGYahqKgorVq1Sps2bVJwcLDd/oYNG6pMmTLauHGjbdvBgw
eVnJyssLAwSVJYWJj27dun06dP29rExsbK29tbtWvXLpoLAQAAAFCqOPTRvoEDB2rZsmX68ssvVa5cOds7TT4+Pipbtqx8fHwUGRmpoUOHys/PT97e3vrXv/6lsLAwNWvWTJLUrl071a5dW88++6wmTZqk1NRUvf322xo4cCCP7wEAAAAoFA4NUnPnzpUktWrVym57TEyM+vXrJ0n64IMP5OTkpG7duikrK0vt27fXnDlzbG2dnZ21Zs0avfzyywoLC5Onp6ciIiI0bty4oroMAAAAAKVMsfoeKUfJ61zxRYXvkQIAAAAco0R+jxQAAAAAlAQEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADApHwFqV9//bWg6wAAAACAEiNfQapGjRpq3bq1Pv74Y2VmZhZ0TQAAAABQrOUrSP3www+qX7++hg4dKqvVqhdffFHff/99QdcGAAAAAMVSvoLUQw89pOnTp+vkyZP66KOPlJKSoubNm6tu3bqaOnWqzpw5U9B1AgAAAECxcVeTTbi4uKhr165auXKl/v3vf+vw4cMaPny4AgMD9dxzzyklJaWg6gQAAACAYuOugtSuXbv0yiuvKCAgQFOnTtXw4cN15MgRxcbG6uTJk+rcuXNB1QkAAAAAxYZLfg6aOnWqYmJidPDgQXXs2FFLlixRx44d5eR0NZcFBwdr0aJFql69ekHWCgAAAADFQr6C1Ny5c/XPf/5T/fr1U0BAwE3bVK5cWQsXLryr4gAAAACgOMpXkDp06NAd27i6uioiIiI/pwcAAACAYi1f70jFxMRo5cqVN2xfuXKlFi9efNdFAQAAAEBxlq8gFR0drYoVK96wvXLlypowYcJdFwUAAAAAxVm+glRycrKCg4Nv2B4UFKTk5OS7LgoAAAAAirN8BanKlStr7969N2zfs2ePKlSocNdFAQAAAEBxlq8g1atXLw0aNEibN29WTk6OcnJytGnTJr366qvq2bNnQdcIAAAAAMVKvmbte/fdd3X06FG1adNGLi5XT5Gbm6vnnnuOd6QAAAAA3PPyFaRcXV21YsUKvfvuu9qzZ4/Kli2revXqKSgoqKDrAwAAAIBiJ19B6poHHnhADzzwQEHVAgAAAAAlQr6CVE5OjhYtWqSNGzfq9OnTys3Ntdu/adOmAikOAAAAAIqjfAWpV199VYsWLVKnTp1Ut25dWSyWgq4LAAAAAIqtfAWp5cuX69NPP1XHjh0Luh4AAAAAKPbyNf25q6uratSoUdC1AAAAAECJkK8gNWzYME2fPl2GYRR0PQAAAABQ7OXr0b4tW7Zo8+bNWrt2rerUqaMyZcrY7f/iiy8KpDgAAAAAKI7yFaR8fX3VpUuXgq4FAAAAAEqEfAWpmJiYgq4DAAAAAEqMfL0jJUlXrlzRt99+q/nz5+v8+fOSpJMnT+rChQsFVhwAAAAAFEf5GpE6duyYwsPDlZycrKysLP3jH/9QuXLl9O9//1tZWVmaN29eQdcJAAAAAMVGvkakXn31VTVq1Eh//vmnypYta9vepUsXbdy4scCKAwAAAIDiKF8jUv/7v/+rrVu3ytXV1W579erV9dtvvxVIYQAAAABQXOVrRCo3N1c5OTk3bD9x4oTKlSt310UBAAAAQHGWryDVrl07TZs2zbZusVh04cIFjR49Wh07diyo2gAAAACgWMrXo31TpkxR+/btVbt2bWVmZqp37946dOiQKlasqE8++aSgawQAAACAYiVfQapatWras2ePli9frr179+
rChQuKjIxUnz597CafAAAAAIB7Ub6ClCS5uLiob9++BVkLAAAAAJQI+QpSS5Ysue3+5557Ll/FAAAAAEBJkK8g9eqrr9qtX758WRcvXpSrq6s8PDzyHKTi4+M1efJk7d69WykpKVq1apWeeuop2/5+/fpp8eLFdse0b99e69ats62fPXtW//rXv/T111/LyclJ3bp10/Tp0+Xl5ZWfSwMAAACAO8rXrH1//vmn3XLhwgUdPHhQzZs3NzXZREZGhho0aKDZs2ffsk14eLhSUlJsy/Xn79Onj/bv36/Y2FitWbNG8fHxeuGFF/JzWQAAAACQJ/l+R+p6oaGhmjhxovr27auff/45T8d06NBBHTp0uG0bNzc3Wa3Wm+47cOCA1q1bp507d6pRo0aSpJkzZ6pjx456//33VaVKFXMXAQAAAAB5kK8RqVtxcXHRyZMnC/KU+u6771S5cmU9+OCDevnll/XHH3/Y9m3btk2+vr62ECVJbdu2lZOTk3bs2HHLc2ZlZSk9Pd1uAQAAAIC8yteI1FdffWW3bhiGUlJSNGvWLD366KMFUph09bG+rl27Kjg4WEeOHNGbb76pDh06aNu2bXJ2dlZqaqoqV65sd4yLi4v8/PyUmpp6y/NGR0dr7NixBVYnAAAAgNIlX0HqrxNCSJLFYlGlSpX02GOPacqUKQVRlySpZ8+etp/r1aun+vXrKyQkRN99953atGmT7/OOHDlSQ4cOta2np6crMDDwrmoFAAAAUHrkK0jl5uYWdB15cv/996tixYo6fPiw2rRpI6vVqtOnT9u1uXLlis6ePXvL96qkq+9dubm5FXa5AAAAAO5RBfqOVGE7ceKE/vjjDwUEBEiSwsLCdO7cOe3evdvWZtOmTcrNzVXTpk0dVSYAAACAe1y+RqT++ljcnUydOvWW+y5cuKDDhw/b1pOSkpSQkCA/Pz/5+flp7Nix6tatm6xWq44cOaLXX39dNWrUUPv27SVJtWrVUnh4uAYMGKB58+bp8uXLioqKUs+ePZmxDwAAAEChyVeQ+vHHH/Xjjz/q8uXLevDBByVJv/zyi5ydnfXwww/b2lksltueZ9euXWrdurVt/VpAi4iI0Ny5c7V3714tXrxY586dU5UqVdSuXTu9++67do/lLV26VFFRUWrTpo3tC3lnzJiRn8sCAAAAgDzJV5B64oknVK5cOS1evFjly5eXdPVLep9//nm1aNFCw4YNy9N5WrVqJcMwbrl//fr1dzyHn5+fli1blrfCAQAAAKAA5OsdqSlTpig6OtoWoiSpfPnyGj9+fIHO2gcAAAAAxVG+glR6errOnDlzw/YzZ87o/Pnzd10UAAAAABRn+QpSXbp00fPPP68vvvhCJ06c0IkTJ/T5558rMjJSXbt2LegaAQAAAKBYydc7UvPmzdPw4cPVu3dvXb58+eqJXFwUGRmpyZMnF2iBAAAAAFDc5CtIeXh4aM6cOZo8ebKOHDkiSQoJCZGnp2eBFgcAAAAAxdFdfSFvSkqKUlJSFBoaKk9Pz9vOwAcAAAAA94p8Bak//vhDbdq00QMPPKCOHTsqJSVFkhQZGZnnqc8BAAAAoKTKV5AaMmSIypQpo+TkZHl4eNi29+jRQ+vWrSuw4gAAAACgOMrXO1IbNmzQ+vXrVa1aNbvtoaGhOnbsWIEUBgAAAADFVb5GpDIyMuxGoq45e/as3Nzc7rooAAAAACjO8hWkWrRooSVLltjWLRaLcnNzNWnSJLVu3brAigMAAACA4ihfj/ZNmjRJbdq00a5du5Sdna3XX39d+/fv19mzZ/X//t//K+gaAQAAAKBYydeIVN26dfXLL7+oefPm6ty5szIyMtS1a1f9+OOPCgkJKegaAQAAAKBYMT0idfnyZYWHh2vevHl66623CqMmAAAAACjWTI9IlSlTRnv37i2MWgAAAACgRMjXo319+/bVwoULC7oWAAAAACgR8jXZxJUrV/TRRx/p22+/VcOGDeXp6Wm3f+rUqQ
VSHAAAAAAUR6aC1K+//qrq1avrp59+0sMPPyxJ+uWXX+zaWCyWgqsOAAAAAIohU0EqNDRUKSkp2rx5sySpR48emjFjhvz9/QulOAAAAAAojky9I2UYht362rVrlZGRUaAFAQAAAEBxl6/JJq65PlgBAAAAQGlgKkhZLJYb3oHinSgAAAAApY2pd6QMw1C/fv3k5uYmScrMzNRLL710w6x9X3zxRcFVCAAAAADFjKkgFRERYbfet2/fAi0GAAAAAEoCU0EqJiamsOoAAAAAgBLjriabAAAAAIDSiCAFAAAAACYRpAAAAADAJFPvSKHoRS7aabe+sF9jB1UCAAAA4BpGpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQKqGiNkY5ugQAAACg1CJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMcmiQio+P1xNPPKEqVarIYrFo9erVdvsNw9CoUaMUEBCgsmXLqm3btjp06JBdm7Nnz6pPnz7y9vaWr6+vIiMjdeHChSK8CgAAAACljUODVEZGhho0aKDZs2ffdP+kSZM0Y8YMzZs3Tzt27JCnp6fat2+vzMxMW5s+ffpo//79io2N1Zo1axQfH68XXnihqC4BAAAAQCnk4sjOO3TooA4dOtx0n2EYmjZtmt5++2117txZkrRkyRL5+/tr9erV6tmzpw4cOKB169Zp586datSokSRp5syZ6tixo95//31VqVKlyK4FAAAAQOlRbN+RSkpKUmpqqtq2bWvb5uPjo6ZNm2rbtm2SpG3btsnX19cWoiSpbdu2cnJy0o4dO2557qysLKWnp9stAAAAAJBXxTZIpaamSpL8/f3ttvv7+9v2paamqnLlynb7XVxc5OfnZ2tzM9HR0fLx8bEtgYGBBVx94YraGOXoEgAAAIBSrdgGqcI0cuRIpaWl2Zbjx487uiQAAAAAJUixDVJWq1WSdOrUKbvtp06dsu2zWq06ffq03f4rV67o7NmztjY34+bmJm9vb7sFAAAAAPKq2Aap4OBgWa1Wbdy40bYtPT1dO3bsUFhYmCQpLCxM586d0+7du21tNm3apNzcXDVt2rTIawYAAABQOjh01r4LFy7o8OHDtvWkpCQlJCTIz89P9913nwYPHqzx48crNDRUwcHBeuedd1SlShU99dRTkqRatWopPDxcAwYM0Lx583T58mVFRUWpZ8+ezNgHAAAAoNA4NEjt2rVLrVu3tq0PHTpUkhQREaFFixbp9ddfV0ZGhl544QWdO3dOzZs317p16+Tu7m47ZunSpYqKilKbNm3k5OSkbt26acaMGUV+LQAAAABKD4cGqVatWskwjFvut1gsGjdunMaNG3fLNn5+flq2bFlhlAcAAAAAN1Vs35ECAAAAgOKKIAUAAAAAJjn00T4UrMhFO+3WF/Zr7KBKAAAAgHsbI1IAAAAAYBJBCgAAAABM4tG+Eu76x/kAAAAAFD5GpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMInvkSphIhft1C86Z/sZAAAAQNFjRAoAAAAATCJIAQAAAIBJBCkAAAAAMIkgVcL8oumOLgEAAAAo9QhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCQXRxeAwhO5aKft54X9GjuwEgAAAODewogUAAAAAJhEkAIAAA
AAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSBVgv2i6Y4uAQAAACiVCFIAAAAAYBJBCgAAAABMcnF0ASgakYt22q0v7NfYQZUAAAAAJR8jUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEqRLkF013dAkAAAAARJACAAAAANOKdZAaM2aMLBaL3VKzZk3b/szMTA0cOFAVKlSQl5eXunXrplOnTjmwYgAAAAClQbEOUpJUp04dpaSk2JYtW7bY9g0ZMkRff/21Vq5cqbi4OJ08eVJdu3Z1YLUAAAAASgMXRxdwJy4uLrJarTdsT0tL08KFC7Vs2TI99thjkqSYmBjVqlVL27dvV7NmzYq6VAAAAAClRLEfkTp06JCqVKmi+++/X3369FFycrIkaffu3bp8+bLatm1ra1uzZk3dd9992rZt223PmZWVpfT0dLsFAAAAAPKqWAeppk2batGiRVq3bp3mzp2rpKQktWjRQufPn1dqaqpcXV3l6+trd4y/v79SU1Nve97o6Gj5+PjYlsDAwEK8CgAAAAD3mmL9aF+HDh1sP9evX19NmzZVUFCQPv30U5UtWzbf5x05cqSGDh1qW09PTydMAQAAAMizYj0idT1fX1898MADOnz4sKxWq7Kzs3Xu3Dm7NqdOnbrpO1V/5ebmJm9vb7sFAAAAAPKqRAWpCxcu6MiRIwoICFDDhg1VpkwZbdy40bb/4MGDSk5OVlhYmAOrBAAAAHCvK9aP9g0fPlxPPPGEgoKCdPLkSY0ePVrOzs7q1auXfHx8FBkZqaFDh8rPz0/e3t7617/+pbCwMGbsAwAAAFCoinWQOnHihHr16qU//vhDlSpVUvPmzbV9+3ZVqlRJkvTBBx/IyclJ3bp1U1ZWltq3b685c+Y4uGoAAAAA97piHaSWL19+2/3u7u6aPXu2Zs+eXUQVAQAAAEAJe0cKAAAAAIoDghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkSrhfNN3RJQAAAAClDkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkF0cXAMeIXLTT9vPCfo0dWAkAAABQ8jAiBQAAAAAmEaQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIHUP+EXTHV0CAAAAUKoQpAAAAADAJIJUCcGoEwAAAFB8EKQAAAAAwCSCFAAAAACYRJACAAAAAJMIUgAAAABgEkEKAAAAAEwiSAEAAACASQQpAAAAADCJIAUAAAAAJhGkAAAAAMAkF0cXgNIpctFO288L+zV2YCUAAACAeYxIAQAAAIBJBCkAAAAAMIlH+2D3mJ3Eo3YAAADAnTAiBQAAAAAmEaQAAAAAwCQe7YMpPAYIAAAAMCIFAAAAAKYxIoUbXD/qlNe2jE4BAACgtGBE6h7xi6Y7ugQAAACg1CBIAQAAAIBJPNoHh7vdo4T3yuOCpXGSDh77BAAA9zKCFArM7cKCmfeu8nrO6/fn9x/rZkKOmWssinpKinvxmg
AAQOl2zzzaN3v2bFWvXl3u7u5q2rSpvv/+e0eXBAAAAOAedU+MSK1YsUJDhw7VvHnz1LRpU02bNk3t27fXwYMHVblyZUeXBwfJ7yODhTWyVNjM9GfmmvI66ldYo455PbYojsuve3VEjsc3ARRH9+rfuSh+7okgNXXqVA0YMEDPP/+8JGnevHn673//q48++khvvPGGg6srOr9ouh7Qq44uo1AVdTgprD7N/AO0MN4hK6hzFvZnIxXM/wHeTZ23+10VRljP7/UX1COqpUFpv34zinNYLs61AcVVcfv7r6T/OS7xQSo7O1u7d+/WyJEjbducnJzUtm1bbdu27abHZGVlKSsry7aelpYmSUpPTy/cYvMo+9KFG7Zd0eW8Hasbj3WUv36eN7umwuzvTn0WVG0FcR4zdRdG/2bquZtab9XHnc5xuz+Xt6vtVu3MnP9OfRTUPXa7tnn9e8nMcfnt43bnKS5/f+ZFQV1/aVCcf8fFuTY4Fn/Gb624fTbF9c/xtVoMw7htO4txpxbF3MmTJ1W1alVt3bpVYWFhtu2vv/664uLitGPHjhuOGTNmjMaOHVuUZQIAAAAoQY4fP65q1ardcn+JH5HKj5EjR2ro0KG29dzcXJ09e1YVKlSQxWJxYGVXE3BgYKCOHz8ub29vh9aC0ol7EI7GPQhH4x6Eo3EPOpZhGDp//ryqVKly23YlPkhVrFhRzs7OOnXqlN32U6dOyWq13vQYNzc3ubm52W3z9fUtrBLzxdvbmz84cCjuQTga9yAcjXsQjsY96Dg+Pj53bFPipz93dXVVw4YNtXHjRtu23Nxcbdy40e5RPwAAAAAoKCV+REqShg4dqoiICDVq1EhNmjTRtGnTlJGRYZvFDwAAAAAK0j0RpHr06KEzZ85o1KhRSk1N1UMPPaR169bJ39/f0aWZ5ubmptGjR9/w6CFQVLgH4Wjcg3A07kE4GvdgyVDiZ+0DAAAAgKJW4t+RAgAAAICiRpACAAAAAJMIUgAAAABgEkEKAAAAAEwiSBUjs2fPVvXq1eXu7q6mTZvq+++/d3RJuEfEx8friSeeUJUqVWSxWLR69Wq7/YZhaNSoUQoICFDZsmXVtm1bHTp0yK7N2bNn1adPH3l7e8vX11eRkZG6cOFCEV4FSrLo6Gg1btxY5cqVU+XKlfXUU0/p4MGDdm0yMzM1cOBAVahQQV5eXurWrdsNX7aenJysTp06ycPDQ5UrV9Zrr72mK1euFOWloISaO3eu6tevb/uC07CwMK1du9a2n/sPRWnixImyWCwaPHiwbRv3YMlDkComVqxYoaFDh2r06NH64Ycf1KBBA7Vv316nT592dGm4B2RkZKhBgwaaPXv2TfdPmjRJM2bM0Lx587Rjxw55enqqffv2yszMtLXp06eP9u/fr9jYWK1Zs0bx8fF64YUXiuoSUMLFxcVp4MCB2r59u2JjY3X58mW1a9dOGRkZtjZDhgzR119/rZUrVyouLk4nT55U165dbftzcnLUqVMnZWdna+vWrVq8eLEWLVqkUaNGOeKSUMJUq1ZNEydO1O7du7Vr1y499thj6ty5s/bv3y+J+w9FZ+fOnZo/f77q169vt517sAQyUCw0adLEGDhwoG09JyfHqFKlihEdHe3AqnAvkmSsWrXKtp6bm2tYrVZj8uTJtm3nzp0z3NzcjE8++cQwDMNITEw0JBk7d+60tVm7dq1hsViM3377rchqx73j9OnThiQjLi7OMIyr91yZMmWMlStX2tocOHDAkGRs27bNMAzD+OabbwwnJycjNTXV1mbu3LmGt7e3kZWVVbQXgHtC+fLljQ8//JD7D0Xm/PnzRmhoqBEbG2u0bNnSePXVVw3D4O/AkooRqWIgOztbu3fvVtu2bW3bnJyc1LZtW23bts2BlaE0SEpKUmpqqt395+Pjo6ZNm9ruv23btsnX11eNGjWytWnbtq2cnJy0Y8eOIq8ZJV9aWpokyc/PT5K0e/duXb582e4+rFmzpu
677z67+7BevXp2X7bevn17paen20YVgLzIycnR8uXLlZGRobCwMO4/FJmBAweqU6dOdveaxN+BJZWLowuA9PvvvysnJ8fuD4Yk+fv76+eff3ZQVSgtUlNTJemm99+1fampqapcubLdfhcXF/n5+dnaAHmVm5urwYMH69FHH1XdunUlXb3HXF1d5evra9f2+vvwZvfptX3Anezbt09hYWHKzMyUl5eXVq1apdq1ayshIYH7D4Vu+fLl+uGHH7Rz584b9vF3YMlEkAIAFKmBAwfqp59+0pYtWxxdCkqZBx98UAkJCUpLS9Nnn32miIgIxcXFOboslALHjx/Xq6++qtjYWLm7uzu6HBQQHu0rBipWrChnZ+cbZmY5deqUrFarg6pCaXHtHrvd/We1Wm+Y+OTKlSs6e/Ys9yhMiYqK0po1a7R582ZVq1bNtt1qtSo7O1vnzp2za3/9fXiz+/TaPuBOXF1dVaNGDTVs2FDR0dFq0KCBpk+fzv2HQrd7926dPn1aDz/8sFxcXOTi4qK4uDjNmDFDLi4u8vf35x4sgQhSxYCrq6saNmyojRs32rbl5uZq48aNCgsLc2BlKA2Cg4NltVrt7r/09HTt2LHDdv+FhYXp3Llz2r17t63Npk2blJubq6ZNmxZ5zSh5DMNQVFSUVq1apU2bNik4ONhuf8OGDVWmTBm7+/DgwYNKTk62uw/37dtnF+pjY2Pl7e2t2rVrF82F4J6Sm5urrKws7j8UujZt2mjfvn1KSEiwLY0aNVKfPn1sP3MPlkCOnu0CVy1fvtxwc3MzFi1aZCQmJhovvPCC4evrazczC5Bf58+fN3788Ufjxx9/NCQZU6dONX788Ufj2LFjhmEYxsSJEw1fX1/jyy+/NPbu3Wt07tzZCA4ONi5dumQ7R3h4uPG3v/3N2LFjh7FlyxYjNDTU6NWrl6MuCSXMyy+/bPj4+BjfffedkZKSYlsuXrxoa/PSSy8Z9913n7Fp0yZj165dRlhYmBEWFmbbf+XKFaNu3bpGu3btjISEBGPdunVGpUqVjJEjRzriklDCvPHGG0ZcXJyRlJRk7N2713jjjTcMi8VibNiwwTAM7j8Uvb/O2mcY3IMlEUGqGJk5c6Zx3333Ga6urkaTJk2M7du3O7ok3CM2b95sSLphiYiIMAzj6hTo77zzjuHv72+4ubkZbdq0MQ4ePGh3jj/++MPo1auX4eXlZXh7exvPP/+8cf78eQdcDUqim91/koyYmBhbm0uXLhmvvPKKUb58ecPDw8Po0qWLkZKSYneeo0ePGh06dDDKli1rVKxY0Rg2bJhx+fLlIr4alET//Oc/jaCgIMPV1dWoVKmS0aZNG1uIMgzuPxS964MU92DJYzEMw3DMWBgAAAAAlEy8IwUAAAAAJhGkAAAAAMAkghQAAAAAmESQAgAAAACTCFIAAAAAYBJBCgAAAABMIkgBAAAAgEkEKQAAAAAwiSAFAAAAACYRpAAA95R+/frJYrHcsISHhzu6NADAPcTF0QUAAFDQwsPDFRMTY7fNzc3tpm0vX76sMmXK2G3Lzs6Wq6ur6X7zexwAoORhRAoAcM9xc3OT1Wq1W8qXLy9Jslgsmjt3rp588kl5enrqvffe05gxY/TQQw/pww8/VHBwsNzd3SVJycnJ6ty5s7y8vOTt7a1nnnlGp06dsvVzq+MAAPc+ghQAoNQZM2aMunTpon379umf//ynJOnw4cP6/PPP9cUXXyghIUG5ubnq3Lmzzp49q7i4OMXGxurXX39Vjx497M51/XEAgNKBR/sAAPecNWvWyMvLy27bm2++qTfffFOS1Lt3bz3//PN2+7Ozs7VkyRJVqlRJkhQbG6t9+/YpKSlJgYGBkqQlS5aoTp062rlzpxo3bnzT4wAApQNBCgBwz2ndurXmzp1rt83Pz8/2c6NGjW44JigoyC4MHThwQIGBgbYQJUm1a9eWr6+vDhw4YAtS1x8HACgdCFIAgHuOp6enatSocdv9ed
mW174AAKUP70gBAHATtWrV0vHjx3X8+HHbtsTERJ07d061a9d2YGUAgOKAESkAwD0nKytLqampdttcXFxUsWLFPJ+jbdu2qlevnvr06aNp06bpypUreuWVV9SyZcubPhoIAChdGJECANxz1q1bp4CAALulefPmps5hsVj05Zdfqnz58vr73/+utm3b6v7779eKFSsKqWoAQEliMQzDcHQRAAAAAFCSMCIFAAAAACYRpAAAAADAJIIUAAAAAJhEkAIAAAAAkwhSAAAAAGASQQoAAAAATCJIAQAAAIBJBCkAAAAAMIkgBQAAAAAmEaQAAAAAwCSCFAAAAACY9P8BuotSnh4RhXcAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error Distribution (MODEL C) (Test Set A): Mean = 16.15674668223636 Standard Deviation = 38.162560581973146\n", + "Error Distribution (MODEL C) (Test Set B): Mean = 15.774197648432851 Standard Deviation = 0.03993390753228745\n", + "Error Distribution (MODEL C) (Test Set C): Mean = 5.699794881410673 Standard Deviation = 1.1902720177778146\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "from sklearn.linear_model import LinearRegression\n", + "from sklearn.metrics import mean_squared_error\n", + "import matplotlib.pyplot as plt\n", + "\n", + "ds_trainA = pd.read_csv('/content/drive/MyDrive/Q2/train_set_A.csv')\n", + "ds_trainB = pd.read_csv('/content/drive/MyDrive/Q2/train_set_B.csv')\n", + "ds_trainC = pd.read_csv('/content/drive/MyDrive/Q2/train_set_C.csv')\n", + "ds_testA = pd.read_csv('/content/drive/MyDrive/Q2/test_set_A.csv')\n", + "ds_testB = pd.read_csv('/content/drive/MyDrive/Q2/test_set_B.csv')\n", + "ds_testC = pd.read_csv('/content/drive/MyDrive/Q2/test_set_C.csv')\n", + "\n", + "x_train_A, y_train_A = ds_trainA[['x']], ds_trainA['y']\n", + "x_train_B, y_train_B = ds_trainB[['x']], ds_trainB['y']\n", + "x_train_C, y_train_C = ds_trainC[['x']], ds_trainC['y']\n", + "x_test_A, y_test_A = ds_testA[['x']], ds_testA['y']\n", + "x_test_B, y_test_B = ds_testB[['x']], ds_testB['y']\n", + "x_test_C, y_test_C = ds_testC[['x']], ds_testC['y']\n", + "\n", + "model_A = LinearRegression().fit(x_train_A, y_train_A)\n", + "model_B = LinearRegression().fit(x_train_B, y_train_B)\n", + "model_C = LinearRegression().fit(x_train_C, y_train_C)\n", + "\n", + "\n", + "# now since it's not necessary for train set A and test set A to come from the same distribution\n", + "# in order to find the actual data distribution, we'll have to see which train and test set pair\n", + "# has the least MSE, that pair would be 
the one coming from the actual distribution\n", + "\n", + "mse_AA= mean_squared_error(y_test_A, model_A.predict(x_test_A))\n", + "mse_AB= mean_squared_error(y_test_B, model_A.predict(x_test_A))\n", + "mse_AC= mean_squared_error(y_test_C, model_A.predict(x_test_A))\n", + "\n", + "mse_BA = mean_squared_error(y_test_A, model_B.predict(x_test_B))\n", + "mse_BB = mean_squared_error(y_test_B, model_B.predict(x_test_B))\n", + "mse_BC = mean_squared_error(y_test_C, model_B.predict(x_test_B))\n", + "\n", + "mse_CA = mean_squared_error(y_test_A, model_C.predict(x_test_C))\n", + "mse_CB = mean_squared_error(y_test_B, model_C.predict(x_test_C))\n", + "mse_CC = mean_squared_error(y_test_C, model_C.predict(x_test_C))\n", + "\n", + "print(\"Mean Squared Error (MODEL A) (Test Set A):\", mse_AA)\n", + "print(\"Mean Squared Error (MODEL A) (Test Set B):\", mse_AB)\n", + "print(\"Mean Squared Error (MODEL A) (Test Set C):\", mse_AC)\n", + "\n", + "print(\"Mean Squared Error (MODEL B) (Test Set A):\", mse_BA)\n", + "print(\"Mean Squared Error (MODEL B) (Test Set B):\", mse_BB)\n", + "print(\"Mean Squared Error (MODEL B) (Test Set C):\", mse_BC)\n", + "\n", + "print(\"Mean Squared Error (MODEL C) (Test Set A):\", mse_CA)\n", + "print(\"Mean Squared Error (MODEL C) (Test Set B):\", mse_CB)\n", + "print(\"Mean Squared Error (MODEL C) (Test Set C):\", mse_CC)\n", + "\n", + "errors=np.array([mse_AA,mse_AB,mse_AC,mse_BA,mse_BB,mse_BC,mse_CA,mse_CB,mse_CC])\n", + "min_error=np.min(errors)\n", + "print(\"Minimum error: \",min_error)\n", + "\n", + "# clearly it's train and test set B that comes from the original distribution\n", + "\n", + "# now let's plot stuff\n", + "\n", + "error_AA = np.power((y_test_A - model_A.predict(x_test_A)),2)\n", + "error_AB = np.power((y_test_B - model_A.predict(x_test_B)),2)\n", + "error_AC = np.power((y_test_C - model_A.predict(x_test_C)),2)\n", + "\n", + "error_BA = np.power((y_test_A - model_B.predict(x_test_A)),2)\n", + "error_BB = np.power((y_test_B - 
model_B.predict(x_test_B)),2)\n", + "error_BC = np.power((y_test_C - model_B.predict(x_test_C)),2)\n", + "\n", + "error_CA = np.power((y_test_A - model_C.predict(x_test_A)),2)\n", + "error_CB = np.power((y_test_B - model_C.predict(x_test_B)),2)\n", + "error_CC = np.power((y_test_C - model_C.predict(x_test_C)),2)\n", + "\n", + "\n", + "# model A\n", + "plt.figure(figsize=(10, 6))\n", + "plt.hist(error_AA, bins='auto', alpha=0.7, label='Test Set A')\n", + "plt.hist(error_AB, bins='auto', alpha=0.7, label='Test Set B')\n", + "plt.hist(error_AC, bins='auto', alpha=0.7, label='Test Set C')\n", + "plt.xlabel('Error')\n", + "plt.ylabel('Frequency')\n", + "plt.title('Error Distribution Model A')\n", + "plt.legend()\n", + "plt.show()\n", + "\n", + "distribution_AA = np.mean(error_AA), np.std(error_AA)\n", + "distribution_AB = np.mean(error_AB), np.std(error_AB)\n", + "distribution_AC = np.mean(error_AC), np.std(error_AC)\n", + "\n", + "print(\"Error Distribution (MODEL A) (Test Set A): Mean =\", distribution_AA[0], \"Standard Deviation =\", distribution_AA[1])\n", + "print(\"Error Distribution (MODEL A) (Test Set B): Mean =\", distribution_AB[0], \"Standard Deviation =\", distribution_AB[1])\n", + "print(\"Error Distribution (MODEL A) (Test Set C): Mean =\", distribution_AC[0], \"Standard Deviation =\", distribution_AC[1])\n", + "\n", + "# model B\n", + "\n", + "plt.figure(figsize=(10, 6))\n", + "plt.hist(error_BA, bins='auto', alpha=0.7, label='Test Set A')\n", + "plt.hist(error_BB, bins='auto', alpha=0.7, label='Test Set B')\n", + "plt.hist(error_BC, bins='auto', alpha=0.7, label='Test Set C')\n", + "plt.xlabel('Error')\n", + "plt.ylabel('Frequency')\n", + "plt.title('Error Distribution Model B')\n", + "plt.legend()\n", + "plt.show()\n", + "\n", + "distribution_BA = np.mean(error_BA), np.std(error_BA)\n", + "distribution_BB = np.mean(error_BB), np.std(error_BB)\n", + "distribution_BC = np.mean(error_BC), np.std(error_BC)\n", + "\n", + "print(\"Error Distribution (MODEL B) 
(Test Set A): Mean =\", distribution_BA[0], \"Standard Deviation =\", distribution_BA[1])\n", + "print(\"Error Distribution (MODEL B) (Test Set B): Mean =\", distribution_BB[0], \"Standard Deviation =\", distribution_BB[1])\n", + "print(\"Error Distribution (MODEL B) (Test Set C): Mean =\", distribution_BC[0], \"Standard Deviation =\", distribution_BC[1])\n", + "\n", + "# model C\n", + "\n", + "plt.figure(figsize=(10, 6))\n", + "plt.hist(error_CA, bins='auto', alpha=0.7, label='Test Set A')\n", + "plt.hist(error_CB, bins='auto', alpha=0.7, label='Test Set B')\n", + "plt.hist(error_CC, bins='auto', alpha=0.7, label='Test Set C')\n", + "plt.xlabel('Error')\n", + "plt.ylabel('Frequency')\n", + "plt.title('Error Distribution Model C')\n", + "plt.legend()\n", + "plt.show()\n", + "\n", + "distribution_CA = np.mean(error_CA), np.std(error_CA)\n", + "distribution_CB = np.mean(error_CB), np.std(error_CB)\n", + "distribution_CC = np.mean(error_CC), np.std(error_CC)\n", + "\n", + "print(\"Error Distribution (MODEL C) (Test Set A): Mean =\", distribution_CA[0], \"Standard Deviation =\", distribution_CA[1])\n", + "print(\"Error Distribution (MODEL C) (Test Set B): Mean =\", distribution_CB[0], \"Standard Deviation =\", distribution_CB[1])\n", + "print(\"Error Distribution (MODEL C) (Test Set C): Mean =\", distribution_CC[0], \"Standard Deviation =\", distribution_CC[1])\n", + "\n", + "# by comparing each of the parameters, we can see how mean and standard deviation are also lowest for train and test set B pair.\n", + "# test set A always has the highest error for each model\n", + "# further, test set A gives least error on model C, test set B on model B and test set C on model A\n", + "# we can see the second least error is for model A on test set C\n", + "\n", + "# We can observe these plots and see how they're different, however I can't see what vital information we can obtain from this" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "h-IYfEDyUQ7e" + }, + 
"source": [ + "#### __Question 3__\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 125, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "E9xIHA5vhr7y", + "outputId": "3d385032-ad88-4d29-b05f-7e0c33c19529" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "True weights are, w0= 4 and w1= 3\n", + "Maximum iterations reached.\n", + "Weights estimated by Batch Gradient Descent are, w0= 3.114909961958382 and w1= 22.272814185384274\n", + "Converged in 1 iterations.\n", + "Weights estimated by Fisher Scoring are, w0= 2.501166396975581 and w1= 0.3212582200477815\n" + ] + } + ], + "source": [ + "from os import waitstatus_to_exitcode\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from sklearn.linear_model import LogisticRegression\n", + "\n", + "\n", + "n=1000\n", + "f=1\n", + "x=np.random.normal(0, 1, (n,f))\n", + "w0=4\n", + "w1=3\n", + "w=np.array([w0,w1])\n", + "x_b=np.ones(n)\n", + "x_new=np.column_stack((x_b,x))\n", + "z=np.dot(x_new,w)\n", + "p=1 / (1 + np.exp(-z))\n", + "y=np.random.binomial(1, p)\n", + "print(\"True weights are, w0= \",w0,\" and w1= \",w1)\n", + "\n", + "\n", + "def loglikelihood(w0,w1,x,y):\n", + " LL=0\n", + " m=x.shape[0]\n", + " for i in range(m):\n", + " z=w0+w1*x[i]\n", + " p=1/(1+np.exp(-z))\n", + " LL+=y[i]*(np.log(p))+(1-y[i])*(np.log(1-p))\n", + " return LL\n", + "\n", + "def cost_function(w0,w1,x, y):\n", + " m=x.shape[0]\n", + " C=0\n", + " for i in range(m):\n", + " z=w0+w1*x[i]\n", + " p=1 / (1 + np.exp(-z))\n", + " C+= np.power((p-y[i]),2)\n", + " return C\n", + "\n", + "def batchgradientdescent(w0,w1,x,y,l,i,c):\n", + " converged=False\n", + " i_current=0\n", + " m=x.shape[0]\n", + " m=x.shape[0]\n", + " J=0\n", + " for i in range(m):\n", + " z__=w0+w1*x[i]\n", + " p__=1 / (1 + np.exp(-z__))\n", + " J+= np.power((p__-y[i]),2)\n", + "\n", + " while not converged:\n", + "\n", + " w=np.array([w0,w1])\n", + " 
x_b=np.ones(m)\n", + " x_new=np.column_stack((x_b,x))\n", + " z=np.dot(x_new,w)\n", + " p=1/(1+np.exp(-z))\n", + "\n", + " update0=np.sum((1/m)*(p-y))\n", + " update1=np.sum((1/m)*((p-y)*x))\n", + "\n", + " temp_w0=w0-l*update0\n", + " temp_w1=w1-l*update1\n", + "\n", + " w0=temp_w0\n", + " w1=temp_w1\n", + "\n", + " e=0\n", + " for i in range(m):\n", + " z_=w0+w1*x[i]\n", + " p_=1 / (1 + np.exp(-z_))\n", + " e+= np.power((p_-y[i]),2)\n", + " if abs(J-e)<=c:\n", + " print(\"Converged in: \",i_current,\" iterations.\")\n", + " converged=True\n", + "\n", + " J=e\n", + " i_current+=1\n", + "\n", + " if i_current==i:\n", + " print(\"Maximum iterations reached.\")\n", + " converged=True\n", + " return w0,w1\n", + "\n", + "bgd_estimated_w0,bgd_estimated_w1=batchgradientdescent(0,0,x,y,0.01,5000,0.0001)\n", + "print(\"Weights estimated by Batch Gradient Descent are, w0= \",bgd_estimated_w0,\" and w1= \",bgd_estimated_w1)\n", + "\n", + "# I can't figure out why it's giving such a shitty results? 
:( Sometimes it's giving a reasonable value and sometimes it's just absurd (especially w1), there's no consistency\n", + "\n", + "def fisherscoring(X,Y,c):\n", + "\n", + " # desgin matrix X\n", + "\n", + " # response vector is y as usual, written as Y\n", + "\n", + " # probability vector p\n", + " w_initial=np.matrix(np.ones(np.shape(X)[1])).T\n", + " z = np.dot(X,w_initial)\n", + " p_initial=1/(1+np.exp(-z))\n", + "\n", + " # weight matrix\n", + " W_= (np.array(p_initial) * np.array(1 - p_initial))\n", + " W= np.matrix(np.diag(W_[:, 0]))\n", + "\n", + " # c is the threshold value above which iterations continue\n", + " c=0.001\n", + "\n", + " #First derivative of Log-Likelihood with respect to each weight\n", + " U=(X.T)*(Y-p_initial)\n", + "\n", + " # Second derivative of Log-Likelihood with respect to each weight\n", + " H=(X.T)*(W)*X\n", + "\n", + " i_current=0\n", + " while True:\n", + " i_current+=1\n", + " w_current=w_initial+ (np.linalg.inv(H))*U\n", + "\n", + " if (np.abs(np.array(w_current)-np.array(w_initial)) < c).all:\n", + " model_parameters = w_current\n", + " break\n", + " else:\n", + " z=np.dot(X,w_current)\n", + " p_initial=1/(1+np.exp(-z))\n", + " W_ = (np.array(p_initial) * np.array(1 - p_initial))\n", + " W = np.matrix(np.diag(W_[:, 0]))\n", + " H= X.T * W * X\n", + " U= X.T * (Y - p_initial)\n", + " w_initial= w_current\n", + " print(\"Converged in \",i_current,\" iterations.\")\n", + " return np.array(model_parameters)\n", + "\n", + "fs_estimated_w0,fs_estimated_w1=fisherscoring(x_new,y,0.001)\n", + "print(\"Weights estimated by Fisher Scoring are, w0= \",fs_estimated_w0[0],\" and w1= \",fs_estimated_w1[0])\n", + "\n", + "# again there's some issue in the value of w1 and weirdly, there's only one iteration happening? :(\n", + "# gave up after a while cause everything seems absurd now ugh" + ] + }, + { + "cell_type": "markdown", + "source": [ + "#### __Question 4__\n", + "\n", + "a. 
The sample size n is extremely large, and the number of predictors p is small.\n", + "\n", + "The performance of a flexible model would be better. This is because n is extremely large, so we have a lot of information to learn from. This is good because the problem of overfitting reduces. Overfitting usually occurs when we use a flexible model for small n. That's not the case here, though. We have sufficient data to ensure we don't overfit. Besides the number of predictors (independent variables is small) so we have a simpler predictor-response relationship and more generalisation.\n", + "For a flexible model, we usually have high variance and low bias. High variance will occur here if there is a very non-linear relationship between predictor (input) and the outcome (response). But with smaller number of features, we can expect a simpler model mostly, so the variance shouldn't be very high.\n", + "\n", + "b. The number of predictors p is extremely large, and the number of observations n is small.\n", + "\n", + "Now, here an inflexible model is better.\n", + "Small n means the risk of overfitting is high with a flexible model. Plus, the number of predictors is extremely large, so that increases the risk because we have noise in the data. So, it's better we have an inflexible model such that we can have at least some generalisation.\n", + "Inflexible model implies lower variance and high number of predictors implies the possibility of a complex relationship between response and predictors so we might have low bias. In case there is simple relation (as should be for an inflexible model), we can expect higher bias.\n", + "\n", + "c. The relationship between the predictors and response is highly non-linear.\n", + "\n", + "A flexible model is better since it can capture the intricacies of the non-linear relationship hence give more accurate results. An inflexible model will miss out most details. 
However, depending on the sample size, we might see overfitting of data.\n", + "Highly flexible model means lower bias but higher variance." + ], + "metadata": { + "id": "FWwl6GCEARkb" + } + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/assMath/ass2a/2A_Anupriya.ipynb b/assMath/ass2a/2A_Anupriya.ipynb new file mode 100644 index 00000000..55c657f6 --- /dev/null +++ b/assMath/ass2a/2A_Anupriya.ipynb @@ -0,0 +1,572 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "## **Assignment 2A**\n" + ], + "metadata": { + "id": "Nn59ntocyMfK" + } + }, + { + "cell_type": "markdown", + "source": [ + "### __Question 1__\n", + "\n", + "The posterior, likelihood and prior are related according to,\n", + "\n", + "$ P(\\theta | y) = \\frac{P(y | \\theta) P(\\theta)}{P(y)} $\n", + "\n", + "Let A denote that Alice is working, B denote that Bob is working and $d$ denote the data that the boss has collected.\n", + "\n", + "$ \\lambda_a (=10) $ is the average number of tickets Alice collects and $ \\lambda_b (=15) $ is the average number of tickets Bob collects and note that the number of tickets $ X$ that they collect is modelled by a possion distribution,\n", + "\n", + "$ P(X=x)= \\frac{\\lambda ^x e^{-\\lambda}}{x!}$\n", + "\n", + "Now, odds that Alice is working in place of Bob = $O(A)=\\frac{P(A)}{P(B)}=\\frac{1}{10}$\n", + "\n", + "Thus, $P(A)=\\frac{1}{11}$ and $P(B)=\\frac{10}{11}$\n", + "\n", + "Further, we need the posterior odds that Alice is filling in for Bob, that is,\n", + "\n", + "$O($Alice 
works|data$)=\\frac{P(Alice\\;works|data)}{P(Bob\\;works|data)}=\\frac{P(A|d)}{P(B|d)}$\n", + "\n", + "Here,\n", + "\n", + "$P(A|d)=\\frac{P(d|A).P(A)}{P(d)}=\\frac{\\prod\\limits_{i=1}^{5}\\frac{\\lambda_a ^{x_i} e^{-\\lambda}}{x_i!}.P(A)}{P(d)} = \\frac{\\frac{10^{12+10+11+4+11} e^{-50}}{12! 10! 11! 4! 11!}}{P(d)}.\\frac{1}{11}$\n", + "\n", + "$P(B|d)=\\frac{P(d|B). P(B)}{P(d)}= \\frac{\\prod\\limits_{i=1}^{5}\\frac{\\lambda_b ^{x_i} e^{-\\lambda}}{x_i!}.P(B)}{P(d)} =\\frac{ \\frac{15^{12+10+11+4+11} e^{-75}}{12! 10! 11! 4! 11!}}{P(d)}. \\frac{10}{11}$\n", + "\n", + "$O($Alice works|data$)=\\frac{P(d|A)}{P(d|B)}\\frac{P(A)}{P(B)} = (\\frac{10}{15})^{48}e^{25} (\\frac{1}{10}) ≈ 25.409$" + ], + "metadata": { + "id": "mFqla4Vxeys9" + } + }, + { + "cell_type": "markdown", + "source": [ + "### __Question 2__\n", + "\n", + "#### __Part (a)__\n", + "According to the question,\n", + "\n", + "$f(\\theta)\\sim N(5,9) \\implies P(\\theta)= \\frac{1}{3\\sqrt{2\\pi}}e^{\\frac{{-(\\theta-5)^2}}{18}}$ ...prior\n", + "\n", + "$f(x|\\theta) \\sim N(\\theta,4) \\implies P(x|\\theta)=\\frac{1}{2\\sqrt{2\\pi}}e^{\\frac{{-(x-\\theta)^2}}{8}}$ ...likelihood\n", + "\n", + "Where x=6, hence,\n", + "\n", + "$P(x=6|\\theta)= \\frac{1}{2\\sqrt{2\\pi}}e^{\\frac{{-(6-\\theta)^2}}{8}} $\n", + "\n", + "Then from the usual Bayesian update table,\n", + "\n", + "Posterior, i.e., $f(\\theta|x)=f(x|\\theta).f(\\theta)=\\frac{1}{12\\pi}(e^{-\\frac{(\\theta-5)^2}{18}}.e^{\\frac{-(6-\\theta)^2}{8}}) = \\frac{1}{12\\pi}(e^{{\\frac{-(\\theta-5)^2}{18}}-{\\frac{(6-\\theta)^2}{8}}})$\n", + "\n", + "We need to modify the posterior to obtain what the mean and variance will be for the Normal distribution,\n", + "\n", + "$\\frac{(\\theta-5)^2}{18}+ \\frac{(6-\\theta)^2}{8}= \\frac{1}{2}(\\frac{\\theta^2-10\\theta+25}{9} + \\frac{\\theta^2-12\\theta+36}{4})$\n", + "$= \\frac{1}{2}(\\frac{13\\theta^2-148\\theta+424}{36})$\n", + "$=\\frac{1}{2}(13\\frac{(\\theta^2-\\frac{74}{13})^2+36}{36}) $\n", + 
"$=\\frac{1}{2}(\\frac{(\\theta^2-\\frac{74}{13})^2+ 36}{\\frac{36}{13}})$\n", + "\n", + "Hence,\n", + "$f(\\theta|x) \\propto e^{-\\frac{1}{2}(\\frac{(\\theta^2-\\frac{74}{13})^2}{\\frac{36}{13}})}$\n", + "\n", + "$\\therefore f(\\theta|x)\\sim N(\\frac{74}{13},\\frac{36}{13})$" + ], + "metadata": { + "id": "-O6Dk3i4ywGa" + } + }, + { + "cell_type": "markdown", + "source": [ + "#### __Part (b)__\n", + "\n", + "Using the formulas given to us,\n", + "\n", + "$\\sigma^2 = 4$\n", + "\n", + "$\\mu_{prior}=5$\n", + "\n", + "$\\sigma_{prior}^2=9$\n", + "\n", + "$\\bar{x}=6$\n", + "\n", + "$n=4$\n", + "\n", + "We get,\n", + "\n", + "$a=\\frac{1}{\\sigma_{prior}^2}=\\frac{1}{9}$\n", + "\n", + "$b=\\frac{n}{\\sigma^2}=1$\n", + "\n", + "$\\mu_{post}=\\frac{{a\\mu_{prior}+b\\bar{x}}}{a+b}=\\frac{\\frac{5}{9}+6}{\\frac{1}{9}+1}=5.9$\n", + "\n", + "$\\sigma_{post}^2=\\frac{1}{a+b}=\\frac{1}{\\frac{1}{9}+1}=0.9$\n", + "\n", + "Thus, the posterior on theta, $f(\\theta|x)\\sim N(5.9,0.9)$\n", + "\n", + "The plot showing the prior and posterior is-" + ], + "metadata": { + "id": "pJyTh7MjFFL_" + } + }, + { + "cell_type": "code", + "source": [ + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from scipy.stats import norm\n", + "x = np.arange(-10, 30, 0.0001)\n", + "plt.plot(x, norm.pdf(x, 5, 3), label='μ: 5, σ^2: 9')\n", + "plt.plot(x, norm.pdf(x, 5.9, 0.948683298), label='μ:5.9, σ^2: 0.9')\n", + "\n", + "plt.legend()" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 447 + }, + "id": "ZX6hDNLZI-7n", + "outputId": "a5595ae1-6759-4fe3-c96a-466e4857f80d" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "" + ] + }, + "metadata": {}, + "execution_count": 8 + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "
" + ], + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABQk0lEQVR4nO3deVzUdf4H8NfMwAw3iJwigvct5AGRa2mSWFZ2rh2bR61mZYfUVrSb2onmka35y47VbLU0W+1Uy0gsk7JQvKVEEEW5PAbkmIGZz++PYUZQ0BmYme93htfz8ZhmmPke72+ovPhcX4UQQoCIiIhIIkqpCyAiIqL2jWGEiIiIJMUwQkRERJJiGCEiIiJJMYwQERGRpBhGiIiISFIMI0RERCQphhEiIiKSlIfUBVjDaDTi5MmT8Pf3h0KhkLocIiIisoIQApWVlejUqROUypbbP1wijJw8eRLR0dFSl0FEREStcPz4cXTu3LnFz10ijPj7+wMwXUxAQIDE1RAREZE1KioqEB0dbfk53hKXCCPmrpmAgACGESIiIhdzpSEWHMBKREREkmIYISIiIkkxjBAREZGkXGLMCBERtZ7BYEBdXZ3UZZAbUqlU8PDwaPOyGwwjRERu7Pz58zhx4gSEEFKXQm7Kx8cHkZGRUKvVrT4GwwgRkZsyGAw4ceIEfHx8EBoaykUjya6EENDr9SgrK0N+fj569ux52YXNLodhhIjITdXV1UEIgdDQUHh7e0tdDrkhb29veHp64tixY9Dr9fDy8mrVcTiAlYjIzbFFhBypta0hTY5hhzqIiIiIWo1hhIiIyE42bNgADw8P9OrVC6WlpVKX4zIYRoiIyC2NHDkSCoWiyWP69OkOO9/WrVtx3333Yc6cOQgLC8PYsWNRUVHRZJuCggI89NBD6Nq1K7y9vdG9e3fMnj0ber3e5vN9+umniI+Ph4+PD2JiYjB//nx7XYrTcQArERG5ralTp+Lll1+2fO3j4+OQ82RnZ+P222/Hm2++ienTp2PmzJkYP348xo8fj82bN0Oj0QAADh8+DKPRiHfffRc9evTA/v37MXXqVFRVVWHBggVWn2/Tpk24//77sWTJEowZMwaHDh3C1KlT4e3tjRkzZjjkGh2JLSNE7qD8CLDtDaD0sNSVELVZbGwsFi9e3OS9+Ph4zJkzx+Zj+fj4ICIiwvKw9WarBoMBL7zwAqKioqBSqZq0skyePBkAkJubi5tvvhlLliyxtLz4+vrim2++gb+/P+69914YDAYAwNixY7FixQqMGTMG3bp1w6233opnnnkG69evt6mu//73v7jtttswffp0dOvWDePGjUNaWhrmzZvnkmvKsGWEyNXVVgArxgJVZcCvy4DHswHvDlJXRTIkhEBNnUGSc3t7quw6q2fy5MkoKChAZmbmZbdbvXo1Vq1ahYiICNxyyy148cUXbWodWb58ORYtWoQlS5Zg5MiR2Lx5M2bOnImHH34Yf/vb3wAAvXv3xqlTpy7ZV6PR4Msvv7ziObRaLYKDg5u8p1AosGLFCkvguZhOp7vkOry9vXHixAkcO3YMsbGx1l2gTDCMELm6PWtMQQQAqk8De9YCVzuuX5xcV02dAf1mfSvJuQ++nAIftf1+5ERGRsJoNF52m/vuuw8xMTHo1KkT9u7di+eeew65ubk2tUIsW7YMU6ZMwdSpUwEAPXv2xPbt23HixAkkJSW16RoA4MiRI1iyZMklXTS9e/dGYGBgi/ulpKRg5syZmDx5MkaNGoUjR45g4cKFAIBTp04xjBCRk/2xyfQc1AU4Vwgc+ophhNxeenr6FbeZNm2a5fXAgQMRGRmJ0aNHIy8vD927d7fqPEeOHMFTTz3V5L3hw4fjzTfftKne5hQVFWHs2LG4++67LWHH7PDhy3e5Tp06FXl5ebj55ptRV1eHgIAAPPnkk5gzZ45d1v1wNoYRIldWrweOZZlej3kV+HQiUPQ7UK8DP
DTS1kay4+2pwsGXUyQ7d1uYx1y0RWJiIgBTwLA2jHh6el5yboPBAJWqbddz8uRJjBo1Ctdccw3ee+89m/dXKBSYN28eXn/9dRQXFyM0NBQZGRkAgG7durWpNikwjBC5suK9QH0N4NMR6HML4BMCVJcDp/YC0cOkro5kRqFQ2LWrxJFKSkosr+vq6nD8+PE2HzMnJweAqYvHWv3798fPP//cZOzGzz//jL59+7a6jqKiIowaNQpDhgzBihUr2tSSoVKpEBUVBQD45JNPkJSUhNDQ0FYfTyqu8aeSiJpXetD0HDEQUCqByDggLwMo2c8wQi5t+fLlGD16NGJiYvDWW29Bq9UiLy8PJSUlCA8PR1paGoqKivDRRx81u39eXh4+/vhj3HTTTejYsSP27t2LmTNn4tprr8WgQYOsruPZZ5/F7bffjsGDByM5ORlfffUVPv/8c/zwww+tuq6ioiKMHDkSMTExWLBgAcrKyiyfRUREWF736dMH6enpuP3225s9Tnl5OT777DOMHDkStbW1WLFiBdatW4dt27a1qi6puV7HEhFdYJ7KG9av4bnht7XSQ9LUQ2Qnt9xyC5544gkMHDgQZ86cwauvvor169fj+++/B2AapFlYWNji/mq1Gt9//z3GjBmDPn364Omnn8add96Jr776qsl2sbGxl50yPG7cOCxduhQLFy5E//798e6772LlypW49tprW3VdW7ZswZEjR5CRkYHOnTsjMjLS8mgsNzcXWq32ssdauXIlhg4diuHDh+PAgQPIzMxEQkJCq+qSmkK4wITkiooKBAYGQqvV2jxHnMitfXQbcHQrcOsSYPBEYPdq4ItHgdgRwOSvpa6OJFZbW4v8/Hx07dq11XdTlUJsbCyeeuqpSwaO2lt1dTU6duyITZs2YeTIkQ49lzu73J8za39+s2WEyJWZW0BCG1pEwvqYnstypamHyIVs3boV119/PYOIDDCMELmquhrgfLHpdceGmQEdupqeq0oBfbU0dRG5iHHjxuGbb76RugwCB7ASua5zDbML1P4XVlz17gBoAgBdhWnNEXNLCZELKSgokLoEcjK2jBC5Km3D4L2gLoB5mW2FAgiKMb0+d0yauoiIbMQwQuSqzjUKI411aAgjZxlGiMg1tCqMLF26FLGxsfDy8kJiYiJ27txp1X5r1qyBQqHAbbfd1prTElFjljAS3fT9DrENnzOMEJFrsDmMrF27FqmpqZg9ezZ27dqFuLg4pKSkoLS09LL7FRQU4JlnnsGIESNaXSwRNdJSy4j563Mtr8FARCQnNoeRRYsWYerUqZgyZQr69euHZcuWwcfHB8uXL29xH4PBgPvvvx8vvfSSS66ZTyRLLYUR/4bFk86XgIjIFdgURvR6PbKzs5GcnHzhAEolkpOTkZWV1eJ+L7/8MsLCwvDQQw9ZdR6dToeKioomDyK6SMUp03NAVNP3zWGk8pRz6yEiaiWbwkh5eTkMBgPCw8ObvB8eHo7i4uJm99m+fTv+85//4P3337f6POnp6QgMDLQ8oqOjr7wTUXsixIWWD7+mfx/h33B/i8pi03ZEJCtLliyBQqHANddcg+pqrgcEOHg2TWVlJR544AG8//77CAkJsXq/tLQ0aLVay8Med2skcis1ZwFjnem1X1jTz8zhxKAHqs84ty4iiRQUFEChUFzy+OWXXy67X0ZGBq655hr4+/sjIiICzz33HOrr6x1W5+rVq/HMM8/g3//+N86cOYM777wTdXV1TbbZs2cP7r33XkRHR8Pb2xt9+/bFW2+91arzZWZmYvDgwdBoNOjRowc+/PDDK+7z6aefIj4+Hj4+PoiJicH8+fNbdW5b2LToWUhICFQqVZNbOwOmWz03vtugWV5eHgoKCnDLLbdY3jMajaYTe3ggNzcX3bt3v2Q/jUYDjUZjS2lE7Yu5VcS7A+Bx0d8VDzXgEwJUl5u6anw7Or8+Iol8//336N+/v+Xrjh1b/vO/Z88e3HTTTfjnP/+Jj
z76CEVFRZg+fToMBgMWLFhg99o2btyI6dOnY926dbj11lsxYcIE3HDDDZg8eTJWrVoFRcN6QdnZ2QgLC8OqVasQHR2NHTt2YNq0aVCpVJgxY4bV58vPz8e4ceMwffp0rF69GhkZGfj73/+OyMhIpKSkNLvPpk2bcP/992PJkiUYM2YMDh06hKlTp8Lb29umc9tM2CghIUHMmDHD8rXBYBBRUVEiPT39km1ramrEvn37mjzGjx8vrr/+erFv3z6h0+msOqdWqxUAhFartbVcIveUt1WI2QFCvJ3Q/Of/N9z0+R9bnFoWyUtNTY04ePCgqKmpkboUm8TExIg333yzyXtxcXFi9uzZLe6Tn58vAIjdu3dbfZ60tDQxdOjQJu99+eWXwsvLS1RUVFh9nKqqKjFt2jQRGhoqFAqFAGB5mGvevn27CAkJEd99912Tfc+cOSMSEhLE448/ftlzPProo2LUqFFW1ySEEM8++6zo379/k/cmTJggUlJSWtzn3nvvFXfddVeT9/7973+Lzp07C6PR2Ow+l/tzZu3Pb5uXg09NTcWkSZMwdOhQJCQkYPHixaiqqsKUKVMAABMnTkRUVBTS09Ph5eWFAQMGNNk/KCgIAC55n4hscL5hKv3F40XM/COAkn0cxEpNCQHUSTRGwdPnwkrBdjB58mQUFBQgMzOzyfu33noramtr0atXLzz77LO49dZbWzyGTqe75C6z3t7eqK2tRXZ2ttU30Hvttdfw6aef4v3338egQYOwatUqvPLKK5g9ezbuuOMOAMDw4cNRVlZ2yb4dOnTAr7/+esVzaLVaBAcHW74uKChA165dsXXr1hbrzMrKajLhBABSUlIuezdknU4HHx+fJu95e3vjxIkTOHbsGGJjY69Ya2vYHEYmTJiAsrIyzJo1C8XFxYiPj8fmzZstg1oLCwuhVHJhVyKHqmwYMH65MAIwjFBTddXA652kOfcLJwG1r90OFxkZaen2BwA/Pz8sXLgQw4cPh1KpxP/+9z/cdttt+Pzzz1sMJCkpKVi8eDE++eQT/PWvf0VxcTFefvllAMCpU9b/3Vm2bBmef/553HXXXQBMM0i//fZbnD17FoMGDWrDVZrs2LEDa9eubXJTP09PT/Tu3fuS4NBYcXFxsxNOKioqUFNTA29v70v2SUlJwcyZMzF58mSMGjUKR44cwcKFCwGY/p/IJowAwIwZM1rsO7o4pV7MmsEzRHQFlpk0Yc1/bn6/6tLfxIjcQXp6epOvQ0JCkJqaavl62LBhOHnyJObPn99iGBkzZgzmz5+P6dOn44EHHoBGo8GLL76In376yepfqs+ePYszZ87gmmuuafL+8OHDsXv3bhuv6lL79+/H+PHjMXv2bIwZM8byflRUFA4fPtzm419s6tSpyMvLw80334y6ujoEBATgySefxJw5cxza0MC79hK5InM3jf+lA8cBAL6hpmeGEWrM08fUQiHVudvAYDDYvE9iYiK2bNly2W1SU1Mxc+ZMnDp1Ch06dEBBQQHS0tKsXqDT09Oz2foMBgNUKpXNNTd28OBBjB49GtOmTcO//vUvm/ePiIhodsJJQEBAs60iAKBQKDBv3jy8/vrrKC4uRmhoKDIyMgDAoYuWMowQuaLzV+im8WmYSl9V7px6yDUoFHbtKnGkxj9E6+rqWrXEQ05ODiIjI6+4nUKhQKdOpu6rTz75BNHR0Rg8eLBV5/Dz80OXLl3w888/Nxm7sWPHDlx99dU212x24MABXH/99Zg0aRJee+21Vh0jKSkJGzdubPLeli1bkJSUdMV9VSoVoqJMCyp+8sknSEpKQmhoaKvqsAbDCJErsgxgbaGbxjydt/q0c+ohsrPly5dj9OjRiImJwVtvvQWtVou8vDyUlJQgPDwcaWlpKCoqwkcffQQAWLlyJdRqNa666ioAwPr167F8+XJ88MEHlmNu2LABaWlpTbo35s+fj7Fjx0KpVGL9+vWYO3cuPv30U5taNZ599lm88MIL6
NGjB+Lj47FixQrs2bMHa9asadW179+/H9dffz1SUlKQmppqWVRUpVJZAkFRURFGjx6Njz76CAkJCc0eZ/r06Xj77bfx7LPP4sEHH8QPP/yATz/9tMnYk7fffhsbNmywtH6Ul5fjs88+w8iRI1FbW4sVK1Zg3bp12LZtW6uuxVocaUrkiszdL74t/KbClhFycbfccgueeOIJDBw4EGfOnMGrr76K9evX4/vvvwdgGkxZWNj0ZpCvvPIKhgwZgsTERHzxxRdYu3atZaYnYJqRkpub22SfTZs2YcSIERg6dCi++eYbfPHFF5fcWV6hUFx2vOOjjz6KZ599Fk8//TQGDhyIb7/9Fl9//XWz62hZ47PPPkNZWRlWrVqFyMhIy2PYsGGWberq6pCbm3vZFVy7du2Kb775Blu2bEFcXBwWLlyIDz74oMkaI+Xl5cjLy2uy38qVKzF06FAMHz4cBw4cQGZmZouBx14UQsh/veiKigoEBgZCq9UiICBA6nKIpGU0Aq90BIQRePoPwL+ZrpqKk8CivoBCBbxYDnCGW7tUW1uL/Px8dO3a9ZIprHIWGxuLp5566rJTUJ0lPz8fvXr1wsGDB9GzZ0+py5Gly/05s/bnN/+FInI1tedMQQQwrcDaHJ+GbhphMG1PRK2yceNGTJs2jUHEwThmhMjVmO83owkwLf3eHA8NoAkEdFpTV41PcPPbEdFlPfbYY1KX0C4wjBC5mpqGMNJSq4iZb0dTGKkuB9DL4WUR2UtBQYHUJZCTsZuGyNWYZ8j4XOEGeBzESkQugmGEyNWYu2mu1PXi2xBGqhlGiEjeGEaIXI3VLSMNn1dxrZH2zgUmTZILs8efL4YRIldjbRgxt4xwSfh2y7xwl16vl7gScmfmtU7MS+O3BgewErkaywDWK3TTmD/n1N52y8PDAz4+PigrK4OnpyfvqE52JYRAdXU1SktLERQU1KZ78TCMELkaa8eMmGfb1Jx1bD0kWwqFApGRkcjPz8exY8ekLofcVFBQECIiWrhpp5UYRohcjdVhJMj0XHPOkdWQzKnVavTs2ZNdNeQQnp6ebb47McAwQuR6rB0zwpYRaqBUKl1qOXhqf9iBSORqzGHkimNGGEaIyDUwjBC5EqPxQriwpWWEUzuJSMYYRohciU5ruvkdcOUxI15BpmdhAPTnHVoWEVFbMIwQuRLz4FW1n+lmeJfj6Q2oGrZhVw0RyRjDCJErMa8ZYm71uByFguNGiMglMIwQuZJarenZK9C67RlGiMgFMIwQuRJzGDGvIXIlljByzhHVEBHZBcMIkSsxhwqrW0aCGvZjywgRyRfDCJErYTcNEbkhhhEiV2IJI0HWbc8wQkQugGGEyJVYZtPY2E3DO/cSkYwxjBC5Elu7acwtKGwZISIZYxghciWcTUNEbohhhMiVcAArEbkhhhEiV2Lr1F7zdrUVDimHiMgeGEaIXImts2k0AaZnndYh5RAR2QPDCJErsXkAa6OWEaPRMTUREbURwwiRq6irAQw602urw0hDywgEoD/vkLKIiNqKYYTIVZhbRRRKQO1n3T4eXoBKbXqt47gRIpInhhEiV9G4i0Zp5V9dheLCuJFajhshInliGCFyFbbOpDEzd9VwRg0RyRTDCJGrsHXwqpllECtbRohInhhGiFyFrdN6zSzTe9kyQkTyxDBC5CpsvUmeGVtGiEjmGEaIXEWrwwgHsBKRvDGMELkKW2+SZ6ZpCC/spiEimWIYIXIVHMBKRG6KYYTIVVim9gbZth+n9hKRzDGMELkKtowQkZtiGCFyFbpK07N5qq61OLWXiGSOYYTIVVjCiL9t+3E2DRHJHMMIkatodRgxd9OwZYSI5IlhhMhVtDaMsJuGiGSOYYTIFRgNQF2V6bWtY0bMLSN11YChzr51ERHZAcMIkSvQn7/wWuNn276Nwwu7aohIhhhGiFyBuYtGpQY8NLbtq/IAPH1Nr81LyhMRyQjDCJEraO14ETMvLglPRPLFMELkC
tocRji9l4jki2GEyBWYWzTa2jLCMSNEJEMMI0SuoLWrr5pxei8RyRjDCJEr0DXMpmlty4h5P935y29HRCQBhhEiV2BuGVHbOK3XzDwd2HwcIiIZYRghcgVtHcDKbhoikjGGESJX0NYBrJZuGraMEJH8MIwQuYI2D2BlGCEi+WIYIXIFbe6mYRghIvliGCFyBQwjROTGGEaIXAHDCBG5MYYRIldgvmuvrXfsNTOPNdEzjBCR/DCMELkCy2waDmAlIvfDMELkCthNQ0RujGGESO6EaHsYMa/catAD9Tr71EVEZCcMI0RyV18LGOtNr9vaMgKwdYSIZKdVYWTp0qWIjY2Fl5cXEhMTsXPnzha3Xb9+PYYOHYqgoCD4+voiPj4e//3vf1tdMFG7YwkPCsDTt3XHUKou7Msl4YlIZmwOI2vXrkVqaipmz56NXbt2IS4uDikpKSgtLW12++DgYPzzn/9EVlYW9u7diylTpmDKlCn49ttv21w8UbvQuItG2YbGTI4bISKZsvlftkWLFmHq1KmYMmUK+vXrh2XLlsHHxwfLly9vdvuRI0fi9ttvR9++fdG9e3c8+eSTGDRoELZv397m4onahbbesdeMYYSIZMqmMKLX65GdnY3k5OQLB1AqkZycjKysrCvuL4RARkYGcnNzce2117a4nU6nQ0VFRZMHUbvV1sGrZgwjRCRTNoWR8vJyGAwGhIeHN3k/PDwcxcXFLe6n1Wrh5+cHtVqNcePGYcmSJbjhhhta3D49PR2BgYGWR3R0tC1lErkXu4eR8207DhGRnTllNo2/vz9ycnLw22+/4bXXXkNqaioyMzNb3D4tLQ1ardbyOH78uDPKJJInu4cRtjQSkbx42LJxSEgIVCoVSkpKmrxfUlKCiIiIFvdTKpXo0aMHACA+Ph6HDh1Ceno6Ro4c2ez2Go0GGo3GltKI3Jdl9dW2hpGG1VvZTUNEMmNTy4harcaQIUOQkZFhec9oNCIjIwNJSUlWH8doNEKn48JLRFaxtIy0cil4M/N9bRhGiEhmbGoZAYDU1FRMmjQJQ4cORUJCAhYvXoyqqipMmTIFADBx4kRERUUhPT0dgGn8x9ChQ9G9e3fodDps3LgR//3vf/HOO+/Y90qI3BUHsBKRm7M5jEyYMAFlZWWYNWsWiouLER8fj82bN1sGtRYWFkLZaC2EqqoqPProozhx4gS8vb3Rp08frFq1ChMmTLDfVRC5M0sY4dReInJPNocRAJgxYwZmzJjR7GcXD0x99dVX8eqrr7bmNEQEAPqG2S8cwEpEbor3piGSO7t103AAKxHJE8MIkdxZZtO0dQAru2mISJ4YRojkjgNYicjNMYwQyZ29w4ieK7ASkbwwjBDJHVtGiMjNMYwQyZ297tqrbtQyYjS07VhERHbEMEIkZ4Z6oK7a9NpeA1gBdtUQkawwjBDJWePQ0NZFzzw0gNLT9JpdNUQkIwwjRHJmDg0qjSlMtIVCwXEjRCRLDCNEcmavwatmDCNEJEMMI0RyZvcwYl6FlUvCE5F8MIwQyZm9bpJnxpYRIpIhhhEiObPXUvBmljDC2TREJB8MI0RyxjEjRNQOMIwQyZl5ai/DCBG5MYYRIjmze8tIw9gTDmAlIhlhGCGSM86mIaJ2gGGESM4sA1jt3U3DAaxEJB8MI0RyZrlJHseMEJH7YhghkjPOpiGidoBhhEjOHBVGeNdeIpIRhhEiOeMAViJqBxhGiOTMPNDUXiuwqs1Te9lNQ0TywTBCJGcOm01TCQhhn2MSEbURwwiRXAnhuDEjxnqgvtY+xyQiaiOGESK5qqsBhMH02l537VU3Og67aohIJhhGiOTKEhYUgKevfY6pVF5Ys4RhhIhkgmGESK4ad9Eo7fhXlWuNEJHMMIwQyZW9B6+aaTijhojkhWGESK7MC5PZPYywZYSI5IVhhEiu7D2TxoxhhIhkhmGESK4cHka4CisRyQPDCJFcWe7Ya6dpvWbm1Vx5fxoikgmGESK5s
gxgtdNS8GbspiEimWEYIZIrjhkhonaCYYRIrhwVRnizPCKSGYYRIrliywgRtRMMI0RypXPUOiMNY1A4m4aIZIJhhEiuHLYCq7llhLNpiEgeGEaI5IrdNETUTjCMEMkVwwgRtRMMI0Ry5bAwwtk0RCQvDCNEcuWwMNIwgLWuCjAa7HtsIqJWYBghkiNDPVBfY3qtdlA3DcDWESKSBYYRIjlqPO3W3i0jHhpApTa95v1piEgGGEaI5MgcEjy8AA+1/Y/PQaxEJCMMI0Ry5KjxImYMI0QkIwwjRHLktDDCVViJSHoMI0Ry5OgwombLCBHJB8MIkRyZWyzsPZPGjN00RCQjDCNEcuS0bhrOpiEi6TGMEMkRB7ASUTvCMEIkRxzASkTtCMMIkRw5PIwEND0PEZGEGEaI5MjcYuGwMMKb5RGRfDCMEMmRpWUkwDHH55gRIpIRhhEiOXLWmBHem4aIZIBhhEiOOJuGiNoRhhEiOTKv/+HwAaycTUNE0mMYIZIjtowQUTvCMEIkR84cwCqEY85BRGQlhhEiuRHC8VN71Q1Te431QH2tY85BRGQlhhEiudFXAWhorXB0GAF4fxoikhzDCJHcmLtoFErA09sx51AqL9wRmINYiUhiDCNEctN48KpC4bjzcBArEckEwwiR3Dh68KoZwwgRyQTDCJHcOHrwqhnDCBHJBMMIkdw4eo0RM94sj4hkolVhZOnSpYiNjYWXlxcSExOxc+fOFrd9//33MWLECHTo0AEdOnRAcnLyZbcnavecFkbM96dhGCEiadkcRtauXYvU1FTMnj0bu3btQlxcHFJSUlBaWtrs9pmZmbj33nuxdetWZGVlITo6GmPGjEFRUVGbiydyS04LI+Yl4RlGiEhaNoeRRYsWYerUqZgyZQr69euHZcuWwcfHB8uXL292+9WrV+PRRx9FfHw8+vTpgw8++ABGoxEZGRltLp7ILTm7ZYRhhIgkZlMY0ev1yM7ORnJy8oUDKJVITk5GVlaWVceorq5GXV0dgoODbauUqL2wDGDlbBoiah88bNm4vLwcBoMB4eHhTd4PDw/H4cOHrTrGc889h06dOjUJNBfT6XTQ6XSWrysquCgTtSN6B9+x14xhhIhkwqmzaebOnYs1a9Zgw4YN8PLyanG79PR0BAYGWh7R0dFOrJJIYuymIaJ2xqYwEhISApVKhZKSkibvl5SUICIi4rL7LliwAHPnzsV3332HQYMGXXbbtLQ0aLVay+P48eO2lEnk2pwVRtQMI0QkDzaFEbVajSFDhjQZfGoejJqUlNTifm+88QZeeeUVbN68GUOHDr3ieTQaDQICApo8iNoNtowQUTtj05gRAEhNTcWkSZMwdOhQJCQkYPHixaiqqsKUKVMAABMnTkRUVBTS09MBAPPmzcOsWbPw8ccfIzY2FsXFxQAAPz8/+Pn5tXgeonbLPIBVzTBCRO2DzWFkwoQJKCsrw6xZs1BcXIz4+Hhs3rzZMqi1sLAQSuWFBpd33nkHer0ed911V5PjzJ49G3PmzGlb9UTuiC0jRNTO2BxGAGDGjBmYMWNGs59lZmY2+bqgoKA1pyBqvxhGiKid4b1piOTG2Suw1lUBRoNjz0VEdBkMI0RyUq8DDHrTa2fdKA+4sLYJEZEEGEaI5KRxl4mjw4iHBlCpLz0vEZGTMYwQyYl5Jo2nL6BUOf58HDdCRDLAMEIkJ84aL2LGMEJEMsAwQiQnkoUR3v+JiKTDMEIkJ04PIwFNz0tEJAGGESI50Tnpjr1mlpYRzqYhIukwjBDJiU5renZWGFE3TO9lywgRSYhhhEhOahvGbmicdHNIDmAlIhlgGCGSE/NAUq9A55yPA1iJSAYYRojkpLahm8bLWS0jHMBKRNJjGCGSE3bTEFE7xDBCJCdSddPw3jREJCGGESI5cXo3DWfTEJH0GEaI5ITdNETUDjGMEMmJ07tpGkJPLWfTEJF0GEaI5MTSTeOkMGI+j3mxNSIiCTCMEMmF0XBhI
KmzumnMYaS2AjAanXNOIqKLMIwQyUXjhcecvc4IBKDnuBEikgbDCJFcmLtoPLwAD41zzunpZTpf4/MTETkZwwiRXDh7Jo2ZZRArwwgRSYNhhEgunD2TxqzxuBEiIgkwjBDJhbMXPDOzhBG2jBCRNBhGiORCqm4ahhEikhjDCJFcSN5NwzBCRNJgGCGSC3bTEFE7xTBCJBfOXn3VzIuzaYhIWgwjRHJh7qbRSNRNwyXhiUgiDCNEcsFuGiJqpxhGiOSiVqoBrEEN52cYISJpMIwQyYU5DHBqLxG1MwwjRHJhmdrLMEJE7QvDCJFcSNVNw3vTEJHEGEaI5ELybpoKQAjnnpuICAwjRPJQrwMMOtNrqbpphAHQVzn33EREYBghkofGd8x1dsuIpzeg9Gyog101ROR8DCNEcmAOAWp/QKly7rkVCg5iJSJJMYwQyYFOogXPzBhGiEhCDCNEclBzzvRsXoDM2Xh/GiKSEMMIkRzUnjM9ewdJc362jBCRhBhGiORA8pYR883yKi6/HRGRAzCMEMlBzVnTs3cHac5vaRk5J835iahdYxghkgN20xBRO8YwQiQHcummYRghIgkwjBDJgaWbJkia82sYRohIOh5SF0BEsIQA4RWEIyWV+OFwKQ6eqsCJszWoNxjho/ZATEcfDInpgOt6hyLM38u+52fLCBFJiGGESAZEzVkoAMzZUoSVZT82u03W0dNY89txqJQKjOkXjunXdUdcdJB9CjC3yJi7i4iInIhhhEhie46fQ2RZCcIA5JQroFYpkdS9IxK7BSO2oy+8PJXQ1tThz5Lz+OnPcuwr0mLT/mJs2l+M26+Kwj/H9UWIn6ZtRZhn8Zi7i4iInIhhhEgiRqPAO9vysPC7XOxVVwIK4I7hA3DLqBEI9lU3u8+zY4HDxRV4b9tRrN9dhA27i/DTn+VYcPcgjOwd1vpiLGHkXOuPQUTUShzASiSBan09pn70O+Z/mwulqIefohYAMGlUfItBxKxPRAAWTYjHlzOGo3e4P8rP6zB5xW94d1sehBCtK8gcRnRawFDfumMQEbUSwwiRk52t0uO+939FxuFSaDyUWHhz7IUPzQNJrTCocxC+mDEc9yV2AQCkbzqM2V8egNHYikDSeEoxB7ESkZMxjBA50ZkqPSa8l4Wc4+cQ5OOJNdOuxvg+PqYPNQGAyraeUy9PFV6/fSBevLkfFArgo6xj+Ofn+21vIVF5XJjey3EjRORkDCNETnJeV48pK3bij5LzCA/QYN3DSbiqSwe7LHj20F+6YvGEeCgUwCc7CzHnywO2BxLLjJozra6DiKg1GEaInEBfb8S0j37HnhNadPDxxOq/J6JnuL/pQzstBT8+Pgrz74qDQgGszDqGd388atsBOKOGiCTCMELkBC99dQA78k7DV63CygcT0CPM/8KHdlx99a4hnfHiuH4AgHmbD2Pz/mLrd2YYISKJMIwQOdjqX49h9a+FUCiAt+8bjEGdg5puYOf70kwZHouJSTEQAnhq7W4cOlVh3Y4+wQ31MIwQkXMxjBA5UPaxs5jz5QEAwDNjemNUn2bWArF003SwyzkVCgVm3dwP1/YKRW2dEY+t3oXzOium67JlhIgkwjBC5CAVtXV44pPdqDMIjBsYiUdHdm9+QwfcJM9DpcTiCfGIDPTC0fIqvLB+35UHtJrDSDUHsBKRczGMEDmAEAL/2rAfRedqEB3sjbl3DoRCoWh+Yzt305gF+6rx9n1XQaVU4Ms9J7H2t+OX34EtI0QkEYYRIgfYsLsIX+45CZVSgbfuuQr+Xp4tb2znbprGhsQE4x8pvQEAr3x9EMfPVLe8sTfHjBCRNBhGiOys6FwNZn1hGify1OieGNzlCiHD3DJix26axqaO6IaE2GBU6Q14Zt2elldoZcsIEUmEYYTIjkzdM/twXlePwV2C8OioHlfeybzImANaRgBApVRg/t2D4KNW4df8M1iZVdD8hgwjRCQRhhEiO/pyz0lszS2DWqXEG
3cNgkrZwjiRxqpPm559QhxWV0xHX6Td1BcAMHfTYRw7XXXpRpYwwgGsRORcDCNEdnKmSo+XvjoIAHj8+h5NFzZriRAXZq/4dHRgdcDfErvgmu4doas3YtYXzSwXbw4jtVrAaHBoLUREjTGMENnJK18fxJkqPfpE+OPh61qYxnuxWi0gGn7wmxcdcxCFQoFXbxsAtUqJbX+UYeO+i1ZnbdxNxDv3EpETMYwQ2UFW3mls2F0EpQKYd+cgqD2s/Ktl7qJR+wEeGscV2KBbqB8eaVjv5KWvDqCitu7ChyoP052DAY4bISKnYhghaqN6gxEvfWWaPXNfYhfERQdZv7Oli8axrSKNPTKyO7qG+KK0UodF3/3R9EPLnXsZRojIeRhGiNpo1S/HcLi4EkE+nnj6ht627WwZvOrY8SKNeXmq8Mr4AQCAj7IKmt67hquwEpEEWhVGli5ditjYWHh5eSExMRE7d+5scdsDBw7gzjvvRGxsLBQKBRYvXtzaWolkp/y8Dou2mFoXnhnTGx181bYdQIIwAgB/6RmCcQMjYRTAy18dvDCYlQufEZEEbA4ja9euRWpqKmbPno1du3YhLi4OKSkpKC0tbXb76upqdOvWDXPnzkVERESbCyaSk/mbc1FRW4/+nQJwb0IX2w8gURgBgOdv7AO1hxJZR0/ju4MlpjctLSOnnV4PEbVfNoeRRYsWYerUqZgyZQr69euHZcuWwcfHB8uXL292+2HDhmH+/Pm45557oNE4foAekbPsPXEOn2ab7vfy8vj+1q0pcjEJw0h0sA+mjegGAHh94yHo6g2Ab8NaJ9XlTq+HiNovm8KIXq9HdnY2kpOTLxxAqURycjKysrLsXhyRXAkhkL7xMIQAbr8qCkNiWjkA1RJGnDeAtbFHRnZHmL8Gx05XY8XPBYBvqOmDKoYRInIem8JIeXk5DAYDwsPDm7wfHh6O4uLiFvaynU6nQ0VFRZMHkZz8+Gc5so6ehtpDiWdSbBy02piTFjxria/GA8+O7QMAePuHI6hUBTbUxW4aInIeWc6mSU9PR2BgoOURHR0tdUlEFkajwLxNhwEAE6+OQVSQd+sPViNtGAGAO66KwqDOgTivq8eXf+pNb7JlhIicyKYwEhISApVKhZKSkibvl5SU2HVwalpaGrRareVx/Phxux2bqK2+2nsSB09VwF/jgcesuRHe5Ug4ZsRMqVTgnw33rfnqiDmMlElWDxG1PzaFEbVajSFDhiAjI8PyntFoREZGBpKSkuxWlEajQUBAQJMHkRzo641Y2LBQ2MPXdbN9Ku/FZBBGACCxW0eM7hOGMmPD/XQ4gJWInMjmbprU1FS8//77WLlyJQ4dOoRHHnkEVVVVmDJlCgBg4sSJSEtLs2yv1+uRk5ODnJwc6PV6FBUVIScnB0eOHLHfVRA5yZrfClF4phqh/ho8+JeubTuY0XBhPQ9vaQawNvbs2D44i4bgX6sF6vXSFkRE7YaHrTtMmDABZWVlmDVrFoqLixEfH4/NmzdbBrUWFhZCqbyQcU6ePImrrrrK8vWCBQuwYMECXHfddcjMzGz7FRA5SZWuHv/O+BMA8MTonvBR2/zXp6laLSCMptcSzaZprHeEP5Kv6o36A0p4KIwQ1eVQBHSSuiwiagda9a/pjBkzMGPGjGY/uzhgxMbGXnqrciIX9J/t+Sg/r0dsRx/cM8wOg6rNXTSaQEDl2fbj2cFTY/rg7AF/hEKL3w78gYQkhhEicjxZzqYhkpvT53V478ejAICnx/SGp8oOf3XMM1Zk0Cpi1inIG8LHtPDZ+u17YTDyFwkicjyGESIrLN2ah/O6egyICsC4gZH2OWhVwy0U/MLsczw7CQ41XV/12WKs33VC4mqIqD1gGCG6ghNnq7Hql2MAgOfG9oGyNcu+N+d8Qxgxr3oqEx7+pno6KiqwaMsfqK0zSFwREbk7hhGiK1i05Q/oDUYM79ERI3raMTiY1/KQWcuIORzFeNXglLYW/806JnFBR
OTuGEaILuNwcQU27C4CYGoVsStLy4jMwkjDmJERUaYWoKWZR1BRWydlRUTk5hhGiC5j/uZcCAGMGxiJQZ2D7HtwS8uIvLpp4GtagK2bdzV6hvnhXHUd3t2WJ3FRROTOGEaIWvBbwRlkHC6FSqnA02N62f8E5xtuqyDTlhFF9WnLTQD/sz0fpRW1UlZFRG6MYYSoGUIIzG24Gd6EYdHoFupn/5OYu2n8wi+/nbOZB9RWl2NMv3AM7hKE2joj3mpY8I2IyN4YRoia8f2hUmQfOwsvTyWeHN3TMSeRbTdNQz3nS6FQKCxjZdb8dhz55VUSFkZE7ophhOgiBqPA/G9NrSIPDu+K8AAv+59Edx6oqza9lls3jX9DS42uAtBXI7FbR4zqHQqDUWDBd7nS1kZEbolhhOgi63edwB8l5xHo7YmHr+vumJOYFzzz9AE0DugCagtNAODhbXp9vhiA6SZ6CgXwzd5T2HdCK2FxROSOGEaIGqmtM+DNLX8AAB4b1R2B3g66Z8z5hi4amS14BgBQKC60jlSaBtn2jQzAbfFRAIB5mw9LVRkRuSmGEaJGVv1yDCe1tYgM9MLEpFjHncg8k0ZuC56Z+Tcsed/QMgIAqTf0gqdKge1HyrH9z3KJCiMid8QwQtSgorYOb289AgCYmdwLXp4qx52sSqYLnpmZZ/hUXggj0cE+uD8xBoCpdcTIm+gRkZ0wjBA1eG/bUZyrrkOPMD/cMTjKsSc7L9OZNGb+EabnRmEEAGZc3wO+ahX2FWmxcf8pCQojInfEMEIEoLSiFv/Zng8A+EdKb3ioHPxXw9z9Ibc1RszMdZm7kxqE+Gkw9dpuAIAF3+aizmB0dmVE5IYYRogALM74EzV1BgzuEoQx/ZwQECoaWhUCOjn+XK3RQssIAPx9RDd09FWj4HQ11v523MmFEZE7Yhihdi+v7Lzlh+rzN/aFQqFw/EkrT5qe/WUaRlpoGQEAP40HHr++BwDgrYw/Ua2vd2ZlROSGGEao3VvwbS4MRoHkvmFI6BrsnJNWNISRgEjnnM9WlpaR5seF3JcYg+hgb5RV6rDi5wLn1UVEbolhhNq1XYVnsWl/MZQK08JeTlGvA6pPm17LtWXEPLW35qyp3ouoPZR4+gbTTfSWZebhbJXemdURkZthGKF2SwiBuRtNC3jdNaQzeoX7O+fE5tYGlQbwcVJLjK28OwAqtel1M101AHBrXCf0ifBHpa4e72zLc2JxRORuGEao3dqaW4qdBWeg8VBi5g29nHdiy+DVSNNqp3KkUDRaa6T5MKJUXriJ3oc7CnDyXI2zqiMiN8MwQu2SwSgwb5Pppm9ThndFZKC3804u98GrZuZxIxVFLW4ysncoEroGQ19vxOLv/3BSYUTkbhhGqF1av+sEcksqEejtiUccdTO8lsh98KpZYGfT82XCiEKhwPM3mlpHPss+gT9LKp1RGRG5GYYRaneq9fVY8J2pVeSxUd0R6OOgm+G1xNxN4+8iYUR74rKbDe7SAWP6hcMogPnf5jqhMCJyNwwj1O4s23YUJRU6RAd7O/ZmeC0xd9MEOHjJ+bYK7GJ61l55YbNnx/aGUgF8d7AE2cfOOrgwInI3DCPUrhSdq8G7DTM/Xrixr2NvhteSxgNY5czKlhEA6BHmj7uGmLZP33gIQvAmekRkPYYRalfe2HwYunojEroGY+yACGmKMP9wD+gszfmtZUMYAYCZN/SCt6cKvx87iy/3nHRgYUTkbhhGqN3IPnYWX+SchEIBzLq5n3OWfb9Yvf5CN02HGOef3xbmMFJVBtRdedpuZKA3Hh1pGgycvvEwl4knIqsxjFC7YDQKvPL1QQDA3UM6Y0BUoDSFVJwAhBHw8AJ8Q6WpwVreHQBPX9Nrbcszahqbem03RAd7o7iiFv+3lQuhEZF1GEaoXfhyz0nkHD8HX7UKz4zpLV0h5wpNz0Fd5LvgmZlC0airxrq783p5qvDPm/oBAN776SgKT1c7qjoiciMMI+T2K
mrr8NrGQwCAR0f1QFiAl3TFNA4jriAo2vRs5bgRAEjpH46/9AiBvt6IV7856KDCiMidMIyQ21v03R8oq9ShW4gv/j6iq7TFuFoYsXEQK2BaCG32Lf2gUirw3cES/PRnmYOKIyJ3wTBCbm1/kRYfZRUAAF4ePwAaDwmm8jbmqmHk3DGbdusZ7o8HrjYN0J39xQHU1hnsXRkRuRGGEXJbRqPAvz7fD6MAbonrhL/0DJG6pEZhROYzacyCu5mez+TbvOvMG3oh1F+Do+VV+L9MDmYlopYxjJDbWvv7ceQcPwc/jQf+Na6v1OWYuGwYOWrzroHenph9i2kw6zuZR3CklPetIaLmMYyQWyqtrMXcTYcBAKk39EK4lINWzepqL9wkz1W6acxhpKoUqK2wefdxAyMxqnco6gwCL6zfD6ORK7MS0aUYRsgtzf7iALQ1dejfKQATk2TSCnE2H4AANIGArwy6jKzhFQj4NNR61vauGoVCgZfHD4C3pwo7C85gXbZ1U4SJqH1hGCG3883eU9i0vxgeSgXm3xUHD5VM/piX/2l67thd/muMNGZuHTndunEf0cE+SL2hFwDg9Y2HUVpZa6/KiMhNyORfaSL7OFOlx6wv9gMwrSnSr1OAxBU1cvqI6bljD2nrsFVH0xLvrRk3YjZleCwGRAVAW1OHtP/t4430iKgJhhFyK3O+PIDTVXr0DvfHjFEy+6FvDiMhPaWtw1ZtmFFj5qFSYsHdcVCrlMg4XIp12davW0JE7o9hhNzGxn2n8OWek1AqgDfuGgS1h8z+eFtaRrpLW4etLGGkbdNz+0QEYGZDd83LXx3EibNcKp6ITGT2rzVR65w8V4Pn/7cXADD9uu6Iiw6StqDmWMaMuFjLiDk8lf8BtLF7Zdq13TAkpgPO6+rxj3V7ObuGiAAwjJAbMBgFUj/NQUVtPeI6B1p++5aV6jNAzRnTa1drGQnpDUABVJ8Gqtq2tLtKqcDCu+Pg7alC1tHT+M/21nf9EJH7YBghl/fuj3n45egZ+KhVeOueq+Apl9kzjZWZ1jxBQGdA7SttLbZS+1zoqilt+43vYkN88a+bTYvQzdt8GLsLz7b5mETk2mT4rzaR9XYVnsWi7/4AAMy5tT9iQ2T6g77YNMMHEQOkraO1whpWsC2xz11470vognEDI1FvFJjx8W5oq+vsclwick0MI+Syyip1eGRVNuqNAuMGReLuIZ2lLqllJQ1hJLy/tHW0VphpWXd7tIwApsXQ0u8ciJiOPig6V4NnPtvD6b5E7RjDCLmkeoMRj3+yCyUVOnQP9cW8OwdBIeeFxEoOmJ7DXbxlpPSQ3Q4Z4OWJpfcNhlqlxJaDJXj/p9avY0JEro1hhFzSG9/m4pejZ+CrVuHdB4bCT+MhdUktMxovtCi4ahgxt+iUHjJdj50MiArEiw3jR+ZuOoyth0vtdmwich0MI+RyPss+gfd+NP0WveDuOPQI85O4ois4mw/UVQMeXhcGgrqa4O6m+uuq2rQSa3P+dnUM7hkWDaMAnvhkN+/uS9QOMYyQS9mRV4609ab1RB4d2R03DoyUuCIrnMoxPYf1A1QybsG5HJUHEBlnel30u10Pbb6ZXkJsMCp19Xho5e84V6236zmISN4YRshlHCmtxMP/zUadQeDmQZF4ZkxvqUuyzomGH96dh0lbR1tFDTE9F2Xb/dBqDyXe+dtgRAV549jpajy08nfU6A12Pw8RyRPDCLmEU9oaTFr+Gypr6zEkpgMW3B0HpVLGA1YbO/Gb6dldwsgJ+7aMmHX002D55GEI8PJA9rGzeOzjXagz2G98ChHJF8MIyV5ZpQ73v/8ris7VILajD957YAi8PFVSl2Wdeh1wao/pdeeh0tbSVuYwUrzPdF0O0DvCH8snD4PGQ4kfDpfiuc+4ZDxRe8AwQrJ2tkqPv33wK46WVyEqyBurp16Njn4aqcuyXvE+wKAHfEKADrFSV9M2HWIBn46Ase5CwHKAobHB+L/7B0OlV
GD97iK8sGEfAwmRm2MYIdkqq9Thvg9+RW5JJcL8NVj990REBXlLXZZtCn4yPUcnAHJeB8UaCgXQJcn0On+bQ081um84Ft4dB6UCWPPbcTz7v70wMJAQuS2GEZKlE2er8dd3s3DoVAVC/NRY/fdE+S71fjlHM03P3UZKWYX9mK/jqGPDCADcdlUU3pwQD5VSgc+yTyD10xyOISFyUwwjJDt/llTi7mVZyG/omlk3/Rr0DPeXuizb1dUAx7JMr90tjBz/FdBXO/x04+OjsOTeq+ChVOCLnJN48MPfUFHL+9gQuRuGEZKVrbmluOP/duCUthbdQ33x2SNJ6OqKLSIAUJgFGHSAfycgpJfU1dhHxx6m6zHoTdfnBDcNjMR7E4fAR63CT3+W46/LsnDyXI1Tzk1EzsEwQrIghMAHPx3FQx/+hkpdPRJig7Fu+jWIDHSxMSKN5W4yPfe43vXHi5gpFECP0abXuRuddtrr+4Tj04eTEOavweHiStz69s/IyjvttPMTkWMxjJDkzlXr8ciqXXj1m0MwCuCeYdFY9fdEBPuqpS6t9YxG4OCXptd9x0tbi731a7ieg18CRuctTDYgKhAbHhuOPhH+KD+vw/0f/IKlW49wpg2RG2AYIUntzD+Dm976CZsPFMNTpcCcW/oh/Y6BUHu4+B/NEzuB88WAJhDodp3U1dhX1+sAr0CgqhQo/MWpp44K8saGR4fjjsFRMApg/re5mLRiJ7ttiFyci/+LT66qsrYOs77YjwnvZeGkthaxHX2w/pHhmDy8KxTu0KWRs9r03PtGwMOF1kWxhoca6D3O9HrPx04/vbdahYV3x2FuQ2j96c9ypLz5I9b+Vggh2EpC5IoYRsiphBDYtO8Ublj0Iz7KOgYhgLuHdMbXT4zAwM6BUpdnH7VaYN9nptdDJklbi6MMnmh63vc/oOas00+vUChwT0IXbHxiBK7qEoRKXT2e+98+THjvF+wv0jq9HiJqG4YRcprfC87grmVZeGT1LhRX1CKmow9W/z0R8++Og5/GRe9m25zdq4C6aiC074VFwtxNl6uBsP5AfY3peiXSI8wPn02/Bv+8qS+8PJXYmX8Gt7y9Hc//by+KtbWS1UVEtlEIF2jXrKioQGBgILRaLQICAqQuh2wghMDO/DNYti0PW3PLAABenkpMG9ENj47q4Tr3mLGWvhp4K840nuKWt4Ahk6WuyHGyVwJfPQH4hgJP5AAaP0nLOXmuBnM3HcaXe04CANQqJf46rDMeGdnD9VbuJXIT1v78Zhghh9DVG/DdgRL8Z3s+co6fAwAoFcCEYdF4KrkXwgO8pC3QUba9AWx9DQiKAR7PBlSeUlfkOIY64O1hwNl8YGQaMPJ5qSsCYGqBm7f5MH4rMHUfeSgVuHFgJP6W2AUJXYPdY0wSkYtgGCGnE0Jgf1EF/rfrBD7PKcK5atNKmWoPJe4a0hlTR3Rz3QXMrFGWCyz7i2lBsDv/Awy8S+qKHG///4DPHgRUamDaNiC8n9QVWfxy9DT+nfEndjRaj6RXuB/uHNwZNw2MRHSwj4TVEbUPDCPkFPp6I34/dgbfHSjBloMlKGo0xTIy0At3D43GA1fHINTfzWaUXKxWC3yQDJT/AfS4Abh/nfssdHY5QgCf3Av8scm0yuyD3wI+wVJX1cT+Ii1W/3oMn+8+iZq6C+uixEcHIaV/BEb0DEG/yAAole3g+0XkZA4NI0uXLsX8+fNRXFyMuLg4LFmyBAkJCS1uv27dOrz44osoKChAz549MW/ePNx0001Wn49hRD60NXU4cFKLnflnsDP/DHYVnkVt3YWbl3l5KjG6Tzj+Oiwaf+kRAlV7+Ae+6jTw8V+Bot+BgChg6g+Af4TUVTlPZQnw/iigogiIGgLcuxbwC5W6qktU1Nbhy5yT+HrvSfyafwaN/+Xr6KvGNT1CMKRLEOK7dEDfSH9oPNxsPBORBBwWRtauXYuJEydi2bJlSExMxOLFi7Fu3Trk5uYiLCzsk
u137NiBa6+9Funp6bj55pvx8ccfY968edi1axcGDBhg14sh+xBC4HSVHoVnqlF4uhpHy6tw6FQFDp2qwImzly4u1dFXjev7hGFM/wj8pUcIvNXt5B9xIUxLon/zDFB5EvDuAEz8EogcJHVlzldyEFhxI1B7zhTIbpwH9LlZtq1DpZW12Ly/GNtyy5B19DSq9U1XkvVUKdAnIgA9w/3QPdT06BHmi84dfNxv0DWRAzksjCQmJmLYsGF4++23AQBGoxHR0dF4/PHH8fzzlw5gmzBhAqqqqvD1119b3rv66qsRHx+PZcuW2fVi6FJCCNTWGXFeV48qXT3ONzyqdPU4U6VH+Xk9Tp/Xofy8DuXn9Sir1OHE2WpU6Vte5jsqyBuDYzogsWswru4WjO6hfu1nUKDuPFB6CDiaCRxYD5QeNL0f3B2452MgrI+k5Umq/E/g4wnAmTzT16F9TONmuo0CwvoCanmOF9LXG7G78Cyyjp7GnuPnsOeEFmeq9C1u38HHExGB3ugU6IWIQC909FUj0EeNIG9PBPl4IshHjUBvT/hqVPD2VMFbrYJapWw/f0eIGrH257dNizvo9XpkZ2cjLS3N8p5SqURycjKyspq/g2dWVhZSU1ObvJeSkoLPP/+8xfPodDrodDrL1xUVFbaUabWs1a9Ace5Yo3dMuaxJPGv0RcOnUFy0jYCAQohG2zTaQ1zYV4FG24hG2zR+JUzbNTnORTUYhYAQAkajgFEIGAUano0QRljeMwgBg8HYpFbFRRV2BBACgd5oSuEJeHuq4KtRwU/jgUBvTwR5eyLQ2/PCUu0nBXDyoh3RTLZtNu8K27dpdrtWns+WY9VqgeozQM0ZoPJU08/VfkDCVODaZwF1Ox8QGdITmL4d+GkB8Ou7QNlh4IdXTQ/AdLdf346AdzDgFQAoPU2zjSzPF7c4NPrhfckPcis/s4IaQGLDAxGAiBA4r6vH6fN6aGvqoK2uw7maOmhr6lBnMAJ6AGUNj0a0DY9juJQCgIdKAZVS2fCsgBIKKJWAUqGAQmF6Nj1Mi7qZnxWKC1ekaPiv4uL/BQ3/UZi3uOh/gWV/meUhhY3fq3ZFgv81XW56Bp1iL/5p4Bw2hZHy8nIYDAaEh4c3eT88PByHDx9udp/i4uJmty8uLm7xPOnp6XjppZdsKa1VOuR/gz71hxx+Hkm1pUXZCKCm4XHOLtW4B79woNNg01Lv/W41dc+QidoHGD0LGP4ksH89cOR70/1rqstNXVmVl6RX2VEA8G94XPJBW9fmEwDq23gMIgc5XH6Pa4QRZ0lLS2vSmlJRUYHo6Gi7n0fb+y5knTth+frCbw1NI6np/QuL1Soabdz0Nw3FRfs0vKcwv7r41xXFhb0aHUjR6HPFRdsCgEqpgEph+i1LqQRUCiWUSgVUSkCpUJp+61KafvtSeyihVinhqVJC2dyvRdb8Vtnsr1Mutp21vxI2t50m0DRDxCcYCIwGfEOsO1Z75hUIDJ1iegCmlqWz+UD1WVMLk64CMNQDxjrTeiWGOkAYGx2gcdPjZVqwrGndkpDBKFBvFKgzGFFvaHg2ChiMxoYWzcYtneZWzcatnaJxAysA8yVfaD0VjT8XTd83bXnpZyRvUn2fuoZ1kejMNoaRkJAQqFQqlJSUNHm/pKQEERHNzx6IiIiwaXsA0Gg00GgcPxU08e5nHH4OIsKFMNfOqBoebj6xnajNbLo3jVqtxpAhQ5CRkWF5z2g0IiMjA0lJzd+DIykpqcn2ALBly5YWtyciIqL2xeZumtTUVEyaNAlDhw5FQkICFi9ejKqqKkyZYmqOnThxIqKiopCeng4AePLJJ3Hddddh4cKFGDduHNasWYPff/8d7733nn2vhIiIiFySzWFkwoQJKCsrw6xZs1BcXIz4+Hhs3rzZMki1sLAQSuWFBpdrrrkGH3/8Mf71r3/hhRdeQM+ePfH5559bvcYIERERuTcuB09EREQOYe3Pb5vGjBARERHZG8MIERERS
YphhIiIiCTFMEJERESSYhghIiIiSTGMEBERkaQYRoiIiEhSDCNEREQkKYYRIiIikpTNy8FLwbxIbEVFhcSVEBERkbXMP7evtNi7S4SRyspKAEB0dLTElRAREZGtKisrERgY2OLnLnFvGqPRiJMnT8Lf3x8KhcJux62oqEB0dDSOHz/utve8cfdr5PW5Pne/Rl6f63P3a3Tk9QkhUFlZiU6dOjW5ie7FXKJlRKlUonPnzg47fkBAgFv+AWvM3a+R1+f63P0aeX2uz92v0VHXd7kWETMOYCUiIiJJMYwQERGRpNp1GNFoNJg9ezY0Go3UpTiMu18jr8/1ufs18vpcn7tfoxyuzyUGsBIREZH7atctI0RERCQ9hhEiIiKSFMMIERERSYphhIiIiCTVbsPIa6+9hmuuuQY+Pj4ICgpqdpvCwkKMGzcOPj4+CAsLwz/+8Q/U19c7t1A7io2NhUKhaPKYO3eu1GW12tKlSxEbGwsvLy8kJiZi586dUpdkN3PmzLnke9WnTx+py2q1H3/8Ebfccgs6deoEhUKBzz//vMnnQgjMmjULkZGR8Pb2RnJyMv78809pim2lK13j5MmTL/mejh07VppiWyE9PR3Dhg2Dv78/wsLCcNtttyE3N7fJNrW1tXjsscfQsWNH+Pn54c4770RJSYlEFdvGmusbOXLkJd/D6dOnS1Sxbd555x0MGjTIsrBZUlISNm3aZPlc6u9duw0jer0ed999Nx555JFmPzcYDBg3bhz0ej127NiBlStX4sMPP8SsWbOcXKl9vfzyyzh16pTl8fjjj0tdUqusXbsWqampmD17Nnbt2oW4uDikpKSgtLRU6tLspn///k2+V9u3b5e6pFarqqpCXFwcli5d2uznb7zxBv79739j2bJl+PXXX+Hr64uUlBTU1tY6udLWu9I1AsDYsWObfE8/+eQTJ1bYNtu2bcNjjz2GX375BVu2bEFdXR3GjBmDqqoqyzYzZ87EV199hXXr1mHbtm04efIk7rjjDgmrtp411wcAU6dObfI9fOONNySq2DadO3fG3LlzkZ2djd9//x3XX389xo8fjwMHDgCQwfdOtHMrVqwQgYGBl7y/ceNGoVQqRXFxseW9d955RwQEBAidTufECu0nJiZGvPnmm1KXYRcJCQniscces3xtMBhEp06dRHp6uoRV2c/s2bNFXFyc1GU4BACxYcMGy9dGo1FERESI+fPnW947d+6c0Gg04pNPPpGgwra7+BqFEGLSpEli/PjxktTjCKWlpQKA2LZtmxDC9D3z9PQU69ats2xz6NAhAUBkZWVJVWarXXx9Qghx3XXXiSeffFK6ouysQ4cO4oMPPpDF967dtoxcSVZWFgYOHIjw8HDLeykpKaioqLAkSVc0d+5cdOzYEVdddRXmz5/vkt1Oer0e2dnZSE5OtrynVCqRnJyMrKwsCSuzrz///BOdOnVCt27dcP/996OwsFDqkhwiPz8fxcXFTb6fgYGBSExMdKvvJwBkZmYiLCwMvXv3xiOPPILTp09LXVKrabVaAEBwcDAAIDs7G3V1dU2+j3369EGXLl1c8vt48fWZrV69GiEhIRgwYADS0tJQXV0tRXltYjAYsGbNGlRVVSEpKUkW3zuXuFGeFIqLi5sEEQCWr4uLi6Uoqc2eeOIJDB48GMHBwdixYwfS0tJw6tQpLFq0SOrSbFJeXg6DwdDs9+fw4cMSVWVfiYmJ+PDDD9G7d2+cOnUKL730EkaMGIH9+/fD399f6vLsyvz3qbnvp6v+XWvO2LFjcccdd6Br167Iy8vDCy+8gBtvvBFZWVlQqVRSl2cTo9GIp556CsOHD8eAAQMAmL6ParX6kjF4rvh9bO76AOC+++5DTEwMOnXqhL179+K5555Dbm4u1q9fL2G11tu3bx+SkpJQW1sLPz8/bNiwAf369UNOTo7k3zu3CiPPP/885s2bd9ltDh065NIDAS9myzWnpqZa3hs0aBDUajUefvhhpKenu
+0yx67qxhtvtLweNGgQEhMTERMTg08//RQPPfSQhJVRa91zzz2W1wMHDsSgQYPQvXt3ZGZmYvTo0RJWZrvHHnsM+/fvd+lxTJfT0vVNmzbN8nrgwIGIjIzE6NGjkZeXh+7duzu7TJv17t0bOTk50Gq1+OyzzzBp0iRs27ZN6rIAuFkYefrppzF58uTLbtOtWzerjhUREXHJ7AzzyOKIiIhW1ecIbbnmxMRE1NfXo6CgAL1793ZAdY4REhIClUp1yUjvkpISWX1v7CkoKAi9evXCkSNHpC7F7szfs5KSEkRGRlreLykpQXx8vERVOV63bt0QEhKCI0eOuFQYmTFjBr7++mv8+OOP6Ny5s+X9iIgI6PV6nDt3rslv2K7297Kl62tOYmIiAODIkSMuEUbUajV69OgBABgyZAh+++03vPXWW5gwYYLk3zu3CiOhoaEIDQ21y7GSkpLw2muvobS0FGFhYQCALVu2ICAgAP369bPLOeyhLdeck5MDpVJpuT5XoVarMWTIEGRkZOC2224DYGpWzcjIwIwZM6QtzkHOnz+PvLw8PPDAA1KXYnddu3ZFREQEMjIyLOGjoqICv/76a4uz3dzBiRMncPr06SYBTM6EEHj88cexYcMGZGZmomvXrk0+HzJkCDw9PZGRkYE777wTAJCbm4vCwkIkJSVJUbJNrnR9zcnJyQEAl/keXsxoNEKn08nje+eUYbIydOzYMbF7927x0ksvCT8/P7F7926xe/duUVlZKYQQor6+XgwYMECMGTNG5OTkiM2bN4vQ0FCRlpYmceWts2PHDvHmm2+KnJwckZeXJ1atWiVCQ0PFxIkTpS6tVdasWSM0Go348MMPxcGDB8W0adNEUFBQk9lPruzpp58WmZmZIj8/X/z8888iOTlZhISEiNLSUqlLa5XKykrL3zEAYtGiRWL37t3i2LFjQggh5s6dK4KCgsQXX3wh9u7dK8aPHy+6du0qampqJK7cepe7xsrKSvHMM8+IrKwskZ+fL77//nsxePBg0bNnT1FbWyt16VZ55JFHRGBgoMjMzBSnTp2yPKqrqy3bTJ8+XXTp0kX88MMP4vfffxdJSUkiKSlJwqqtd6XrO3LkiHj55ZfF77//LvLz88UXX3whunXrJq699lqJK7fO888/L7Zt2yby8/PF3r17xfPPPy8UCoX47rvvhBDSf+/abRiZNGmSAHDJY+vWrZZtCgoKxI033ii8vb1FSEiIePrpp0VdXZ10RbdBdna2SExMFIGBgcLLy0v07dtXvP766y7zD2FzlixZIrp06SLUarVISEgQv/zyi9Ql2c2ECRNEZGSkUKvVIioqSkyYMEEcOXJE6rJabevWrc3+fZs0aZIQwjS998UXXxTh4eFCo9GI0aNHi9zcXGmLttHlrrG6ulqMGTNGhIaGCk9PTxETEyOmTp3qUuG5uWsDIFasWGHZpqamRjz66KOiQ4cOwsfHR9x+++3i1KlT0hVtgytdX2Fhobj22mtFcHCw0Gg0okePHuIf//iH0Gq10hZupQcffFDExMQItVotQkNDxejRoy1BRAjpv3cKIYRwShMMERERUTO4zggRERFJimGEiIiIJMUwQkRERJJiGCEiIiJJMYwQERGRpBhGiIiISFIMI0RERCQphhEiIiKSFMMIERERSYphhIiIiCTFMEJERESSYhghIiIiSf0/C7QoufiuxSIAAAAASUVORK5CYII=\n" + }, + "metadata": {} + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "We see that compared to the prior, the posterior has very small variance $(=0.9)$ meaning more accuracy and we see that the posterior has a mean $(=5.9)$ which is very close to 
$\\bar{x}=6$. Thus we can say with more certainty that that value we receive is closer to $6$." + ], + "metadata": { + "id": "xiVBxQO9LB0b" + } + }, + { + "cell_type": "markdown", + "source": [ + "#### __Part (c)__\n", + "\n", + "$a=\\frac{1}{\\sigma_{prior}^2}$\n", + "\n", + "$b=\\frac{n}{\\sigma^2}$\n", + "\n", + "$\\mu_{post}=\\frac{{a\\mu_{prior}+b\\bar{x}}}{a+b}$\n", + "\n", + "$\\sigma_{post}^2=\\frac{1}{a+b}$\n", + "\n", + "If there are more signals being received then $n$ increases. If $n$ increases, so does $b$. Clearly, then $\\sigma_{post}^2$ decreases, meaning less variance and more accuracy.\n", + "We can say about $\\mu_{post}$ that the weightage of $\\bar{x}$ increases as $b$ increases however no change occurs in $a$ or $\\mu_{prior}$. So, there is just more weightage of $\\bar{x}$ in the value of posterior mean." + ], + "metadata": { + "id": "uBe6gIGoMAaO" + } + }, + { + "cell_type": "markdown", + "source": [ + "#### __Part (d)__\n", + "\n", + "We have been given that IQ follows a distribution $N(100,152)$, so this is the prior that we already have with us, where $\\theta$ is the unknown value of the true IQ such that,\n", + "\n", + "$ f(\\theta)\\sim N(100,152)$\n", + "\n", + "Next, if a person is tested multiple times, their measured IQ (say $x$) differs from their true IQ ($\\theta$) according to $N(0,102)$. As the mean is 0, the difference in the mean of the measured IQ and the true IQ (which is the true IQ itself which is no more unknown to us) is 0, or the measured IQ has a mean equal to the true value itself,\n", + "So, the likelihood of some value of IQ given some true value of IQ $\\theta$,\n", + "\n", + "$f(x|\\theta) \\sim N(\\theta,102)$\n", + "\n", + "$ (i)$ Randall Vard scored 80, so the measured IQ is 80. We can say that the expected value of his true IQ will be obtained from the posterior, the probability of his true IQ given his measured IQ. 
So, to get the expected value (or mean), we use,\n", + "\n", + "$a=\\frac{1}{\\sigma_{prior}^2}$\n", + "\n", + "$b=\\frac{n}{\\sigma^2}$\n", + "\n", + "$\\mu_{post}=\\frac{{a\\mu_{prior}+b\\bar{x}}}{a+b}$\n", + "\n", + "Where,\n", + "\n", + "$\\sigma_{prior}^2=152$\n", + "\n", + "$\\mu_{prior}=100$\n", + "\n", + "$n=1$ and\n", + "\n", + "$\\sigma^2=102$ (known variance)\n", + "\n", + "Using these,\n", + "\n", + "$a=\\frac{1}{152}$\n", + "\n", + "$b=\\frac{1}{102}$\n", + "\n", + "Here, $\\bar{x}=x=80$ thus,\n", + "\n", + "$\\mu_{post}= \\frac{\\frac{1}{152}.100+\\frac{1}{102}.80}{\\frac{1}{152}+\\frac{1}{102}} = 88.031$\n", + "\n", + "$(ii)$ Here, for Mary I. Taft, $\\bar{x}=x=150$ thus,\n", + "\n", + "$\\mu_{post}= \\frac{\\frac{1}{152}.100+\\frac{1}{102}.150}{\\frac{1}{152}+\\frac{1}{102}} = 129.914$\n" + ], + "metadata": { + "id": "el5ZV7bdPr64" + } + }, + { + "cell_type": "markdown", + "source": [ + "###__Question 3__\n", + "We already know how we can use MLE to estimate the unknown mean and variance of a given dataset that follows gaussian distribution.\n", + "\n", + "So, let $\\theta$ denote the set of unknown parameters $\\mu$ and $\\sigma$,\n", + "\n", + "$\\theta = \\{\\mu,\\sigma\\}$\n", + "\n", + "The goal of MLE is to maximize the probability (actually probability density since the data is continuous) of the data given $\\theta$. Hence we want to maximise the following function -\n", + "\n", + "$f\\{x_1,x_2....x_n|\\theta\\}$\n", + "\n", + "where, of course,\n", + "\n", + "$f(x_i|\\theta)= \\frac{1}{\\sigma\\sqrt{2\\pi}}e^{\\frac{{-(x_i-\\mu)^2}}{2\\sigma^2}}$\n", + "\n", + "Thus we aim to find the optimal $\\theta$ for which $f$ is maximised,\n", + "\n", + "$\\hat{\\theta}_{MLE}=argmax_{\\theta}\\prod\\limits_{i}^{n}f(x_i|\\theta)$\n", + "\n", + "To simplify calculation (taking derivative to maximise $f$), we use logarithmic function. 
Then by simple algebra, our equation above changes to,\n", + "\n", + "$\\hat{\\theta}_{MLE}=argmax_{\\theta}\\sum\\limits_{i}^{n}ln(f(x_i|\\theta))$\n", + "\n", + "From MAN-006, we have already calculated the parameters estimated by MLE by taking the derivative of the log of the probability density f with respect to each unknown parameter and equating that to zero, and they turn out to be,\n", + "\n", + "$\\hat{\\mu}_{MLE}=\\frac{1}{n}\\sum\\limits_{i}^{n}x_i$\n", + "\n", + "Which is simply the sample mean, and,\n", + "\n", + "$\\hat{\\sigma}^2_{MLE}=\\frac{1}{n}\\sum\\limits_{i}^{n}(x_i-\\mu)^2$\n", + "\n", + "Which is the sample variance.\n", + "\n", + "Now, our next goal is to use python to estimate this. So, all we should have to do is give a dataset that follows gaussian distribution, define the Maximum likelihood function (the logarithmic form) and then maximize it using python to obtain the required estimates.\n", + "\n", + "A little research on the internet revealed that the easiest way to maximise the function would be using Scipy. Instead of trying to maximize the log-likelihood function, the standard way seems to be to minimize the negative log-likelihood function." 
+ ], + "metadata": { + "id": "Nrayn0lxLFd2" + } + }, + { + "cell_type": "code", + "source": [ + "# first let's generate a dataset following a gaussian distribution using numpy\n", + "import numpy as np\n", + "from scipy.optimize import minimize\n", + "from scipy.stats import norm\n", + "\n", + "truemean = 100\n", + "truevariance=152\n", + "truesd=np.sqrt(truevariance)\n", + "n=1000\n", + "dataset = np.random.normal(truemean,truesd, size=n)\n", + "print(\"True Mean:\", truemean)\n", + "print(\"True Variance:\",truevariance )\n", + "\n", + "#print(dataset)\n", + "\n", + "# now we must define the negative log-likelihood function\n", + "\"\"\"\n", + "def likelihood(parameters,dataset):\n", + " mean = parameters[0]\n", + " sd=parameters[1]\n", + " sum=0\n", + " for i in dataset:\n", + " data=i\n", + " pdf = norm.pdf(data , loc = mean , scale = sd )\n", + " LL=np.log(pdf)\n", + " sum+=LL\n", + " negativeLL= -1*sum\n", + " return negativeLL\n", + "\"\"\"\n", + "\n", + "def likelihood(parameters, dataset):\n", + " mean, variance = parameters\n", + " sum=0\n", + " N = len(dataset)\n", + " for i in dataset:\n", + " sum+=(i-mean)**2\n", + " LL = - N/2 * np.log(variance) - 1/(2*variance) * sum\n", + " return -LL\n", + "\n", + "# Scipy optimization algorithms require a intial parameter value, closer to the true value, faster the optimization algorithm\n", + "# we will set the initial parameters to the standard normal distribution values since we don't have a prior\n", + "initialparameters=[0,1]\n", + "\n", + "# now we can use Scipy to minimize the negative log-likelihood function\n", + "result= minimize(likelihood, initialparameters, args=(dataset,),method='L-BFGS-B')\n", + "estimatedmean,estimatedsd=result.x\n", + "print(\"Estimated Mean:\", estimatedmean)\n", + "print(\"Estimated Standard deviation:\",estimatedsd )\n", + "\n", + "# at first I was using scipy to define the normal pdf but....\n", + "# this was not working on high values of mean and standard deviation (bigger 
values are giving RuntimeWarning: divide by zero encountered in log )\n", + "# I tried searching about this error a lot but didn't find anything very helpful then I switched to defining log-likelihood function from scratch and it worked!\n", + "# further it important to specify the method of optimization as L-BFGS-B otherwise we keep getting \"invalid value encountered in log\" error. No idea what's happening!\n", + "# L-BFGS-B is a variation of the standard BFGS algorithm which is used when we have limited memory and our variables have some bound, its also much faster\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "M5Z7s0BXWVOf", + "outputId": "089816a0-7b42-43fb-8b10-c5309fd03eb4" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "True Mean: 100\n", + "True Variance: 152\n", + "Estimated Mean: 99.46425688255611\n", + "Estimated Standard deviation: 167.37918914364832\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "###__Question 4__\n", + "\n", + "Firstly, let's break down the problem and understand what is asked of us.\n", + "\n", + "So, we have a dataset of binary classification, which means our data has outcomes 0 or 1 only. 
We want to use a logistic regression model for the classification and we've been given that the model parameters (conventionally taken as __w__) have a gaussian prior.\n", + "\n", + "Now, we want to formulate the likelihood, the prior and the posterior and then find the MAP estimate of the model parameters.\n", + "\n", + "After a bit of revision, we recall that in logistic regression we use the logistic function to define the likelihood function such that it is exactly of the form,\n", + "\n", + "$P(y=1|\\textbf{x})=\\frac{1}{1+e^{-(\\textbf{w}^{\\textbf{T}}\\textbf{x}+b)}}$\n", + "\n", + "Where, $\\textbf{w}$ is the vector of parameters.\n", + "\n", + "We might make assumptions on the class-conditional distribution, that is,\n", + "\n", + "$P(x|y)$ but it doesn't matter since here we'll be directly using MAP to estimate the parameters.\n", + "\n", + "In MAP, our goal is to maximise the posterior (unlike MLE where we maximise the likelihood function). As we already know,\n", + "\n", + "$P(\\textbf{w}|y)\\propto P(y|\\textbf{x},\\textbf{w}).P(\\textbf{w})$\n", + "\n", + "Then, we require the value of $\\textbf{w}$ such that the posterior is maximized.\n", + "\n", + "$argmax_{\\textbf{w}}[P(y|\\textbf{w}).P(\\textbf{w})]=argmax_{\\textbf{w}}\\{[ln[P(y|\\textbf{w})]+ ln[P(\\textbf{w})]\\}=argmin_{\\textbf{w}}\\{-[ln[P(y|\\textbf{w})]- ln[P(\\textbf{w})]\\}$\n", + "\n", + "Now, we have a binary classification situation, so we have to utilise the fact that Y can only take values 0 and 1. 
So, only one of the following two terms is going to be non-zero for any given Y,\n", + "\n", + "$P(Y=1|X)=\\frac{e^{(\\textbf{w}^{\\textbf{T}}\\textbf{x}+b)}}{1+e^{(\\textbf{w}^{\\textbf{T}}\\textbf{x}+b)}}$\n", + "\n", + "And,\n", + "\n", + "$P(Y=0|X)=\\frac{1}{1+e^{(\\textbf{w}^{\\textbf{T}}\\textbf{x}+b)}}$\n", + "\n", + "So now our goal becomes,\n", + "\n", + "$\\hat{\\textbf{w}}_{MAP}=argmax_{\\textbf{w}}\\{[ln[\\prod\\limits_{1}^{n}P(y_i|\\textbf{w})]+ ln[P(\\textbf{w})]\\}$\n", + "$=argmax_{\\textbf{w}}\\{[\\sum\\limits_{1}^{n}ln[P(y_i|\\textbf{w})]+ ln[P(\\textbf{w})]\\}$\n", + "\n", + "Now, according to the above utilisation, we can rewrite $ln[P(y_i|\\textbf{w})]$ as,\n", + "\n", + "$=\\sum\\limits_{1}^{n}Y_i.ln[P(Y_i=1|X_i,\\textbf{w})]+(1-Y_i).ln[P(Y_i=0|X_i,\\textbf{w})]$\n", + "\n", + "$=\\sum\\limits_{1}^{n}Y_i.ln[\\frac{P(Y_i=1|X_i,\\textbf{w})}{P(Y_i=0|X_i,\\textbf{w})}]+ln[P(Y_i=0|X_i,\\textbf{w})]$\n", + "\n", + "$=\\sum\\limits_{1}^{n}\\{Y_i.(\\textbf{w}^{\\textbf{T}}\\textbf{x}_i+b)-ln(1+e^{(\\textbf{w}^{\\textbf{T}}\\textbf{x}_i+b)})\\}$\n", + "\n", + "Now, let's come to the fact that our model paramters have a gaussian prior. 
We will pick the simplest and most convenient zero-mean gaussian with a known variance $\\sigma^2$ for every parameter.\n", + "\n", + "$w_j \\sim N(0,\\sigma^2)$ such that $P(\\textbf{w})=\\prod\\limits_{1}^{f}\\frac{1}{\\sigma\\sqrt{2\\pi}}e^{\\{\\frac{{-w_j}^2}{2\\sigma^2}\\}}$\n", + "\n", + "Then (excluding terms not containing $w_j$),\n", + "\n", + "$ln[P(\\textbf{w})]=-\\frac{1}{2\\sigma^2}\\sum\\limits_{1}^{f}{\\textbf{w}_j}^2$\n", + "\n", + "Putting everything together, our goal is,\n", + "\n", + "$argmin_{\\textbf{w}}\\{-[ln[P(y|\\textbf{w})]- ln[P(\\textbf{w})]\\}=\\frac{1}{2\\sigma^2}\\sum\\limits_{1}^{f}{\\textbf{w}_j}^2 - \\sum\\limits_{1}^{n}\\{Y_i.(\\textbf{w}^\\textbf{T}.x_i+b)-ln(1+e^{(\\textbf{w}^{\\textbf{T}}\\textbf{x}_i+b)})\\}$\n", + "\n", + "So far, I believe we can do the same sort of thing we've done in the previous question - define each of the functions and finally use Scipy to minimise this posterior.\n", + "\n", + "We'll have to specify the prior ourselves, so for the sake of simplicity, we can absorb the parameter $b$ into $w$ through an additional constant dimension in $w$ (read in some papers).\n", + "\n", + "So, $w$ has multivariate gaussian prior and once again, for simplicity, let's say it has just two dimensions (we'll also include $b$ so the final dimension of $w$ will be three). Our goal now looks like minimising the following function,\n", + "\n", + "$=\\sum\\limits_{1}^{f}\\frac{1}{2\\sigma^2}{\\textbf{w}_j}^2 -\\sum\\limits_{1}^{n}\\{Y_i.(\\textbf{w}^{\\textbf{T}}\\textbf{x}_i)-ln(1+e^{(\\textbf{w}^{\\textbf{T}}\\textbf{x}_i)})\\}$\n", + "\n", + "So, let us begin." + ], + "metadata": { + "id": "ugh_Avb9wn9f" + } + }, + { + "cell_type": "code", + "source": [ + "import numpy as np\n", + "from sklearn.datasets import make_classification\n", + "from scipy.optimize import minimize\n", + "\"\"\"\n", + "# first we'll have to generate a dataset, let size be n and number of features (dimension of w) be f. 
Here we have just one feature\n", + "n= 1000\n", + "f= 2+1\n", + "x, y = make_classification(n_samples=n,n_features=f,n_redundant=0,n_informative=3,n_repeated=0, random_state=444)\n", + "#print(x.shape)\n", + "#print(y)\n", + "\n", + "# now for w, it's up to us to choose the variance, so let's assume the following\n", + "pmean=0\n", + "pvar=5\n", + "\n", + "def logprior(w,variance):\n", + " logprior = (-1/(2*variance)) * np.sum(np.square(w))\n", + " return logprior\n", + "\"\"\"\n", + "# first we'll have to generate a dataset, let size be n and number of features (dimension of w -1) be f.\n", + "n = 1000\n", + "f = 2\n", + "priormean=0 # for every parameter\n", + "priormeanvector = np.zeros(f + 1) # absorbing parameter b\n", + "priorvariance=5 # for every parameter and they are independent of each other\n", + "priorcovariance = priorvariance * np.eye(f + 1) # absorbing parameter b, covariances are all 0, variance of each is equal to priorvariance\n", + "\n", + "truew = np.random.multivariate_normal(priormeanvector, priorcovariance)\n", + "print(\"True parameters used for data generation :\",truew)\n", + "\n", + "x = np.hstack((np.ones((n, 1)), np.random.uniform(-1, 1, size=(n, f)))) # bias is fixed at 1, generating random features (n of them, each is a vector with 3 elements, first is 1)\n", + "z = np.dot(x, truew)\n", + "p = 1 / (1 + np.exp(-z)) # y=1 has this probability\n", + "y = np.random.binomial(1, p)\n", + "\n", + "def logprior(w,priormean,priorvariance):\n", + " LP = (-1/(2*priorvariance)) * np.sum(np.square(w-priormean))\n", + " return LP\n", + "\n", + "def loglikelihood(w, x, y):\n", + " LL=0\n", + " count=0\n", + " for i in range(n):\n", + " z = np.dot(x[i], w)\n", + " LL+=y[i]*(x[i].dot(w.T))-np.log(1+np.exp(z))\n", + " return LL\n", + "\n", + "\n", + "def neglogposterior(w,priormean,priorvariance,x,y):\n", + " return -logprior(w,priormean,priorvariance)-loglikelihood(w,x,y)\n", + "\n", + "# like before, we need initial parameters\n", + "# 
print(x.shape)\n", + "initialparameters_w=np.array([2,2,2]) # w is a vector after all\n", + "\n", + "result = minimize(neglogposterior, initialparameters_w, args=(priormean,priorvariance,x,y), method='L-BFGS-B')\n", + "westimate=result.x\n", + "print(\"MAP Estimated parameters: \",westimate) # it's pretty close\n", + "\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "PB60-rFGx5Mi", + "outputId": "db68ff27-3cdf-4253-f0c9-73a31fbdb19e" + }, + "execution_count": 24, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "True parameters used for data generation : [ 1.20033799 -2.79411209 3.2057022 ]\n", + "MAP Estimated parameters: [ 1.24238212 -3.12122653 3.15600055]\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "### __Question 5__\n", + "\n", + "We have been asked to find the VC dimension of some concept classes. To make sure I understood this right, VC dimension for a concept class is simply the maximum number of points that the concept class can create a sort of separation (shatter) for (given each point belongs to one of two classes and taking all $2^n$ possibilities in mind) such that they're classified correctly in their regions and an important thing to note is that all we need is ONE configuration of these points (and the $2^n$ possibilities of class allocation) which can be shattered by the concept class in order to define the VC dimension for it. Meaning, we don't need every configuration (different locations of points, but any one) to be able to define the VC dimension. Also, I haven't gone through the math-intensive article yet.\n", + "\n", + "#### __(a) Constant Function__\n", + "Meaning this is a function that can take any constant value. So, from my understanding, given a set of points which are to be classified into two classes, a constant function can only take either \"positive\" or \"negative\" for all the given points. 
So, if we had 10 points, we could only assign either positive or negative to all the 10 points. If this understanding is correct, then this concept class can only classify 1 point correctly. If we had 2 points then our possibilities (despite their configuration) become,\n", + "\n", + "1. +,-\n", + "\n", + "2. -,+\n", + "\n", + "3. +,+\n", + "\n", + "4. -,-\n", + "\n", + "And in such a case, assigning only positive or only negative value to both points doesn't satisfy all 4 cases (but only one at a time, either case 3 or case 4). So, the VC dimension should be 1.\n", + "\n", + "#### __(b) Linear Function in d dimensions__\n", + "A linear function would generate a line in 2 dimensions, a plane in 3 dimensions and so on. From the reading, a linear classifier has cardinality n+1 (given n is the dimension?). Which makes sense for 2 dimensions as the VC dimension there is 3. Again, I have yet to see the mathematical proof for this one. The VC dimension should be d+1.\n", + "\n", + "#### __(c) Axis aligned rectangle in 2 dimensions__\n", + "Here, we are being restricted by \"axis aligned\". If we have 1,2 or 3 points in space, we can just enclose the points of one class within a large enough axis aligned rectangle. If we had 4 points, we can imagine a configuration where two points are lying sort of diagonally to each other but are close enough in any one dimension to allow us to put them in a narrow axis aligned rectangle. So, we'd still be able to enclose 4 points. In case of 5 points however, even if we have 3 points close together, a different combination of classes would cause difficulty like if we had the centre point of one class (+), the two side points of another class (-) and the other two outward points also of the same class as the centre point (+), then a 2-D axis aligned rectangle can't shatter these points. No other configuration works either. 
So, VC Dimension has to be 4.\n", + "\n", + "#### __(d) Intervals__\n", + "Intervals as in some interval on an infinite line. Now, if we have 3 points, they'd obviously be collinear and then the situation where a negative(or positive) point lies between two positive (or negative) points, would make it impossible to define an interval to shatter them. So, the VC dimension is 2.\n", + "\n", + "\n" + ], + "metadata": { + "id": "ByZ8ATC1re9E" + } + } + ] +} \ No newline at end of file diff --git a/assMath/probStat/Anupriya/Assignment1_Anupriya.pdf b/assMath/probStat/Anupriya/Assignment1_Anupriya.pdf new file mode 100644 index 00000000..7c9f74a4 Binary files /dev/null and b/assMath/probStat/Anupriya/Assignment1_Anupriya.pdf differ diff --git a/assMath/probStat/Anupriya/Assignment1_Anupriya.pdf:Zone.Identifier b/assMath/probStat/Anupriya/Assignment1_Anupriya.pdf:Zone.Identifier new file mode 100644 index 00000000..053d1127 --- /dev/null +++ b/assMath/probStat/Anupriya/Assignment1_Anupriya.pdf:Zone.Identifier @@ -0,0 +1,3 @@ +[ZoneTransfer] +ZoneId=3 +HostUrl=about:internet diff --git a/assMath/probStat/Anupriya/kalmanfilter/README.md b/assMath/probStat/Anupriya/kalmanfilter/README.md new file mode 100644 index 00000000..f0e2c165 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/README.md @@ -0,0 +1,13 @@ +# Assignment +Make changes in deep_sort/kalman_filter_assignment.py +Run test.py code to test your code +Don't try to search for the original code. The assignment is to get you more familiar with using statistics in a real-life problem setting, and it's for your own benefit. + +## Dependencies + +The code is compatible with Python 2.7 and 3. 
The following dependencies are +needed to run the tracker: + +* NumPy +* sklearn +* OpenCV diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__init__.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__init__.py new file mode 100644 index 00000000..f708a9b2 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__init__.py @@ -0,0 +1 @@ +__version__ = "1.3.2" diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/__init__.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 00000000..be38ef1b Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/__init__.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/__init__.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..7b79e4f5 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/__init__.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/deepsort_tracker.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/deepsort_tracker.cpython-38.pyc new file mode 100644 index 00000000..46d6f5f5 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/deepsort_tracker.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/deepsort_tracker.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/deepsort_tracker.cpython-39.pyc new file mode 100644 index 00000000..3e912377 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/__pycache__/deepsort_tracker.cpython-39.pyc differ diff --git 
# vim: expandtab:ts=4:sw=4
import numpy as np
import scipy.linalg


"""
Table for the 0.95 quantile of the chi-square distribution with N degrees of
freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv
function and used as Mahalanobis gating threshold.
"""
### Don't change
chi2inv95 = {
    1: 3.8415,
    2: 5.9915,
    3: 7.8147,
    4: 9.4877,
    5: 11.070,
    6: 12.592,
    7: 14.067,
    8: 15.507,
    9: 16.919,
}


class KalmanFilter(object):
    """
    A simple Kalman filter for tracking bounding boxes in image space.

    The 8-dimensional state space

        x, y, a, h, vx, vy, va, vh

    contains the bounding box center position (x, y), aspect ratio a, height h,
    and their respective velocities.

    Object motion follows a constant velocity model. The bounding box location
    (x, y, a, h) is taken as direct observation of the state space (linear
    observation model).
    """

    def __init__(self):
        dt = 1.0

        # State transition matrix F (8x8) for the constant-velocity model:
        # each position component is advanced by its velocity times dt.
        self.F = np.eye(8)
        self.F[:4, 4:] = dt * np.eye(4)

        # Measurement matrix H (4x8): only (x, y, a, h) are observed; the
        # four velocity components are hidden.  NOTE: the original code used
        # a 4x4 identity here, which cannot map the 8-dim state into the
        # 4-dim measurement space and made project()/update() crash.
        self.H = np.eye(4, 8)

        # Process noise covariance Q (8x8) and measurement noise covariance
        # R (4x4); identity weights as in the assignment baseline.
        self.Q = np.eye(8)
        self.R = np.eye(4)

    def initiate(self, measurement):
        """Create track from unassociated measurement.

        Parameters
        ----------
        measurement : ndarray
            Bounding box coordinates (x, y, a, h) with center position (x, y),
            aspect ratio a, and height h.

        Returns
        -------
        (ndarray, ndarray)
            Returns the mean vector (8 dimensional) and covariance matrix (8x8
            dimensional) of the new track. Unobserved velocities are
            initialized to 0 mean.
        """
        # Float buffer: np.array([0, 0, ...]) would be integer-typed and
        # silently truncate the float measurement on assignment.
        mean = np.zeros(8)
        mean[:4] = measurement
        covariance = np.eye(8)
        return mean, covariance

    def predict(self, mean, covariance):
        """Run Kalman filter prediction step.

        Parameters
        ----------
        mean : ndarray
            The 8 dimensional mean vector of the object state at the previous
            time step.
        covariance : ndarray
            The 8x8 dimensional covariance matrix of the object state at the
            previous time step.

        Returns
        -------
        (ndarray, ndarray)
            Returns the mean vector and covariance matrix of the predicted
            state.
        """
        # State extrapolation: x(n+1|n) = F x(n)
        mean = self.F.dot(mean)
        # Covariance extrapolation: P(n+1|n) = F P(n) F^T + Q
        covariance = self.F.dot(covariance).dot(self.F.T) + self.Q
        return mean, covariance

    def project(self, mean, covariance):
        """Project state distribution to measurement space.

        Parameters
        ----------
        mean : ndarray
            The state's mean vector (8 dimensional array).
        covariance : ndarray
            The state's covariance matrix (8x8 dimensional).

        Returns
        -------
        (ndarray, ndarray)
            Returns the projected mean (4-dim) and covariance (4x4) of the
            given state estimate.
        """
        mean = self.H.dot(mean)
        covariance = self.H.dot(covariance).dot(self.H.T) + self.R
        return mean, covariance

    def update(self, mean, covariance, measurement):
        """Run Kalman filter correction step.

        Parameters
        ----------
        mean : ndarray
            The predicted state's mean vector (8 dimensional).
        covariance : ndarray
            The state's covariance matrix (8x8 dimensional).
        measurement : ndarray
            The 4 dimensional measurement vector (x, y, a, h), where (x, y)
            is the center position, a the aspect ratio, and h the height of
            the bounding box.

        Returns
        -------
        (ndarray, ndarray)
            Returns the measurement-corrected state distribution.
        """
        # Innovation covariance S = H P H^T + R and Kalman gain
        # K = P H^T S^-1.  (The original wrote `np.linalg,inv(...)` -- a
        # comma typo that evaluated a tuple and raised NameError at runtime.)
        innovation_cov = self.H.dot(covariance).dot(self.H.T) + self.R
        kalman_gain = covariance.dot(self.H.T).dot(np.linalg.inv(innovation_cov))

        # State update: x(n,n) = x(n,n-1) + K (z - H x(n,n-1))
        innovation = measurement - self.H.dot(mean)
        new_mean = mean + kalman_gain.dot(innovation)

        # Joseph-form covariance update (numerically stable long form):
        # P(n,n) = (I - K H) P (I - K H)^T + K R K^T.
        # Note the order: I (8x8) minus K.H, not np.eye(4) - H.K as the
        # original had -- that expression was both the wrong size and the
        # wrong operand order.
        i_kh = np.eye(8) - kalman_gain.dot(self.H)
        new_covariance = (
            i_kh.dot(covariance).dot(i_kh.T)
            + kalman_gain.dot(self.R).dot(kalman_gain.T)
        )
        return new_mean, new_covariance

    def gating_distance(self, mean, covariance, measurements, only_position=False):
        """Compute gating distance between state distribution and measurements.

        A suitable distance threshold can be obtained from `chi2inv95`. If
        `only_position` is False, the chi-square distribution has 4 degrees of
        freedom, otherwise 2.

        Parameters
        ----------
        mean : ndarray
            Mean vector over the state distribution (8 dimensional).
        covariance : ndarray
            Covariance of the state distribution (8x8 dimensional).
        measurements : ndarray
            An Nx4 dimensional matrix of N measurements, each in
            format (x, y, a, h) where (x, y) is the bounding box center
            position, a the aspect ratio, and h the height.
        only_position : Optional[bool]
            If True, distance computation is done with respect to the bounding
            box center position only.

        Returns
        -------
        ndarray
            Returns an array of length N, where the i-th element contains the
            squared Mahalanobis distance between (mean, covariance) and
            `measurements[i]`.
        """
        ### Don't change anything
        mean, covariance = self.project(mean, covariance)
        if only_position:
            mean, covariance = mean[:2], covariance[:2, :2]
            measurements = measurements[:, :2]

        cholesky_factor = np.linalg.cholesky(covariance)
        d = measurements - mean
        z = scipy.linalg.solve_triangular(
            cholesky_factor, d.T, lower=True, check_finite=False, overwrite_b=True
        )
        squared_maha = np.sum(z * z, axis=0)
        return squared_maha
b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/detection.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/detection.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/detection.cpython-39.pyc new file mode 100644 index 00000000..47cf7d13 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/detection.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/iou_matching.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/iou_matching.cpython-38.pyc new file mode 100644 index 00000000..fd6c8b67 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/iou_matching.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/iou_matching.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/iou_matching.cpython-39.pyc new file mode 100644 index 00000000..aa1759b4 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/iou_matching.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/kalman_filter.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/kalman_filter.cpython-38.pyc new file mode 100644 index 00000000..bd5aa79c Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/kalman_filter.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/kalman_filter.cpython-39.pyc 
b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/kalman_filter.cpython-39.pyc new file mode 100644 index 00000000..a4bf57fd Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/kalman_filter.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/linear_assignment.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/linear_assignment.cpython-38.pyc new file mode 100644 index 00000000..c19b0f97 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/linear_assignment.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/linear_assignment.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/linear_assignment.cpython-39.pyc new file mode 100644 index 00000000..10fa3c9b Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/linear_assignment.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/nn_matching.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/nn_matching.cpython-38.pyc new file mode 100644 index 00000000..7c209fd1 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/nn_matching.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/nn_matching.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/nn_matching.cpython-39.pyc new file mode 100644 index 00000000..424d4863 Binary files /dev/null and 
b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/nn_matching.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/track.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/track.cpython-38.pyc new file mode 100644 index 00000000..3f0ab52e Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/track.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/track.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/track.cpython-39.pyc new file mode 100644 index 00000000..b08ecbb0 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/track.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/tracker.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/tracker.cpython-38.pyc new file mode 100644 index 00000000..a33c461c Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/tracker.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/tracker.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/tracker.cpython-39.pyc new file mode 100644 index 00000000..519dcc76 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/__pycache__/tracker.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/detection.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/detection.py new file mode 100644 index 00000000..30c0a52e --- /dev/null +++ 
# vim: expandtab:ts=4:sw=4
import numpy as np

# Same value as linear_assignment.INFTY_COST; kept local so the IoU metric
# does not need an import of the matcher module just for one constant.
INFTY_COST = 1e5


class Detection(object):
    """
    This class represents a bounding box detection in a single image.

    Parameters
    ----------
    ltwh : array_like
        Bounding box in format `(top left x, top left y, width, height)`.
    confidence : float
        Detector confidence score.
    feature : array_like
        A feature vector that describes the object contained in this image.
    class_name : Optional[str]
        Detector predicted class name.
    instance_mask : Optional
        Instance mask corresponding to the bounding box.
    others : Optional
        Supplementary data carried along as a "memory" retrievable from the
        downstream track.

    Attributes
    ----------
    ltwh : ndarray
        Bounding box in format `(top left x, top left y, width, height)`.
    confidence : float
        Detector confidence score.
    feature : ndarray
        A feature vector that describes the object contained in this image.
    """

    def __init__(self, ltwh, confidence, feature, class_name=None, instance_mask=None, others=None):
        self.ltwh = np.asarray(ltwh, dtype=np.float32)
        self.confidence = float(confidence)
        self.feature = np.asarray(feature, dtype=np.float32)
        self.class_name = class_name
        self.instance_mask = instance_mask
        self.others = others

    def get_ltwh(self):
        """Return a copy of the box as `(top left x, top left y, w, h)`."""
        return self.ltwh.copy()

    def to_tlbr(self):
        """Convert bounding box to format `(min x, min y, max x, max y)`,
        i.e., `(top left, bottom right)`.
        """
        ret = self.ltwh.copy()
        ret[2:] += ret[:2]
        return ret

    def to_xyah(self):
        """Convert bounding box to format `(center x, center y, aspect ratio,
        height)`, where the aspect ratio is `width / height`.
        """
        ret = self.ltwh.copy()
        ret[:2] += ret[2:] / 2
        ret[2] /= ret[3]
        return ret


def iou(bbox, candidates):
    """Compute intersection over union.

    Parameters
    ----------
    bbox : ndarray
        A bounding box in format `(top left x, top left y, width, height)`.
    candidates : ndarray
        A matrix of candidate bounding boxes (one per row) in the same format
        as `bbox`.

    Returns
    -------
    ndarray
        The intersection over union in [0, 1] between the `bbox` and each
        candidate. A higher score means a larger fraction of the `bbox` is
        occluded by the candidate.
    """
    bbox_tl, bbox_br = bbox[:2], bbox[:2] + bbox[2:]
    candidates_tl = candidates[:, :2]
    candidates_br = candidates[:, :2] + candidates[:, 2:]

    # Broadcasting replaces the original np.c_/np.newaxis column stacking;
    # the intersection rectangle is clamped to zero size when boxes are
    # disjoint.
    tl = np.maximum(bbox_tl, candidates_tl)
    br = np.minimum(bbox_br, candidates_br)
    wh = np.maximum(0.0, br - tl)

    area_intersection = wh.prod(axis=1)
    area_bbox = bbox[2:].prod()
    area_candidates = candidates[:, 2:].prod(axis=1)
    return area_intersection / (area_bbox + area_candidates - area_intersection)


def iou_cost(tracks, detections, track_indices=None, detection_indices=None):
    """An intersection over union distance metric.

    Parameters
    ----------
    tracks : List[deep_sort.track.Track]
        A list of tracks.
    detections : List[deep_sort.detection.Detection]
        A list of detections.
    track_indices : Optional[List[int]]
        A list of indices to tracks that should be matched. Defaults to
        all `tracks`.
    detection_indices : Optional[List[int]]
        A list of indices to detections that should be matched. Defaults
        to all `detections`.

    Returns
    -------
    ndarray
        Returns a cost matrix of shape
        len(track_indices), len(detection_indices) where entry (i, j) is
        `1 - iou(tracks[track_indices[i]], detections[detection_indices[j]])`.
    """
    if track_indices is None:
        track_indices = np.arange(len(tracks))
    if detection_indices is None:
        detection_indices = np.arange(len(detections))

    cost_matrix = np.zeros((len(track_indices), len(detection_indices)))
    candidates = np.asarray([detections[i].ltwh for i in detection_indices])

    for row, track_idx in enumerate(track_indices):
        # Tracks not updated in the previous frame are effectively
        # unmatched by this metric.
        if tracks[track_idx].time_since_update > 1:
            cost_matrix[row, :] = INFTY_COST
            continue

        bbox = tracks[track_idx].to_ltwh()
        cost_matrix[row, :] = 1.0 - iou(bbox, candidates)
    return cost_matrix
"""
Table for the 0.95 quantile of the chi-square distribution with N degrees of
freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv
function and used as Mahalanobis gating threshold.
"""
### Don't change
chi2inv95 = {
    1: 3.8415,
    2: 5.9915,
    3: 7.8147,
    4: 9.4877,
    5: 11.070,
    6: 12.592,
    7: 14.067,
    8: 15.507,
    9: 16.919,
}


class KalmanFilter(object):
    """
    A simple Kalman filter for tracking bounding boxes in image space.

    The 8-dimensional state space

        x, y, a, h, vx, vy, va, vh

    contains the bounding box center position (x, y), aspect ratio a, height h,
    and their respective velocities.

    Object motion follows a constant velocity model. The bounding box location
    (x, y, a, h) is taken as direct observation of the state space (linear
    observation model).
    """

    def __init__(self):
        dt = 1.0

        # State transition matrix F (8x8) for the constant-velocity model:
        # each position component is advanced by its velocity times dt.
        self.F = np.eye(8)
        self.F[:4, 4:] = dt * np.eye(4)

        # Measurement matrix H (4x8): only (x, y, a, h) are observed; the
        # four velocity components are hidden.  NOTE: the original code used
        # a 4x4 identity here, which cannot map the 8-dim state into the
        # 4-dim measurement space and made project()/update() crash.
        self.H = np.eye(4, 8)

        # Process noise covariance Q (8x8) and measurement noise covariance
        # R (4x4); identity weights as in the assignment baseline.
        self.Q = np.eye(8)
        self.R = np.eye(4)

    def initiate(self, measurement):
        """Create track from unassociated measurement.

        Parameters
        ----------
        measurement : ndarray
            Bounding box coordinates (x, y, a, h) with center position (x, y),
            aspect ratio a, and height h.

        Returns
        -------
        (ndarray, ndarray)
            Returns the mean vector (8 dimensional) and covariance matrix (8x8
            dimensional) of the new track. Unobserved velocities are
            initialized to 0 mean.
        """
        # Float buffer: np.array([0, 0, ...]) would be integer-typed and
        # silently truncate the float measurement on assignment.
        mean = np.zeros(8)
        mean[:4] = measurement
        covariance = np.eye(8)
        return mean, covariance

    def predict(self, mean, covariance):
        """Run Kalman filter prediction step.

        Parameters
        ----------
        mean : ndarray
            The 8 dimensional mean vector of the object state at the previous
            time step.
        covariance : ndarray
            The 8x8 dimensional covariance matrix of the object state at the
            previous time step.

        Returns
        -------
        (ndarray, ndarray)
            Returns the mean vector and covariance matrix of the predicted
            state.
        """
        # State extrapolation: x(n+1|n) = F x(n)
        mean = self.F.dot(mean)
        # Covariance extrapolation: P(n+1|n) = F P(n) F^T + Q
        covariance = self.F.dot(covariance).dot(self.F.T) + self.Q
        return mean, covariance

    def project(self, mean, covariance):
        """Project state distribution to measurement space.

        Parameters
        ----------
        mean : ndarray
            The state's mean vector (8 dimensional array).
        covariance : ndarray
            The state's covariance matrix (8x8 dimensional).

        Returns
        -------
        (ndarray, ndarray)
            Returns the projected mean (4-dim) and covariance (4x4) of the
            given state estimate.
        """
        mean = self.H.dot(mean)
        covariance = self.H.dot(covariance).dot(self.H.T) + self.R
        return mean, covariance

    def update(self, mean, covariance, measurement):
        """Run Kalman filter correction step.

        Parameters
        ----------
        mean : ndarray
            The predicted state's mean vector (8 dimensional).
        covariance : ndarray
            The state's covariance matrix (8x8 dimensional).
        measurement : ndarray
            The 4 dimensional measurement vector (x, y, a, h), where (x, y)
            is the center position, a the aspect ratio, and h the height of
            the bounding box.

        Returns
        -------
        (ndarray, ndarray)
            Returns the measurement-corrected state distribution.
        """
        # Innovation covariance S = H P H^T + R and Kalman gain
        # K = P H^T S^-1.  (The original wrote `np.linalg,inv(...)` -- a
        # comma typo that evaluated a tuple and raised NameError at runtime.)
        innovation_cov = self.H.dot(covariance).dot(self.H.T) + self.R
        kalman_gain = covariance.dot(self.H.T).dot(np.linalg.inv(innovation_cov))

        # State update: x(n,n) = x(n,n-1) + K (z - H x(n,n-1))
        innovation = measurement - self.H.dot(mean)
        new_mean = mean + kalman_gain.dot(innovation)

        # Joseph-form covariance update (numerically stable long form):
        # P(n,n) = (I - K H) P (I - K H)^T + K R K^T.
        # Note the order: I (8x8) minus K.H, not np.eye(4) - H.K as the
        # original had -- that expression was both the wrong size and the
        # wrong operand order.
        i_kh = np.eye(8) - kalman_gain.dot(self.H)
        new_covariance = (
            i_kh.dot(covariance).dot(i_kh.T)
            + kalman_gain.dot(self.R).dot(kalman_gain.T)
        )
        return new_mean, new_covariance

    def gating_distance(self, mean, covariance, measurements, only_position=False):
        """Compute gating distance between state distribution and measurements.

        A suitable distance threshold can be obtained from `chi2inv95`. If
        `only_position` is False, the chi-square distribution has 4 degrees of
        freedom, otherwise 2.

        Parameters
        ----------
        mean : ndarray
            Mean vector over the state distribution (8 dimensional).
        covariance : ndarray
            Covariance of the state distribution (8x8 dimensional).
        measurements : ndarray
            An Nx4 dimensional matrix of N measurements, each in
            format (x, y, a, h) where (x, y) is the bounding box center
            position, a the aspect ratio, and h the height.
        only_position : Optional[bool]
            If True, distance computation is done with respect to the bounding
            box center position only.

        Returns
        -------
        ndarray
            Returns an array of length N, where the i-th element contains the
            squared Mahalanobis distance between (mean, covariance) and
            `measurements[i]`.
        """
        ### Don't change anything
        mean, covariance = self.project(mean, covariance)
        if only_position:
            mean, covariance = mean[:2], covariance[:2, :2]
            measurements = measurements[:, :2]

        cholesky_factor = np.linalg.cholesky(covariance)
        d = measurements - mean
        z = scipy.linalg.solve_triangular(
            cholesky_factor, d.T, lower=True, check_finite=False, overwrite_b=True
        )
        squared_maha = np.sum(z * z, axis=0)
        return squared_maha
def min_cost_matching(
    distance_metric,
    max_distance,
    tracks,
    detections,
    track_indices=None,
    detection_indices=None,
):
    """Solve linear assignment problem.

    Parameters
    ----------
    distance_metric : Callable[List[Track], List[Detection], List[int], List[int]) -> ndarray
        Given tracks, detections, N track indices and M detection indices,
        returns the NxM cost matrix where entry (i, j) is the association
        cost between the i-th indexed track and the j-th indexed detection.
    max_distance : float
        Gating threshold. Associations with cost larger than this value are
        disregarded.
    tracks : List[track.Track]
        A list of predicted tracks at the current time step.
    detections : List[detection.Detection]
        A list of detections at the current time step.
    track_indices : List[int]
        List of track indices that maps rows in `cost_matrix` to tracks in
        `tracks`. Defaults to all tracks.
    detection_indices : List[int]
        List of detection indices that maps columns in `cost_matrix` to
        detections in `detections`. Defaults to all detections.

    Returns
    -------
    (List[(int, int)], List[int], List[int])
        A tuple of (matched track/detection index pairs, unmatched track
        indices, unmatched detection indices).
    """
    if track_indices is None:
        track_indices = np.arange(len(tracks))
    if detection_indices is None:
        detection_indices = np.arange(len(detections))

    if len(detection_indices) == 0 or len(track_indices) == 0:
        return [], track_indices, detection_indices  # Nothing to match.

    cost_matrix = distance_metric(tracks, detections, track_indices, detection_indices)
    # Clamp gated entries just above the threshold so the Hungarian solver
    # never prefers an infeasible pairing; such pairings are filtered out
    # again below by the `> max_distance` check.
    cost_matrix[cost_matrix > max_distance] = max_distance + 1e-5
    indices = np.vstack(linear_sum_assignment(cost_matrix)).T

    matches, unmatched_tracks, unmatched_detections = [], [], []
    for col, detection_idx in enumerate(detection_indices):
        if col not in indices[:, 1]:
            unmatched_detections.append(detection_idx)
    for row, track_idx in enumerate(track_indices):
        if row not in indices[:, 0]:
            unmatched_tracks.append(track_idx)
    for row, col in indices:
        track_idx = track_indices[row]
        detection_idx = detection_indices[col]
        if cost_matrix[row, col] > max_distance:
            # Assigned by the solver but beyond the gate: treat as unmatched.
            unmatched_tracks.append(track_idx)
            unmatched_detections.append(detection_idx)
        else:
            matches.append((track_idx, detection_idx))
    return matches, unmatched_tracks, unmatched_detections


def matching_cascade(
    distance_metric,
    max_distance,
    cascade_depth,
    tracks,
    detections,
    track_indices=None,
    detection_indices=None,
):
    """Run matching cascade.

    Tracks updated more recently are matched first: level `l` of the cascade
    only considers tracks with `time_since_update == 1 + l`, so fresher tracks
    get priority over detections.

    Parameters
    ----------
    distance_metric : Callable[List[Track], List[Detection], List[int], List[int]) -> ndarray
        Given tracks, detections, N track indices and M detection indices,
        returns the NxM cost matrix where entry (i, j) is the association
        cost between the i-th indexed track and the j-th indexed detection.
    max_distance : float
        Gating threshold. Associations with cost larger than this value are
        disregarded.
    cascade_depth : int
        The cascade depth; should be set to the maximum track age.
    tracks : List[track.Track]
        A list of predicted tracks at the current time step.
    detections : List[detection.Detection]
        A list of detections at the current time step.
    track_indices : Optional[List[int]]
        List of track indices to consider. Defaults to all tracks.
    detection_indices : Optional[List[int]]
        List of detection indices to consider. Defaults to all detections.

    Returns
    -------
    (List[(int, int)], List[int], List[int])
        A tuple of (matched track/detection index pairs, unmatched track
        indices, unmatched detection indices).
    """
    if track_indices is None:
        track_indices = list(range(len(tracks)))
    if detection_indices is None:
        detection_indices = list(range(len(detections)))

    unmatched_detections = detection_indices
    matches = []
    for level in range(cascade_depth):
        if len(unmatched_detections) == 0:  # No detections left
            break

        track_indices_l = [
            k for k in track_indices if tracks[k].time_since_update == 1 + level
        ]
        if len(track_indices_l) == 0:  # Nothing to match at this level
            continue

        matches_l, _, unmatched_detections = min_cost_matching(
            distance_metric,
            max_distance,
            tracks,
            detections,
            track_indices_l,
            unmatched_detections,
        )
        matches += matches_l
    unmatched_tracks = list(set(track_indices) - set(k for k, _ in matches))
    return matches, unmatched_tracks, unmatched_detections
+ cost_matrix : ndarray + The NxM dimensional cost matrix, where N is the number of track indices + and M is the number of detection indices, such that entry (i, j) is the + association cost between `tracks[track_indices[i]]` and + `detections[detection_indices[j]]`. + tracks : List[track.Track] + A list of predicted tracks at the current time step. + detections : List[detection.Detection] + A list of detections at the current time step. + track_indices : List[int] + List of track indices that maps rows in `cost_matrix` to tracks in + `tracks` (see description above). + detection_indices : List[int] + List of detection indices that maps columns in `cost_matrix` to + detections in `detections` (see description above). + gated_cost : Optional[float] + Entries in the cost matrix corresponding to infeasible associations are + set this value. Defaults to a very large value. + only_position : Optional[bool] + If True, only the x, y position of the state distribution is considered + during gating. Defaults to False. + + Returns + ------- + ndarray + Returns the modified cost matrix. 
+ + """ + gating_dim = 2 if only_position else 4 + gating_threshold = kalman_filter.chi2inv95[gating_dim] + measurements = np.asarray([detections[i].to_xyah() for i in detection_indices]) + for row, track_idx in enumerate(track_indices): + track = tracks[track_idx] + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position + ) + cost_matrix[row, gating_distance > gating_threshold] = gated_cost + return cost_matrix diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/nn_matching.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/nn_matching.py new file mode 100644 index 00000000..df6445ea --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/nn_matching.py @@ -0,0 +1,175 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np + + +def _pdist(a, b): + """Compute pair-wise squared distance between points in `a` and `b`. + + Parameters + ---------- + a : array_like + An NxM matrix of N samples of dimensionality M. + b : array_like + An LxM matrix of L samples of dimensionality M. + + Returns + ------- + ndarray + Returns a matrix of size len(a), len(b) such that eleement (i, j) + contains the squared distance between `a[i]` and `b[j]`. + + """ + a, b = np.asarray(a), np.asarray(b) + if len(a) == 0 or len(b) == 0: + return np.zeros((len(a), len(b))) + a2, b2 = np.square(a).sum(axis=1), np.square(b).sum(axis=1) + r2 = -2.0 * np.dot(a, b.T) + a2[:, None] + b2[None, :] + r2 = np.clip(r2, 0.0, float(np.inf)) + return r2 + + +def _cosine_distance(a, b, data_is_normalized=False): + """Compute pair-wise cosine distance between points in `a` and `b`. + + Parameters + ---------- + a : array_like + An NxM matrix of N samples of dimensionality M. + b : array_like + An LxM matrix of L samples of dimensionality M. + data_is_normalized : Optional[bool] + If True, assumes rows in a and b are unit length vectors. 
+ Otherwise, a and b are explicitly normalized to lenght 1. + + Returns + ------- + ndarray + Returns a matrix of size len(a), len(b) such that eleement (i, j) + contains the squared distance between `a[i]` and `b[j]`. + + """ + if not data_is_normalized: + a = np.asarray(a) / np.linalg.norm(a, axis=1, keepdims=True) + b = np.asarray(b) / np.linalg.norm(b, axis=1, keepdims=True) + return 1.0 - np.dot(a, b.T) + + +def _nn_euclidean_distance(x, y): + """Helper function for nearest neighbor distance metric (Euclidean). + + Parameters + ---------- + x : ndarray + A matrix of N row-vectors (sample points). + y : ndarray + A matrix of M row-vectors (query points). + + Returns + ------- + ndarray + A vector of length M that contains for each entry in `y` the + smallest Euclidean distance to a sample in `x`. + + """ + distances = _pdist(x, y) + return np.maximum(0.0, distances.min(axis=0)) + + +def _nn_cosine_distance(x, y): + """Helper function for nearest neighbor distance metric (cosine). + + Parameters + ---------- + x : ndarray + A matrix of N row-vectors (sample points). + y : ndarray + A matrix of M row-vectors (query points). + + Returns + ------- + ndarray + A vector of length M that contains for each entry in `y` the + smallest cosine distance to a sample in `x`. + + """ + distances = _cosine_distance(x, y) + return distances.min(axis=0) + + +class NearestNeighborDistanceMetric(object): + """ + A nearest neighbor distance metric that, for each target, returns + the closest distance to any sample that has been observed so far. + + Parameters + ---------- + metric : str + Either "euclidean" or "cosine". + matching_threshold: float + The matching threshold. Samples with larger distance are considered an + invalid match. + budget : Optional[int] + If not None, fix samples per class to at most this number. Removes + the oldest samples when the budget is reached. 
+ + Attributes + ---------- + samples : Dict[int -> List[ndarray]] + A dictionary that maps from target identities to the list of samples + that have been observed so far. + + """ + + def __init__(self, metric, matching_threshold, budget=None): + + if metric == "euclidean": + self._metric = _nn_euclidean_distance + elif metric == "cosine": + self._metric = _nn_cosine_distance + else: + raise ValueError("Invalid metric; must be either 'euclidean' or 'cosine'") + self.matching_threshold = matching_threshold + self.budget = budget + self.samples = {} + + def partial_fit(self, features, targets, active_targets): + """Update the distance metric with new data. + + Parameters + ---------- + features : ndarray + An NxM matrix of N features of dimensionality M. + targets : ndarray + An integer array of associated target identities. + active_targets : List[int] + A list of targets that are currently present in the scene. + + """ + for feature, target in zip(features, targets): + self.samples.setdefault(target, []).append(feature) + if self.budget is not None: + self.samples[target] = self.samples[target][-self.budget :] + self.samples = {k: self.samples[k] for k in active_targets} + + def distance(self, features, targets): + """Compute distance between features and targets. + + Parameters + ---------- + features : ndarray + An NxM matrix of N features of dimensionality M. + targets : List[int] + A list of targets to match the given `features` against. + + Returns + ------- + ndarray + Returns a cost matrix of shape len(targets), len(features), where + element (i, j) contains the closest squared distance between + `targets[i]` and `features[j]`. 
+ + """ + cost_matrix = np.zeros((len(targets), len(features))) + for i, target in enumerate(targets): + cost_matrix[i, :] = self._metric(self.samples[target], features) + return cost_matrix diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/track.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/track.py new file mode 100644 index 00000000..3389de72 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/track.py @@ -0,0 +1,281 @@ +# vim: expandtab:ts=4:sw=4 +class TrackState: + """ + Enumeration type for the single target track state. Newly created tracks are + classified as `tentative` until enough evidence has been collected. Then, + the track state is changed to `confirmed`. Tracks that are no longer alive + are classified as `deleted` to mark them for removal from the set of active + tracks. + + """ + + Tentative = 1 + Confirmed = 2 + Deleted = 3 + + +class Track: + """ + A single target track with state space `(x, y, a, h)` and associated + velocities, where `(x, y)` is the center of the bounding box, `a` is the + aspect ratio and `h` is the height. + + Parameters + ---------- + mean : ndarray + Mean vector of the initial state distribution. + covariance : ndarray + Covariance matrix of the initial state distribution. + track_id : int + A unique track identifier. + n_init : int + Number of consecutive detections before the track is confirmed. The + track state is set to `Deleted` if a miss occurs within the first + `n_init` frames. + max_age : int + The maximum number of consecutive misses before the track state is + set to `Deleted`. + feature : Optional[ndarray] + Feature vector of the detection this track originates from. If not None, + this feature is added to the `features` cache. 
+ original_ltwh : Optional List + Bounding box associated with matched detection + det_class : Optional str + Classname of matched detection + det_conf : Optional float + Confidence associated with matched detection + instance_mask : Optional + Instance mask associated with matched detection + others : Optional any + Any supplementary fields related to matched detection + + Attributes + ---------- + mean : ndarray + Mean vector of the initial state distribution. + covariance : ndarray + Covariance matrix of the initial state distribution. + track_id : int + A unique track identifier. + hits : int + Total number of measurement updates. + age : int + Total number of frames since first occurrence. + time_since_update : int + Total number of frames since last measurement update. + state : TrackState + The current track state. + features : List[ndarray] + A cache of features. On each measurement update, the associated feature + vector is added to this list. + + """ + + def __init__( + self, + mean, + covariance, + track_id, + n_init, + max_age, + feature=None, + original_ltwh=None, + det_class=None, + det_conf=None, + instance_mask=None, + others=None, + ): + self.mean = mean + self.covariance = covariance + self.track_id = track_id + self.hits = 1 + self.age = 1 + self.time_since_update = 0 + + self.state = TrackState.Tentative + self.features = [] + self.latest_feature = None + if feature is not None: + self.features.append(feature) + self.latest_feature = feature + + + self._n_init = n_init + self._max_age = max_age + + self.original_ltwh = original_ltwh + self.det_class = det_class + self.det_conf = det_conf + self.instance_mask = instance_mask + self.others = others + + def to_tlwh(self, orig=False, orig_strict=False): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. This function is POORLY NAMED. But we are keeping the way it works the way it works in order not to break any older libraries that depend on this. 
+ + Returns + ------- + ndarray + The KF-predicted bounding box by default. + If `orig` is True and track is matched to a detection this round, then the original det is returned. + """ + return self.to_ltwh(orig=orig, orig_strict=orig_strict) + + def to_ltwh(self, orig=False, orig_strict=False): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + + Params + ------ + orig : bool + To use original detection (True) or KF predicted (False). Only works for original dets that are horizontal BBs. + orig_strict: bool + Only relevant when orig is True. If orig_strict is True, it ONLY outputs original bbs and will not output kalman mean even if original bb is not available. + + Returns + ------- + ndarray + The KF-predicted bounding box by default. + If `orig` is True and track is matched to a detection this round, then the original det is returned. + + """ + if orig: + if self.original_ltwh is None: + if orig_strict: + return None + # else if not orig_strict, return kalman means below + else: + return self.original_ltwh.copy() + + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + def to_tlbr(self, orig=False, orig_strict=False): + """Get current position in bounding box format `(min x, miny, max x, + max y)`. This original function is POORLY NAMED. But we are keeping the way it works the way it works in order not to break any older projects that depend on this. + USE THIS AT YOUR OWN RISK. LIESSSSSSSSSS! + Returns LIES + ------- + ndarray + The KF-predicted bounding box by default. + If `orig` is True and track is matched to a detection this round, then the original det is returned. + """ + return self.to_ltrb(orig=orig, orig_strict=orig_strict) + + def to_ltrb(self, orig=False, orig_strict=False): + """Get current position in bounding box format `(min x, miny, max x, + max y)`. + + Params + ------ + orig : bool + To use original detection (True) or KF predicted (False). 
Only works for original dets that are horizontal BBs. + + Returns + ------- + ndarray + The KF-predicted bounding box by default. + If `orig` is True and track is matched to a detection this round, then the original det is returned. + """ + ret = self.to_ltwh(orig=orig, orig_strict=orig_strict) + if ret is not None: + ret[2:] = ret[:2] + ret[2:] + return ret + + def get_det_conf(self): + """ + `det_conf` will be None is there are no associated detection this round + """ + return self.det_conf + + def get_det_class(self): + """ + Only `det_class` will be persisted in the track even if there are no associated detection this round. + """ + return self.det_class + + def get_instance_mask(self): + ''' + Get instance mask associated with detection. Will be None is there are no associated detection this round + ''' + return self.instance_mask + + def get_det_supplementary(self): + """ + Get supplementary info associated with the detection. Will be None is there are no associated detection this round. + """ + return self.others + + def get_feature(self): + ''' + Get latest appearance feature + ''' + return self.latest_feature + + def predict(self, kf): + """Propagate the state distribution to the current time step using a + Kalman filter prediction step. + + Parameters + ---------- + kf : kalman_filter.KalmanFilter + The Kalman filter. + + """ + self.mean, self.covariance = kf.predict(self.mean, self.covariance) + self.age += 1 + self.time_since_update += 1 + self.original_ltwh = None + self.det_conf = None + self.instance_mask = None + self.others = None + + def update(self, kf, detection): + """Perform Kalman filter measurement update step and update the feature + cache. + + Parameters + ---------- + kf : kalman_filter.KalmanFilter + The Kalman filter. + detection : Detection + The associated detection. 
+ + """ + self.original_ltwh = detection.get_ltwh() + self.mean, self.covariance = kf.update( + self.mean, self.covariance, detection.to_xyah() + ) + self.features.append(detection.feature) + self.latest_feature = detection.feature + self.det_conf = detection.confidence + self.det_class = detection.class_name + self.instance_mask = detection.instance_mask + self.others = detection.others + + self.hits += 1 + + self.time_since_update = 0 + if self.state == TrackState.Tentative and self.hits >= self._n_init: + self.state = TrackState.Confirmed + + def mark_missed(self): + """Mark this track as missed (no association at the current time step).""" + if self.state == TrackState.Tentative: + self.state = TrackState.Deleted + elif self.time_since_update > self._max_age: + self.state = TrackState.Deleted + + def is_tentative(self): + """Returns True if this track is tentative (unconfirmed).""" + return self.state == TrackState.Tentative + + def is_confirmed(self): + """Returns True if this track is confirmed.""" + return self.state == TrackState.Confirmed + + def is_deleted(self): + """Returns True if this track is dead and should be deleted.""" + return self.state == TrackState.Deleted diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/tracker.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/tracker.py new file mode 100644 index 00000000..e6f7c7e1 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deep_sort/tracker.py @@ -0,0 +1,211 @@ +# vim: expandtab:ts=4:sw=4 +from __future__ import absolute_import +from datetime import datetime +import numpy as np +from . import kalman_filter +from . import linear_assignment +from . import iou_matching +from .track import Track + + +class Tracker: + """ + This is the multi-target tracker. + + Parameters + ---------- + metric : nn_matching.NearestNeighborDistanceMetric + A distance metric for measurement-to-track association. 
+ max_age : int + Maximum number of missed misses before a track is deleted. + n_init : int + Number of consecutive detections before the track is confirmed. The + track state is set to `Deleted` if a miss occurs within the first + `n_init` frames. + today: Optional[datetime.date] + Provide today's date, for naming of tracks + + Attributes + ---------- + metric : nn_matching.NearestNeighborDistanceMetric + The distance metric used for measurement to track association. + max_age : int + Maximum number of missed misses before a track is deleted. + n_init : int + Number of frames that a track remains in initialization phase. + kf : kalman_filter.KalmanFilter + A Kalman filter to filter target trajectories in image space. + tracks : List[Track] + The list of active tracks at the current time step. + gating_only_position : Optional[bool] + Used during gating, comparing KF predicted and measured states. If True, only the x, y position of the state distribution is considered during gating. Defaults to False, where x,y, aspect ratio and height will be considered. + """ + + def __init__( + self, + metric, + max_iou_distance=0.7, + max_age=30, + n_init=3, + override_track_class=None, + today=None, + gating_only_position=False, + ): + self.today = today + self.metric = metric + self.max_iou_distance = max_iou_distance + self.max_age = max_age + self.n_init = n_init + self.gating_only_position = gating_only_position + + self.kf = kalman_filter.KalmanFilter() + self.tracks = [] + self.del_tracks_ids = [] + self._next_id = 1 + if override_track_class: + self.track_class = override_track_class + else: + self.track_class = Track + + def predict(self): + """Propagate track state distributions one time step forward. + + This function should be called once every time step, before `update`. + """ + for track in self.tracks: + track.predict(self.kf) + + def update(self, detections, today=None): + """Perform measurement update and track management. 
+ + Parameters + ---------- + detections : List[deep_sort.detection.Detection] + A list of detections at the current time step. + today: Optional[datetime.date] + Provide today's date, for naming of tracks + """ + if self.today: + if today is None: + today = datetime.now().date() + # Check if its a new day, then refresh idx + if today != self.today: + self.today = today + self._next_id = 1 + + # Run matching cascade. + matches, unmatched_tracks, unmatched_detections = self._match(detections) + + # Update track set. + for track_idx, detection_idx in matches: + self.tracks[track_idx].update(self.kf, detections[detection_idx]) + for track_idx in unmatched_tracks: + self.tracks[track_idx].mark_missed() + for detection_idx in unmatched_detections: + self._initiate_track(detections[detection_idx]) + new_tracks = [] + self.del_tracks_ids = [] + for t in self.tracks: + if not t.is_deleted(): + new_tracks.append(t) + else: + self.del_tracks_ids.append(t.track_id) + self.tracks = new_tracks + # self.tracks = [t for t in self.tracks if not t.is_deleted()] + + # Update distance metric. 
+ active_targets = [t.track_id for t in self.tracks if t.is_confirmed()] + features, targets = [], [] + for track in self.tracks: + if not track.is_confirmed(): + continue + features += track.features + targets += [track.track_id for _ in track.features] + track.features = [] + self.metric.partial_fit( + np.asarray(features), np.asarray(targets), active_targets + ) + + def _match(self, detections): + def gated_metric(tracks, dets, track_indices, detection_indices): + features = np.array([dets[i].feature for i in detection_indices]) + targets = np.array([tracks[i].track_id for i in track_indices]) + cost_matrix = self.metric.distance(features, targets) + cost_matrix = linear_assignment.gate_cost_matrix( + self.kf, cost_matrix, tracks, dets, track_indices, detection_indices, only_position=self.gating_only_position + ) + + return cost_matrix + + # Split track set into confirmed and unconfirmed tracks. + confirmed_tracks = [i for i, t in enumerate(self.tracks) if t.is_confirmed()] + unconfirmed_tracks = [ + i for i, t in enumerate(self.tracks) if not t.is_confirmed() + ] + + # Associate confirmed tracks using appearance features. + ( + matches_a, + unmatched_tracks_a, + unmatched_detections, + ) = linear_assignment.matching_cascade( + gated_metric, + self.metric.matching_threshold, + self.max_age, + self.tracks, + detections, + confirmed_tracks, + ) + + # Associate remaining tracks together with unconfirmed tracks using IOU. 
+ iou_track_candidates = unconfirmed_tracks + [ + k for k in unmatched_tracks_a if self.tracks[k].time_since_update == 1 + ] + unmatched_tracks_a = [ + k for k in unmatched_tracks_a if self.tracks[k].time_since_update != 1 + ] + ( + matches_b, + unmatched_tracks_b, + unmatched_detections, + ) = linear_assignment.min_cost_matching( + iou_matching.iou_cost, + self.max_iou_distance, + self.tracks, + detections, + iou_track_candidates, + unmatched_detections, + ) + + matches = matches_a + matches_b + unmatched_tracks = list(set(unmatched_tracks_a + unmatched_tracks_b)) + return matches, unmatched_tracks, unmatched_detections + + def _initiate_track(self, detection): + mean, covariance = self.kf.initiate(detection.to_xyah()) + + if self.today: + track_id = "{}_{}".format(self.today, self._next_id) + else: + track_id = "{}".format(self._next_id) + self.tracks.append( + self.track_class( + mean, + covariance, + track_id, + self.n_init, + self.max_age, + # mean, covariance, self._next_id, self.n_init, self.max_age, + feature=detection.feature, + original_ltwh=detection.get_ltwh(), + det_class=detection.class_name, + det_conf=detection.confidence, + instance_mask=detection.instance_mask, + others=detection.others, + ) + ) + self._next_id += 1 + + def delete_all_tracks(self): + self.tracks = [] + self._next_id = 1 diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deepsort_tracker.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deepsort_tracker.py new file mode 100644 index 00000000..319849fb --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/deepsort_tracker.py @@ -0,0 +1,336 @@ +import time +import logging +from collections.abc import Iterable + +import cv2 +import numpy as np + +from deep_sort_realtime.deep_sort import nn_matching +from deep_sort_realtime.deep_sort.detection import Detection +from deep_sort_realtime.deep_sort.tracker import Tracker +from deep_sort_realtime.utils.nms import non_max_suppression + 
+logger = logging.getLogger(__name__) + +EMBEDDER_CHOICES = [ + "mobilenet", + "torchreid", + "clip_RN50", + "clip_RN101", + "clip_RN50x4", + "clip_RN50x16", + "clip_ViT-B/32", + "clip_ViT-B/16", +] + + +class DeepSort(object): + def __init__( + self, + max_iou_distance=0.7, + max_age=30, + n_init=3, + nms_max_overlap=1.0, + max_cosine_distance=0.2, + nn_budget=None, + gating_only_position=False, + override_track_class=None, + embedder="mobilenet", + half=True, + bgr=True, + embedder_gpu=True, + embedder_model_name=None, + embedder_wts=None, + polygon=False, + today=None, + ): + """ + + Parameters + ---------- + max_iou_distance : Optional[float] = 0.7 + Gating threshold on IoU. Associations with cost larger than this value are + disregarded. Argument for deep_sort_realtime.deep_sort.tracker.Tracker. + max_age : Optional[int] = 30 + Maximum number of missed misses before a track is deleted. Argument for deep_sort_realtime.deep_sort.tracker.Tracker. + n_init : int + Number of frames that a track remains in initialization phase. Defaults to 3. Argument for deep_sort_realtime.deep_sort.tracker.Tracker. + nms_max_overlap : Optional[float] = 1.0 + Non-maxima suppression threshold: Maximum detection overlap, if is 1.0, nms will be disabled + max_cosine_distance : Optional[float] = 0.2 + Gating threshold for cosine distance + nn_budget : Optional[int] = None + Maximum size of the appearance descriptors, if None, no budget is enforced + gating_only_position : Optional[bool] + Used during gating, comparing KF predicted and measured states. If True, only the x, y position of the state distribution is considered during gating. Defaults to False, where x,y, aspect ratio and height will be considered. + override_track_class : Optional[object] = None + Giving this will override default Track class, this must inherit Track. Argument for deep_sort_realtime.deep_sort.tracker.Tracker. + embedder : Optional[str] = 'mobilenet' + Whether to use in-built embedder or not. 
If None, then embeddings must be given during update. + Choice of ['mobilenet', 'torchreid', 'clip_RN50', 'clip_RN101', 'clip_RN50x4', 'clip_RN50x16', 'clip_ViT-B/32', 'clip_ViT-B/16'] + half : Optional[bool] = True + Whether to use half precision for deep embedder (applicable for mobilenet only) + bgr : Optional[bool] = True + Whether frame given to embedder is expected to be BGR or not (RGB) + embedder_gpu: Optional[bool] = True + Whether embedder uses gpu or not + embedder_model_name: Optional[str] = None + Only used when embedder=='torchreid'. This provides which model to use within torchreid library. Check out torchreid's model zoo. + embedder_wts: Optional[str] = None + Optional specification of path to embedder's model weights. Will default to looking for weights in `deep_sort_realtime/embedder/weights`. If deep_sort_realtime is installed as a package and CLIP models is used as embedder, best to provide path. + polygon: Optional[bool] = False + Whether detections are polygons (e.g. oriented bounding boxes) + today: Optional[datetime.date] + Provide today's date, for naming of tracks. Argument for deep_sort_realtime.deep_sort.tracker.Tracker. 
+ """ + self.nms_max_overlap = nms_max_overlap + metric = nn_matching.NearestNeighborDistanceMetric( + "cosine", max_cosine_distance, nn_budget + ) + self.tracker = Tracker( + metric, + max_iou_distance=max_iou_distance, + max_age=max_age, + n_init=n_init, + override_track_class=override_track_class, + today=today, + gating_only_position=gating_only_position, + ) + + if embedder is not None: + if embedder not in EMBEDDER_CHOICES: + raise Exception(f"Embedder {embedder} is not a valid choice.") + if embedder == "mobilenet": + from deep_sort_realtime.embedder.embedder_pytorch import ( + MobileNetv2_Embedder as Embedder, + ) + + self.embedder = Embedder( + half=half, + max_batch_size=16, + bgr=bgr, + gpu=embedder_gpu, + model_wts_path=embedder_wts, + ) + elif embedder == 'torchreid': + from deep_sort_realtime.embedder.embedder_pytorch import TorchReID_Embedder as Embedder + + self.embedder = Embedder( + bgr=bgr, + gpu=embedder_gpu, + model_name=embedder_model_name, + model_wts_path=embedder_wts, + ) + + elif embedder.startswith('clip_'): + from deep_sort_realtime.embedder.embedder_clip import ( + Clip_Embedder as Embedder, + ) + + model_name = "_".join(embedder.split("_")[1:]) + self.embedder = Embedder( + model_name=model_name, + model_wts_path=embedder_wts, + max_batch_size=16, + bgr=bgr, + gpu=embedder_gpu, + ) + + else: + self.embedder = None + self.polygon = polygon + logger.info("DeepSort Tracker initialised") + logger.info(f"- max age: {max_age}") + logger.info(f"- appearance threshold: {max_cosine_distance}") + logger.info( + f'- nms threshold: {"OFF" if self.nms_max_overlap==1.0 else self.nms_max_overlap }' + ) + logger.info(f"- max num of appearance features: {nn_budget}") + logger.info( + f'- overriding track class : {"No" if override_track_class is None else "Yes"}' + ) + logger.info(f'- today given : {"No" if today is None else "Yes"}') + logger.info(f'- in-build embedder : {"No" if self.embedder is None else "Yes"}') + logger.info(f'- polygon detections 
: {"No" if polygon is False else "Yes"}') + + def update_tracks(self, raw_detections, embeds=None, frame=None, today=None, others=None, instance_masks=None): + + """Run multi-target tracker on a particular sequence. + + Parameters + ---------- + raw_detections (horizontal bb) : List[ Tuple[ List[float or int], float, str ] ] + List of detections, each in tuples of ( [left,top,w,h] , confidence, detection_class) + raw_detections (polygon) : List[ List[float], List[int or str], List[float] ] + List of Polygons, Classes, Confidences. All 3 sublists of the same length. A polygon defined as a ndarray-like [x1,y1,x2,y2,...]. + embeds : Optional[ List[] ] = None + List of appearance features corresponding to detections + frame : Optional [ np.ndarray ] = None + if embeds not given, Image frame must be given here, in [H,W,C]. + today: Optional[datetime.date] + Provide today's date, for naming of tracks + others: Optional[ List ] = None + Other things associated to detections to be stored in tracks, usually, could be corresponding segmentation mask, other associated values, etc. Currently others is ignored with polygon is True. + instance_masks: Optional [ List ] = None + Instance masks corresponding to detections. If given, they are used to filter out background and only use foreground for apperance embedding. Expects numpy boolean mask matrix. + + Returns + ------- + list of track objects (Look into track.py for more info or see "main" section below in this script to see simple example) + + """ + + if embeds is None: + if self.embedder is None: + raise Exception( + "Embedder not created during init so embeddings must be given now!" 
+ ) + if frame is None: + raise Exception("either embeddings or frame must be given!") + + assert isinstance(raw_detections,Iterable) + + if len(raw_detections) > 0: + if not self.polygon: + assert len(raw_detections[0][0])==4 + raw_detections = [d for d in raw_detections if d[0][2] > 0 and d[0][3] > 0] + + if embeds is None: + embeds = self.generate_embeds(frame, raw_detections, instance_masks=instance_masks) + + # Proper deep sort detection objects that consist of bbox, confidence and embedding. + detections = self.create_detections(raw_detections, embeds, instance_masks=instance_masks, others=others) + else: + polygons, bounding_rects = self.process_polygons(raw_detections[0]) + + if embeds is None: + embeds = self.generate_embeds_poly(frame, polygons, bounding_rects) + + # Proper deep sort detection objects that consist of bbox, confidence and embedding. + detections = self.create_detections_poly( + raw_detections, embeds, bounding_rects, + ) + else: + detections = [] + + # Run non-maxima suppression. + boxes = np.array([d.ltwh for d in detections]) + scores = np.array([d.confidence for d in detections]) + if self.nms_max_overlap < 1.0: + # nms_tic = time.perf_counter() + indices = non_max_suppression(boxes, self.nms_max_overlap, scores) + # nms_toc = time.perf_counter() + # logger.debug(f'nms time: {nms_toc-nms_tic}s') + detections = [detections[i] for i in indices] + + # Update tracker. 
+ self.tracker.predict() + self.tracker.update(detections, today=today) + + return self.tracker.tracks + + def refresh_track_ids(self): + self.tracker._next_id + + def generate_embeds(self, frame, raw_dets, instance_masks=None): + crops, cropped_inst_masks = self.crop_bb(frame, raw_dets, instance_masks=instance_masks) + if cropped_inst_masks is not None: + masked_crops = [] + for crop, mask in zip(crops, cropped_inst_masks): + masked_crop = np.zeros_like(crop) + masked_crop = masked_crop + np.array([123.675, 116.28, 103.53], dtype=crop.dtype) + masked_crop[mask] = crop[mask] + masked_crops.append(masked_crop) + return self.embedder.predict(masked_crops) + else: + return self.embedder.predict(crops) + + def generate_embeds_poly(self, frame, polygons, bounding_rects): + crops = self.crop_poly_pad_black(frame, polygons, bounding_rects) + return self.embedder.predict(crops) + + def create_detections(self, raw_dets, embeds, instance_masks=None, others=None): + detection_list = [] + for i, (raw_det, embed) in enumerate(zip(raw_dets, embeds)): + detection_list.append( + Detection( + raw_det[0], + raw_det[1], + embed, + class_name=raw_det[2] if len(raw_det)==3 else None, + instance_mask = instance_masks[i] if isinstance(instance_masks, Iterable) else instance_masks, + others = others[i] if isinstance(others, Iterable) else others, + ) + ) # raw_det = [bbox, conf_score, class] + return detection_list + + def create_detections_poly(self, dets, embeds, bounding_rects): + detection_list = [] + dets.extend([embeds, bounding_rects]) + for raw_polygon, cl, score, embed, bounding_rect in zip(*dets): + x, y, w, h = bounding_rect + x = max(0, x) + y = max(0, y) + bbox = [x, y, w, h] + detection_list.append( + Detection(bbox, score, embed, class_name=cl, others=raw_polygon) + ) + return detection_list + + @staticmethod + def process_polygons(raw_polygons): + polygons = [ + [polygon[x : x + 2] for x in range(0, len(polygon), 2)] + for polygon in raw_polygons + ] + bounding_rects = [ + 
cv2.boundingRect(np.array([polygon]).astype(int)) for polygon in polygons + ] + return polygons, bounding_rects + + @staticmethod + def crop_bb(frame, raw_dets, instance_masks=None): + crops = [] + im_height, im_width = frame.shape[:2] + if instance_masks is not None: + masks = [] + else: + masks = None + for i, detection in enumerate(raw_dets): + l, t, w, h = [int(x) for x in detection[0]] + r = l + w + b = t + h + crop_l = max(0, l) + crop_r = min(im_width, r) + crop_t = max(0, t) + crop_b = min(im_height, b) + crops.append(frame[crop_t:crop_b, crop_l:crop_r]) + if instance_masks is not None: + masks.append( instance_masks[i][crop_t:crop_b, crop_l:crop_r] ) + + return crops, masks + + @staticmethod + def crop_poly_pad_black(frame, polygons, bounding_rects): + masked_polys = [] + im_height, im_width = frame.shape[:2] + for polygon, bounding_rect in zip(polygons, bounding_rects): + mask = np.zeros(frame.shape, dtype=np.uint8) + polygon_mask = np.array([polygon]).astype(int) + cv2.fillPoly(mask, polygon_mask, color=(255, 255, 255)) + + # apply the mask + masked_image = cv2.bitwise_and(frame, mask) + + # crop masked image + x, y, w, h = bounding_rect + crop_l = max(0, x) + crop_r = min(im_width, x + w) + crop_t = max(0, y) + crop_b = min(im_height, y + h) + cropped = masked_image[crop_t:crop_b, crop_l:crop_r].copy() + masked_polys.append(np.array(cropped)) + return masked_polys + + def delete_all_tracks(self): + self.tracker.delete_all_tracks() diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__init__.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/__init__.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 00000000..fa652098 Binary files /dev/null and 
b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/__init__.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/__init__.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..a0f15654 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/__init__.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/embedder_pytorch.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/embedder_pytorch.cpython-38.pyc new file mode 100644 index 00000000..1e8f1701 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/embedder_pytorch.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/embedder_pytorch.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/embedder_pytorch.cpython-39.pyc new file mode 100644 index 00000000..b482b5b7 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/embedder_pytorch.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/mobilenetv2_bottle.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/mobilenetv2_bottle.cpython-38.pyc new file mode 100644 index 00000000..5757620b Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/mobilenetv2_bottle.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/mobilenetv2_bottle.cpython-39.pyc 
b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/mobilenetv2_bottle.cpython-39.pyc new file mode 100644 index 00000000..2e1f275d Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/__pycache__/mobilenetv2_bottle.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_clip.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_clip.py new file mode 100644 index 00000000..06a42cb3 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_clip.py @@ -0,0 +1,101 @@ +import os +import logging +from pathlib import Path + +import clip +import cv2 +import numpy as np +import pkg_resources +import torch +from PIL import Image + +logger = logging.getLogger(__name__) + + +def _batch(iterable, bs=1): + l = len(iterable) + for ndx in range(0, l, bs): + yield iterable[ndx : min(ndx + bs, l)] + + +class Clip_Embedder(object): + """ + Clip_Embedder loads a CLIP model of specified architecture, outputting a feature of size 1024. + + Params + ------ + - model_name (optional, str) : CLIP model to use + - model_wts_path (optional, str): Optional specification of path to CLIP model weights. Defaults to None and look for weights in `deep_sort_realtime/embedder/weights` or clip will download from internet into their own cache. 
+ - max_batch_size (optional, int) : max batch size for embedder, defaults to 16 + - bgr (optional, Bool) : boolean flag indicating if input frames are bgr or not, defaults to True + - gpu (optional, Bool) : boolean flag indicating if gpu is enabled or not, defaults to True + """ + + def __init__( + self, + model_name="ViT-B/32", + model_wts_path=None, + max_batch_size=16, + bgr=True, + gpu=True, + ): + if model_wts_path is None: + assert model_name in clip.available_models() + + weights_name = model_name.replace("/", "-") + weights_path = ( + Path(__file__).parent.resolve() / "weights" / f"{weights_name}.pt" + ) + if weights_path.is_file(): + model_wts_path = str(weights_path) + else: + model_wts_path = model_name + + self.device = "cuda" if gpu else "cpu" + self.model, self.img_preprocess = clip.load(model_wts_path, device=self.device) + self.model.eval() + + self.max_batch_size = max_batch_size + self.bgr = bgr + + logger.info("Clip Embedder for Deep Sort initialised") + logger.info(f"- gpu enabled: {gpu}") + logger.info(f"- max batch size: {self.max_batch_size}") + logger.info(f"- expects BGR: {self.bgr}") + logger.info(f"- model name: {model_name}") + + zeros = np.zeros((100, 100, 3), dtype=np.uint8) + self.predict([zeros]) # warmup + + def predict(self, np_images): + """ + batch inference + + Params + ------ + np_images : list of ndarray + list of (H x W x C), bgr or rgb according to self.bgr + + Returns + ------ + list of features (np.array with dim = 1024) + + """ + if not np_images: + return [] + + if self.bgr: + np_images = [cv2.cvtColor(img, cv2.COLOR_BGR2RGB) for img in np_images] + + pil_images = [ + self.img_preprocess(Image.fromarray(rgb)).to(self.device) + for rgb in np_images + ] + + all_feats = [] + for this_batch in _batch(pil_images, bs=self.max_batch_size): + batch = torch.stack(this_batch, 0) + with torch.no_grad(): + feats = self.model.encode_image(batch) + all_feats.extend(feats.cpu().data.numpy()) + return all_feats diff --git 
a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_pytorch.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_pytorch.py new file mode 100644 index 00000000..9847edf4 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_pytorch.py @@ -0,0 +1,232 @@ +import os +import logging + +import cv2 +import numpy as np +import pkg_resources +import torch +from torchvision.transforms import transforms + +from deep_sort_realtime.embedder.mobilenetv2_bottle import MobileNetV2_bottle + +logger = logging.getLogger(__name__) + +MOBILENETV2_BOTTLENECK_WTS = pkg_resources.resource_filename( + "deep_sort_realtime", "embedder/weights/mobilenetv2_bottleneck_wts.pt" +) + +TORCHREID_OSNET_AIN_X1_0_MS_D_C_WTS = pkg_resources.resource_filename( + "deep_sort_realtime", "embedder/weights/osnet_ain_ms_d_c_wtsonly.pth" +) + +INPUT_WIDTH = 224 + + +def batch(iterable, bs=1): + l = len(iterable) + for ndx in range(0, l, bs): + yield iterable[ndx : min(ndx + bs, l)] + + +class MobileNetv2_Embedder(object): + """ + MobileNetv2_Embedder loads a Mobilenetv2 pretrained on Imagenet1000, with classification layer removed, exposing the bottleneck layer, outputing a feature of size 1280. 
+ + Params + ------ + - model_wts_path (optional, str) : path to mobilenetv2 model weights, defaults to the model file in ./mobilenetv2 + - half (optional, Bool) : boolean flag to use half precision or not, defaults to True + - max_batch_size (optional, int) : max batch size for embedder, defaults to 16 + - bgr (optional, Bool) : boolean flag indicating if input frames are bgr or not, defaults to True + - gpu (optional, Bool) : boolean flag indicating if gpu is enabled or not + """ + + def __init__( + self, model_wts_path=None, half=True, max_batch_size=16, bgr=True, gpu=True + ): + if model_wts_path is None: + model_wts_path = MOBILENETV2_BOTTLENECK_WTS + assert os.path.exists( + model_wts_path + ), f"Mobilenetv2 model path {model_wts_path} does not exists!" + self.model = MobileNetV2_bottle(input_size=INPUT_WIDTH, width_mult=1.0) + self.model.load_state_dict(torch.load(model_wts_path)) + + self.gpu = gpu and torch.cuda.is_available() + if self.gpu: + self.model.cuda() # loads model to gpu + self.half = half + if self.half: + self.model.half() + else: + self.half = False + + self.model.eval() # inference mode, deactivates dropout layers + + self.max_batch_size = max_batch_size + self.bgr = bgr + + logger.info("MobileNetV2 Embedder for Deep Sort initialised") + logger.info(f"- gpu enabled: {self.gpu}") + logger.info(f"- half precision: {self.half}") + logger.info(f"- max batch size: {self.max_batch_size}") + logger.info(f"- expects BGR: {self.bgr}") + + zeros = np.zeros((100, 100, 3), dtype=np.uint8) + self.predict([zeros]) # warmup + + def preprocess(self, np_image): + """ + Preprocessing for embedder network: Flips BGR to RGB, resize, convert to torch tensor, normalise with imagenet mean and variance, reshape. 
Note: input image yet to be loaded to GPU through tensor.cuda() + + Parameters + ---------- + np_image : ndarray + (H x W x C) + + Returns + ------- + Torch Tensor + + """ + if self.bgr: + np_image_rgb = np_image[..., ::-1] + else: + np_image_rgb = np_image + + input_image = cv2.resize(np_image_rgb, (INPUT_WIDTH, INPUT_WIDTH)) + trans = transforms.Compose( + [ + transforms.ToTensor(), + transforms.Normalize( + mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225] + ), + ] + ) + input_image = trans(input_image) + input_image = input_image.view(1, 3, INPUT_WIDTH, INPUT_WIDTH) + + return input_image + + def predict(self, np_images): + """ + batch inference + + Params + ------ + np_images : list of ndarray + list of (H x W x C), bgr or rgb according to self.bgr + + Returns + ------ + list of features (np.array with dim = 1280) + + """ + all_feats = [] + + preproc_imgs = [self.preprocess(img) for img in np_images] + + for this_batch in batch(preproc_imgs, bs=self.max_batch_size): + this_batch = torch.cat(this_batch, dim=0) + if self.gpu: + this_batch = this_batch.cuda() + if self.half: + this_batch = this_batch.half() + output = self.model.forward(this_batch) + + all_feats.extend(output.cpu().data.numpy()) + + return all_feats + + +class TorchReID_Embedder(object): + """ + Embedder that works with torchreid (https://github.com/KaiyangZhou/deep-person-reid). Model zoo: https://kaiyangzhou.github.io/deep-person-reid/MODEL_ZOO + + Params + ------ + - model_name (optional, str): name of model, see torchreid model zoo. 
defaults to osnet_ain_x1_0 + - model_wts_path (optional, str) : path to torchreid model weights, defaults to TORCHREID_OSNET_AIN_X1_0_MS_D_C_WTS if model_name=='osnet_ain_x1_0' (default) and else, imagenet pretrained weights of given model + - bgr (optional, Bool) : boolean flag indicating if input frames are bgr or not, defaults to True + - gpu (optional, Bool) : boolean flag indicating if gpu is enabled or not + - max_batch_size: Does nothing, just for compatibility to other embedder classes + """ + + def __init__( + self, model_name=None, model_wts_path=None, bgr=True, gpu=True, max_batch_size=None, + ): + try: + import torchreid + except ImportError: + raise Exception('ImportError: torchreid is not installed, please install and try again or choose another embedder') + + from torchreid.utils import FeatureExtractor + + if model_name is None: + model_name = 'osnet_ain_x1_0' + + if model_wts_path is None: + model_wts_path = '' + + if model_name=='osnet_ain_x1_0' and model_wts_path=='': + model_wts_path = TORCHREID_OSNET_AIN_X1_0_MS_D_C_WTS + + self.gpu = gpu and torch.cuda.is_available() + if self.gpu: + device = 'cuda' + else: + device = 'cpu' + + self.model = FeatureExtractor( + model_name=model_name, + model_path=model_wts_path, + device=device, + ) + + self.bgr = bgr + + logger.info("TorchReID Embedder for Deep Sort initialised") + logger.info(f"- gpu enabled: {self.gpu}") + logger.info(f"- expects BGR: {self.bgr}") + + zeros = np.zeros((100, 100, 3), dtype=np.uint8) + self.predict([zeros]) # warmup + + def preprocess(self, np_image): + """ + Preprocessing for embedder network: Flips BGR to RGB, resize, convert to torch tensor, normalise with imagenet mean and variance, reshape. 
Note: input image yet to be loaded to GPU through tensor.cuda() + + Parameters + ---------- + np_image : ndarray + (H x W x C) + + Returns + ------- + Torch Tensor + + """ + if self.bgr: + np_image_rgb = np_image[..., ::-1] + else: + np_image_rgb = np_image + # torchreid handles the rest of the preprocessing + return np_image_rgb + + def predict(self, np_images): + """ + batch inference + + Params + ------ + np_images : list of ndarray + list of (H x W x C), bgr or rgb according to self.bgr + + Returns + ------ + list of features (np.array with dim = 1280) + + """ + preproc_imgs = [self.preprocess(img) for img in np_images] + output = self.model(preproc_imgs) + return output.cpu().data.numpy() diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_tf.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_tf.py new file mode 100644 index 00000000..0d66f8c6 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/embedder_tf.py @@ -0,0 +1,129 @@ +import os +import logging +from pathlib import Path + +import cv2 +import numpy as np +import pkg_resources +import tensorflow as tf + +MOBILENETV2_BOTTLENECK_WTS = pkg_resources.resource_filename( + "deep_sort_realtime", + "embedder/weights/mobilenet_v2_weights_tf_dim_ordering_tf_kernels_1.0_224.h5", +) + +logger = logging.getLogger(__name__) + +gpus = tf.config.experimental.list_physical_devices("GPU") +if gpus: + # Currently, memory growth needs to be the same across GPUs + for gpu in gpus: + tf.config.experimental.set_memory_growth(gpu, True) + +INPUT_WIDTH = 224 + + +def batch(iterable, bs=1): + l = len(iterable) + for ndx in range(0, l, bs): + yield iterable[ndx : min(ndx + bs, l)] + + +def get_mobilenetv2_with_preproc(wts="imagenet"): + i = tf.keras.layers.Input([None, None, 3], dtype=tf.uint8) + x = tf.cast(i, tf.float32) + x = tf.keras.applications.mobilenet_v2.preprocess_input(x) + + full_model = 
tf.keras.applications.mobilenet_v2.MobileNetV2( + input_shape=None, + weights=str(wts), + classifier_activation=None, + ) + core_model = tf.keras.Model(full_model.input, full_model.layers[-2].output) + + x = core_model(x) + + model = tf.keras.Model(inputs=[i], outputs=[x]) + model.summary() + return model + + +class MobileNetv2_Embedder(object): + """ + MobileNetv2_Embedder loads a Mobilenetv2 pretrained on Imagenet1000, with classification layer removed, exposing the bottleneck layer, outputing a feature of size 1280. + + Params + ------ + - model_wts_path (optional, str) : path to mobilenetv2 model weights, defaults to the model file in ./mobilenetv2 + - max_batch_size (optional, int) : max batch size for embedder, defaults to 16 + - bgr (optional, Bool) : boolean flag indicating if input frames are bgr or not, defaults to True + - gpu (optional, Bool) : boolean flag indicating if gpu is enabled or not + """ + + def __init__(self, model_wts_path=None, max_batch_size=16, bgr=True, gpu=True): + + if not gpu: + os.environ["CUDA_VISIBLE_DEVICES"] = "-1" + + if model_wts_path is None: + model_wts_path = MOBILENETV2_BOTTLENECK_WTS + model_wts_path = Path(model_wts_path) + assert ( + model_wts_path.is_file() + ), f"Mobilenetv2 model path {model_wts_path} does not exists!" 
+ + self.model = get_mobilenetv2_with_preproc(wts=model_wts_path) + + self.max_batch_size = max_batch_size + self.bgr = bgr + + logger.info("MobileNetV2 Embedder (tf) for Deep Sort initialised") + logger.info(f"- max batch size: {self.max_batch_size}") + logger.info(f"- expects BGR: {self.bgr}") + + zeros = np.zeros((100, 100, 3), dtype=np.uint8) + self.predict([zeros, zeros]) # warmup + + def preprocess(self, np_image): + """ + Parameters + ---------- + np_image : ndarray + (H x W x C) + + Returns + ------- + TF Tensor + + """ + if self.bgr: + np_image_rgb = np_image[..., ::-1] + else: + np_image_rgb = np_image + np_image_rgb = cv2.resize(np_image_rgb, (INPUT_WIDTH, INPUT_WIDTH)) + return tf.convert_to_tensor(np_image_rgb) + + def predict(self, np_images): + """ + batch inference + + Params + ------ + np_images : list of ndarray + list of (H x W x C), bgr or rgb according to self.bgr + + Returns + ------ + list of features (np.array with dim = 1280) + + """ + all_feats = [] + + preproc_imgs = [self.preprocess(img) for img in np_images] + + for this_batch in batch(preproc_imgs, bs=self.max_batch_size): + this_batch = tf.stack(this_batch, axis=0) + output = self.model(this_batch) + all_feats.extend(output.numpy()) + + return all_feats diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/mobilenetv2_bottle.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/mobilenetv2_bottle.py new file mode 100644 index 00000000..a825117a --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/mobilenetv2_bottle.py @@ -0,0 +1,135 @@ +import torch.nn as nn +import math + + +def conv_bn(inp, oup, stride): + return nn.Sequential( + nn.Conv2d(inp, oup, 3, stride, 1, bias=False), + nn.BatchNorm2d(oup), + nn.ReLU6(inplace=True), + ) + + +def conv_1x1_bn(inp, oup): + return nn.Sequential( + nn.Conv2d(inp, oup, 1, 1, 0, bias=False), + nn.BatchNorm2d(oup), + nn.ReLU6(inplace=True), + ) + + +class 
InvertedResidual(nn.Module): + def __init__(self, inp, oup, stride, expand_ratio): + super(InvertedResidual, self).__init__() + self.stride = stride + assert stride in [1, 2] + + hidden_dim = round(inp * expand_ratio) + self.use_res_connect = self.stride == 1 and inp == oup + + if expand_ratio == 1: + self.conv = nn.Sequential( + # dw + nn.Conv2d( + hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False + ), + nn.BatchNorm2d(hidden_dim), + nn.ReLU6(inplace=True), + # pw-linear + nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), + nn.BatchNorm2d(oup), + ) + else: + self.conv = nn.Sequential( + # pw + nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False), + nn.BatchNorm2d(hidden_dim), + nn.ReLU6(inplace=True), + # dw + nn.Conv2d( + hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False + ), + nn.BatchNorm2d(hidden_dim), + nn.ReLU6(inplace=True), + # pw-linear + nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), + nn.BatchNorm2d(oup), + ) + + def forward(self, x): + if self.use_res_connect: + return x + self.conv(x) + else: + return self.conv(x) + + +class MobileNetV2_bottle(nn.Module): + def __init__(self, input_size=224, width_mult=1.0): + super(MobileNetV2_bottle, self).__init__() + block = InvertedResidual + input_channel = 32 + last_channel = 1280 + interverted_residual_setting = [ + # t, c, n, s + [1, 16, 1, 1], + [6, 24, 2, 2], + [6, 32, 3, 2], + [6, 64, 4, 2], + [6, 96, 3, 1], + [6, 160, 3, 2], + [6, 320, 1, 1], + ] + + # building first layer + assert input_size % 32 == 0 + input_channel = int(input_channel * width_mult) + self.last_channel = ( + int(last_channel * width_mult) if width_mult > 1.0 else last_channel + ) + self.features = [conv_bn(3, input_channel, 2)] + # building inverted residual blocks + for t, c, n, s in interverted_residual_setting: + output_channel = int(c * width_mult) + for i in range(n): + if i == 0: + self.features.append( + block(input_channel, output_channel, s, expand_ratio=t) + ) + else: + self.features.append( + 
block(input_channel, output_channel, 1, expand_ratio=t) + ) + input_channel = output_channel + # building last several layers + self.features.append(conv_1x1_bn(input_channel, self.last_channel)) + # make it nn.Sequential + self.features = nn.Sequential(*self.features) + + # # building classifier + # self.classifier = nn.Sequential( + # nn.Dropout(0.2), + # nn.Linear(self.last_channel, n_class), + # ) + + self._initialize_weights() + + def forward(self, x): + x = self.features(x) + x = x.mean(3).mean(2) + # x = self.classifier(x) + return x + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2.0 / n)) + if m.bias is not None: + m.bias.data.zero_() + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + elif isinstance(m, nn.Linear): + n = m.weight.size(1) + m.weight.data.normal_(0, 0.01) + m.bias.data.zero_() diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/download_clip_wts.sh b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/download_clip_wts.sh new file mode 100644 index 00000000..46e1b7c1 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/download_clip_wts.sh @@ -0,0 +1,6 @@ +# wget https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt +# wget https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt +# wget https://openaipublic.azureedge.net/clip/models/7e526bd135e493cef0776de27d5f42653e6b4c8bf9e0f653bb11773263205fdd/RN50x4.pt +# wget https://openaipublic.azureedge.net/clip/models/52378b407f34354e150460fe41077663dd5b39c54cd0bfd2b27167a4a06ec9aa/RN50x16.pt +wget 
https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt +# wget https://openaipublic.azureedge.net/clip/models/5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f/ViT-B-16.pt diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/download_tf_wts.sh b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/download_tf_wts.sh new file mode 100644 index 00000000..2c13bac4 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/download_tf_wts.sh @@ -0,0 +1,7 @@ +# Checks if gdown is installed else install +if ! type "gdown" > /dev/null; then + pip3 install gdown +fi + +# Downloads with gdown +gdown https://drive.google.com/uc?id=1RBroAFc0tmfxgvrh7iXc2e1EK8TVzXkA diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/mobilenetv2_bottleneck_wts.pt b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/mobilenetv2_bottleneck_wts.pt new file mode 100644 index 00000000..12074439 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/mobilenetv2_bottleneck_wts.pt differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/osnet_ain_ms_d_c_wtsonly.pth b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/osnet_ain_ms_d_c_wtsonly.pth new file mode 100644 index 00000000..20e46f48 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/embedder/weights/osnet_ain_ms_d_c_wtsonly.pth differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__init__.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__init__.py new file mode 100644 index 00000000..43e08fb8 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__init__.py @@ -0,0 +1 @@ +# vim: 
expandtab:ts=4:sw=4 diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/__init__.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 00000000..c7620967 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/__init__.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/__init__.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 00000000..51e75f00 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/nms.cpython-38.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/nms.cpython-38.pyc new file mode 100644 index 00000000..b513f956 Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/nms.cpython-38.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/nms.cpython-39.pyc b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/nms.cpython-39.pyc new file mode 100644 index 00000000..2bc79b9e Binary files /dev/null and b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/__pycache__/nms.cpython-39.pyc differ diff --git a/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/nms.py b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/nms.py new file mode 100644 index 00000000..e0c1088a --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/deep_sort_realtime/utils/nms.py @@ -0,0 +1,64 @@ +import numpy as np + + +def non_max_suppression(boxes, max_bbox_overlap, scores=None): + """Suppress 
overlapping detections. + Original code from [1]_ has been adapted to include confidence score. + .. [1] http://www.pyimagesearch.com/2015/02/16/ + faster-non-maximum-suppression-python/ + Examples + -------- + >>> boxes = [d.roi for d in detections] + >>> scores = [d.confidence for d in detections] + >>> indices = non_max_suppression(boxes, max_bbox_overlap, scores) + >>> detections = [detections[i] for i in indices] + Parameters + ---------- + boxes : ndarray + Array of ROIs (x, y, width, height). + max_bbox_overlap : float + ROIs that overlap more than this values are suppressed. + scores : Optional[array_like] + Detector confidence score. + Returns + ------- + List[int] + Returns indices of detections that have survived non-maxima suppression. + """ + if len(boxes) == 0: + return [] + + boxes = boxes.astype(np.float32) + pick = [] + + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + boxes[:, 0] + y2 = boxes[:, 3] + boxes[:, 1] + + area = (x2 - x1 + 1) * (y2 - y1 + 1) + if scores is not None: + idxs = np.argsort(scores) + else: + idxs = np.argsort(y2) + + while len(idxs) > 0: + last = len(idxs) - 1 + i = idxs[last] + pick.append(i) + + xx1 = np.maximum(x1[i], x1[idxs[:last]]) + yy1 = np.maximum(y1[i], y1[idxs[:last]]) + xx2 = np.minimum(x2[i], x2[idxs[:last]]) + yy2 = np.minimum(y2[i], y2[idxs[:last]]) + + w = np.maximum(0, xx2 - xx1 + 1) + h = np.maximum(0, yy2 - yy1 + 1) + + overlap = (w * h) / area[idxs[:last]] + + idxs = np.delete( + idxs, np.concatenate(([last], np.where(overlap > max_bbox_overlap)[0])) + ) + + return pick diff --git a/assMath/probStat/Anupriya/kalmanfilter/tempCodeRunnerFile.py b/assMath/probStat/Anupriya/kalmanfilter/tempCodeRunnerFile.py new file mode 100644 index 00000000..440c3491 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/tempCodeRunnerFile.py @@ -0,0 +1 @@ +cv2.CAP_V4L2 \ No newline at end of file diff --git a/assMath/probStat/Anupriya/kalmanfilter/test.py b/assMath/probStat/Anupriya/kalmanfilter/test.py new 
file mode 100644 index 00000000..8c2b1277 --- /dev/null +++ b/assMath/probStat/Anupriya/kalmanfilter/test.py @@ -0,0 +1,70 @@ +import datetime +from ultralytics import YOLO +import cv2 +from deep_sort_realtime.deepsort_tracker import DeepSort +import numpy as np + +CONFIDENCE_THRESHOLD = 0.8 +GREEN = (0, 255, 0) +WHITE = (255, 255, 255) + +# initialize the video capture object +video_cap = cv2.VideoCapture(cv2.CAP_V4L2) + +# load the pre-trained YOLOv8n model +model = YOLO("yolov8l.pt") +tracker = DeepSort(max_age=50) + +while True: + start = datetime.datetime.now() + ret, frame = video_cap.read() + + if not ret: + break + + detections = model(frame)[0] + + results = [] + person=[] + boxes=detections.boxes + for i in range(len(boxes)): + data=boxes[i] + xmin, ymin, xmax, ymax =data.xyxy[0][0].cpu(), data.xyxy[0][1].cpu(), data.xyxy[0][2].cpu(), data.xyxy[0][3].cpu() + results.append([[xmin, ymin, xmax - xmin, ymax - ymin], data.conf[0].cpu(), data.cls]) + + + tracks = tracker.update_tracks(results, frame=frame) + # loop over the tracks + for track in tracks: + # if the track is not confirmed, ignore it + if not track.is_confirmed(): + continue + + # get the track id and the bounding box + track_id = track.track_id + ltrb = track.to_ltrb() + + xmin, ymin, xmax, ymax = int(ltrb[0]), int( + ltrb[1]), int(ltrb[2]), int(ltrb[3]) + # draw the bounding box and the track id + cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), GREEN, 2) + cv2.rectangle(frame, (xmin, ymin - 20), (xmin + 20, ymin), GREEN, -1) + cv2.putText(frame, str(track_id), (xmin + 5, ymin - 8), + cv2.FONT_HERSHEY_SIMPLEX, 0.5, WHITE, 2) + + # end time to compute the fps + end = datetime.datetime.now() + # show the time it took to process 1 frame + print(f"Time to process 1 frame: {(end - start).total_seconds() * 1000:.0f} milliseconds") + # calculate the frame per second and draw it on the frame + fps = f"FPS: {1 / (end - start).total_seconds():.2f}" + cv2.putText(frame, fps, (50, 50), + 
cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 8) + + # show the frame to our screen + cv2.imshow("Frame", frame) + if cv2.waitKey(1) == ord("q"): + break + +video_cap.release() +cv2.destroyAllWindows() \ No newline at end of file