diff --git a/.gitignore b/.gitignore
index 0134ec0..97fd79c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,3 @@
node_modules
venv
-test.py
-testNumba.py
__pycache__
diff --git a/.idea/dictionaries/Amith.xml b/.idea/dictionaries/Amith.xml
index 84036b5..4f853b1 100644
--- a/.idea/dictionaries/Amith.xml
+++ b/.idea/dictionaries/Amith.xml
@@ -30,7 +30,9 @@
fget
fset
imgs
+ jacobian
ncontour
+ nesterov
npcv
pcontour
pstats
diff --git a/__ipynb__.ipynb b/__ipynb__.ipynb
index 25eebf9..0671296 100644
--- a/__ipynb__.ipynb
+++ b/__ipynb__.ipynb
@@ -3,7 +3,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"import time \n",
@@ -26,7 +30,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /DataSets/dataSet.py"
]
@@ -34,7 +42,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class TrainSets:\n",
@@ -45,7 +57,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class TestSets:\n",
@@ -55,14 +71,22 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /Models/model.py"
]
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/tools/base.py"
]
@@ -70,7 +94,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import os\n",
@@ -80,7 +108,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np\n",
@@ -90,7 +122,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseSave(metaclass=ABCMeta):\n",
@@ -129,7 +165,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseLoad(metaclass=ABCMeta):\n",
@@ -155,7 +195,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"plt.style.use('dark_background')\n",
@@ -261,7 +305,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/tools/helperClass.py"
]
@@ -269,7 +317,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import tempfile\n",
@@ -280,7 +332,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"if TYPE_CHECKING:\n",
@@ -290,7 +346,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np\n",
@@ -300,7 +360,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class NumpyDataCache(np.ndarray):\n",
@@ -320,12 +384,17 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Collections:\n",
" def __repr__(self):\n",
- " return f\"<{self.__class__.__name__}:{self.collectables}>\"\n\n",
+ " return f\"<{self.__class__.__name__}:{self.collectables}>\"\n",
+ "\n",
" # todo: make collectables Type[_] itself, and/or create Collection class generator in general\n",
" def __init__(self, *collectables):\n",
" self.collectables = collectables\n",
@@ -352,7 +421,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"try:\n",
@@ -409,7 +482,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/tools/magicProperty.py"
]
@@ -417,7 +494,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import inspect"
@@ -426,7 +507,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class MagicBase:\n",
@@ -443,7 +528,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class MagicProperty(property):\n",
@@ -475,7 +564,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"def makeMetaMagicProperty(*inherits):\n",
@@ -495,7 +588,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/Topologies/activationFunction.py"
]
@@ -503,7 +600,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from typing import Union\n",
@@ -513,7 +614,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np"
@@ -522,7 +627,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseActivationFunction(metaclass=ABCMeta):\n",
@@ -541,7 +650,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Sigmoid(BaseActivationFunction):\n",
@@ -561,7 +674,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class TanH(BaseActivationFunction):\n",
@@ -579,7 +696,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Relu(BaseActivationFunction):\n",
@@ -592,7 +713,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class PRelu(BaseActivationFunction):\n",
@@ -611,7 +736,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Elu(BaseActivationFunction):\n",
@@ -630,7 +759,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class SoftMax(BaseActivationFunction):\n",
@@ -648,7 +781,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class SoftPlus(BaseActivationFunction):\n",
@@ -660,7 +797,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/Topologies/initializer.py"
]
@@ -668,7 +809,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from abc import ABCMeta, abstractmethod\n",
@@ -678,7 +823,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"if TYPE_CHECKING:\n",
@@ -688,7 +837,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np"
@@ -697,7 +850,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseInitializer(metaclass=ABCMeta):\n",
@@ -716,7 +873,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Uniform(BaseInitializer):\n",
@@ -735,7 +896,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Normal(BaseInitializer):\n",
@@ -752,7 +917,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Xavier(BaseInitializer):\n",
@@ -769,7 +938,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class NormalizedXavier(BaseInitializer):\n",
@@ -787,7 +960,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/Topologies/lossFunction.py"
]
@@ -795,7 +972,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from abc import ABCMeta, abstractmethod"
@@ -804,7 +985,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np"
@@ -813,7 +998,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseLossFunction(metaclass=ABCMeta):\n",
@@ -829,7 +1018,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class MeanSquare(BaseLossFunction):\n",
@@ -841,7 +1034,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class CrossEntropy(BaseLossFunction):\n",
@@ -852,7 +1049,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/Topologies/optimizer.py"
]
@@ -860,7 +1061,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from typing import Callable\n",
@@ -870,7 +1075,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np\n",
@@ -880,7 +1089,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseOptimizer(metaclass=ABCMeta):\n",
@@ -909,7 +1122,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class GradientDecent(BaseOptimizer):\n",
@@ -925,7 +1142,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Decay(BaseOptimizer):\n",
@@ -949,7 +1170,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Momentum(BaseOptimizer):\n",
@@ -972,7 +1197,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class NesterovMomentum(BaseOptimizer):\n",
@@ -995,7 +1224,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class AdaGrad(BaseOptimizer):\n",
@@ -1019,7 +1252,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class RmsProp:\n",
@@ -1030,7 +1267,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class AdaDelta:\n",
@@ -1041,7 +1282,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Adam(BaseOptimizer):\n",
@@ -1087,7 +1332,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/tools/helperFunction.py"
]
@@ -1095,7 +1344,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import sys\n",
@@ -1106,7 +1359,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"if TYPE_CHECKING:\n",
@@ -1116,7 +1373,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from .helperClass import PrintCols"
@@ -1125,7 +1386,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"def copyNumpyList(lis: list[\"np.ndarray\"]):\n",
@@ -1137,7 +1402,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"def iterable(var):\n",
@@ -1151,7 +1420,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"def secToHMS(seconds, hms=('h', 'm', 's')):\n",
@@ -1164,7 +1437,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"def statPrinter(key, value, *, prefix='', suffix=PrintCols.CEND, end=' '):\n",
@@ -1174,7 +1451,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"def getSize(obj, seen=None, ref=''):\n",
@@ -1201,7 +1482,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/tools/__init__.py"
]
@@ -1209,7 +1494,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from .base import BaseSave, BaseLoad, Plot\n",
@@ -1221,7 +1510,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"__all__ = [\n",
@@ -1233,7 +1526,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/Topologies/dataBase.py"
]
@@ -1241,7 +1538,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import warnings\n",
@@ -1251,7 +1552,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np\n",
@@ -1261,7 +1566,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from ..tools import NumpyDataCache, BaseSave, BaseLoad, Plot"
@@ -1270,7 +1579,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class DataBase(BaseSave, BaseLoad):\n",
@@ -1353,7 +1666,8 @@
" return hotEncodedArray, oneHotMax\n",
" @staticmethod\n",
" def oneHotDecode(_3dArray):\n",
- " return np.where(_3dArray == 1)[1]\n\n",
+ " return np.where(_3dArray == 1)[1]\n",
+ "\n",
" # normalize input and target sets within the range of -scale to +scale\n",
" @staticmethod\n",
" def normalize(data, scale: float = None) -> tuple[\"np.ndarray\", float]:\n",
@@ -1361,10 +1675,12 @@
" factor = 1\n",
" else:\n",
" factor = ne.evaluate(\"abs(data) * scale\", local_dict={'data': data, 'scale': scale}).max()\n",
- " return data / factor, factor\n\n",
+ " return data / factor, factor\n",
+ "\n",
" # shuffle the index order\n",
" def randomize(self) -> \"None\":\n",
- " np.random.shuffle(self.indices)\n\n",
+ " np.random.shuffle(self.indices)\n",
+ "\n",
" # returns a generator for input and target sets, each batch-sets of size batchSize at a time\n",
" # send signal '-1' to end generator\n",
" def batchGenerator(self, batchSize) -> Generator[tuple[\"np.ndarray\", \"np.ndarray\"], None, None]:\n",
@@ -1392,7 +1708,8 @@
" indices = self.indices[self.pointer:self.pointer + self.batchSize]\n",
" inputBatch = self.inputs[indices]\n",
" targetBatch = self.targets[indices]\n",
- " return inputBatch, targetBatch\n\n",
+ " return inputBatch, targetBatch\n",
+ "\n",
" # resets generator flags after generator cycle\n",
" def __resetVars(self):\n",
" self.pointer = 0\n",
@@ -1403,7 +1720,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class PlotDataBase(Plot):\n",
@@ -1414,7 +1735,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/Topologies/__init__.py"
]
@@ -1422,7 +1747,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from ..tools import Collections\n",
@@ -1432,7 +1761,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Activators(Collections):\n",
@@ -1446,7 +1779,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Initializers(Collections):\n",
@@ -1460,7 +1797,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Optimizers(Collections):\n",
@@ -1474,7 +1815,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class LossFunction:\n",
@@ -1485,7 +1830,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"__all__ = [\n",
@@ -1496,7 +1845,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/NeuralNetworks/base.py"
]
@@ -1504,7 +1857,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import time\n",
@@ -1520,7 +1877,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"if TYPE_CHECKING:\n",
@@ -1531,7 +1892,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np"
@@ -1540,7 +1905,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from ..tools import MagicBase, MagicProperty, makeMetaMagicProperty, PrintCols, iterable, secToHMS, statPrinter\n",
@@ -1550,7 +1919,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseShape(MagicBase, metaclass=makeMetaMagicProperty(ABCMeta)):\n",
@@ -1586,7 +1959,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class UniversalShape(BaseShape):\n",
@@ -1606,7 +1983,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseLayer(MagicBase, metaclass=makeMetaMagicProperty(ABCMeta)):\n",
@@ -1682,7 +2063,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BasePlot(MagicBase, metaclass=makeMetaMagicProperty(ABCMeta)):\n",
@@ -1693,7 +2078,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Network:\n",
@@ -1751,7 +2140,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class BaseNN(MagicBase, metaclass=makeMetaMagicProperty(ABCMeta)):\n",
@@ -1939,7 +2332,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/NeuralNetworks/dense.py"
]
@@ -1947,7 +2344,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from typing import TYPE_CHECKING"
@@ -1956,7 +2357,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"if TYPE_CHECKING:\n",
@@ -1966,7 +2371,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np"
@@ -1975,7 +2384,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from .base import BaseShape, BaseLayer, BasePlot, BaseNN, UniversalShape, Network"
@@ -1984,7 +2397,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class DenseShape(BaseShape):\n",
@@ -2005,7 +2422,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class DenseLayer(BaseLayer): # todo: pre-set deltas after forwardPass\n",
@@ -2044,7 +2465,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class DensePlot(BasePlot):\n",
@@ -2055,7 +2480,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class DenseNN(BaseNN):\n",
@@ -2084,7 +2513,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/NeuralNetworks/conv.py"
]
@@ -2092,7 +2525,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from typing import TYPE_CHECKING"
@@ -2101,7 +2538,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"if TYPE_CHECKING:\n",
@@ -2112,7 +2553,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # import numpy as np"
@@ -2121,7 +2566,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from .base import BaseShape, BaseLayer, BasePlot, BaseNN, UniversalShape, Network"
@@ -2130,7 +2579,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class ConvShape:\n",
@@ -2140,7 +2593,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class ConvLayer:\n",
@@ -2150,7 +2607,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class ConvPlot(BasePlot):\n",
@@ -2161,7 +2622,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class ConvNN:\n",
@@ -2170,7 +2635,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/NeuralNetworks/__init__.py"
]
@@ -2178,7 +2647,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Base:\n",
@@ -2190,7 +2663,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Dense:\n",
@@ -2201,7 +2678,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"class Conv:\n",
@@ -2212,7 +2693,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"__all__ = [\n",
@@ -2222,7 +2707,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /__init__/__init__.py"
]
@@ -2230,7 +2719,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from .NeuralNetworks import *\n",
@@ -2240,7 +2733,11 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"# /main.py
\n",
"todo: dynamic optimizers
\n",
@@ -2252,7 +2749,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"pass # from __init__ import *\n",
@@ -2263,7 +2764,11 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
"outputs": [],
"source": [
"db = DataBase.load(dataSet.TrainSets.EmnistBalanced, normalizeInp=1, reshapeInp=(-1, 1),\n",
diff --git a/main.py b/main.py
index 7774a29..9f59c64 100644
--- a/main.py
+++ b/main.py
@@ -3,6 +3,7 @@
# todo: auto hyperparameter tuning: Grid search, Population-based natural selection
# todo: auto train stop, inf train
# todo: database save inputs, targets, labels separately
+# todo: string hyperparams
from src import *
from DataSets import dataSet
from Models import model
diff --git a/src/NeuralNetworks/__init__.py b/src/NeuralNetworks/__init__.py
index c0403cb..00e408d 100644
--- a/src/NeuralNetworks/__init__.py
+++ b/src/NeuralNetworks/__init__.py
@@ -29,5 +29,6 @@ class Conv:
__all__ = [
"Base", "Dense", "Conv",
- "UniversalShape", "Network"
+ "UniversalShape", "Network",
+ "Pooling", "Correlation"
]
diff --git a/src/NeuralNetworks/base.py b/src/NeuralNetworks/base.py
index 665d34d..7e6a110 100644
--- a/src/NeuralNetworks/base.py
+++ b/src/NeuralNetworks/base.py
@@ -104,6 +104,7 @@ def __init__(self, shape: "BaseShape",
self.outputDelta = np.zeros(self.SHAPE[0], dtype=np.float32)
self.DEPS = self._defineDeps(*depArgs, **depKwargs)
+ self._initializeDepOptimizer()
def forPass(self, _input: "np.ndarray") -> "np.ndarray":
f"""
diff --git a/src/NeuralNetworks/conv.py b/src/NeuralNetworks/conv.py
index e78110c..efea18d 100644
--- a/src/NeuralNetworks/conv.py
+++ b/src/NeuralNetworks/conv.py
@@ -47,7 +47,13 @@ def __init__(self, *pooling: "Pooling.Base"):
super(Pooling, self).__init__(*pooling)
class Base:
- def __init__(self, stride: Union[int, tuple[int, int]]):
+ def __repr__(self):
+ return f"<{self.__class__.__name__}:{self.stride}:{self.shape}>"
+
+ def __init__(self, shape: Union[int, tuple[int, int]] = None, stride: Union[int, tuple[int, int]] = None):
+ if shape is None: shape = 2
+ self.shape = formatStride(shape)
+ if stride is None: stride = self.shape
self.stride = formatStride(stride)
class MAX(Base): pass
@@ -60,7 +66,11 @@ def __init__(self, *correlation: "Correlation.Base"):
super(Correlation, self).__init__(*correlation)
class Base:
- def __init__(self, stride: Union[int, tuple[int, int]]):
+ def __repr__(self):
+ return f"<{self.__class__.__name__}:{self.stride}>"
+
+ def __init__(self, stride: Union[int, tuple[int, int]] = None):
+ if stride is None: stride = 1
self.stride = formatStride(stride)
class VALID(Base): pass
@@ -75,22 +85,26 @@ class ConvLayer(BaseLayer):
"""
- def __repr__(self):
- return super(ConvLayer, self).__repr__()
-
def _initializeDepOptimizer(self):
self.kernelOptimizer = self.optimizer.__new_copy__()
self.biasesOptimizer = self.optimizer.__new_copy__()
def _defineDeps(self, correlation: "Correlation.Base" = None, pooling: "Pooling.Base" = None) -> list['str']:
- if correlation is None: Correlation.VALID(1)
- if pooling is None: pooling = Pooling.MAX(1)
+ if correlation is None: correlation = Correlation.VALID()
+ if pooling is None: pooling = Pooling.MAX()
self.pooling = pooling
self.correlation = correlation
- # todo: how will shape be?
- self.kernels = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *self.SHAPE.HIDDEN, self.SHAPE.OUTPUT))
- self.biases = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *self.SHAPE.OUTPUT, self.SHAPE.OUTPUT))
- return ["kernels", "biases"]
+ self.kernelShape, self.kernelPad, self.kernelOut = \
+ self.__findKernelPadOut(self.SHAPE.INPUT, self.SHAPE.OUTPUT, self.correlation.__class__,
+ self.correlation.stride)
+ _, self.poolPad, self.poolOut = \
+ self.__findKernelPadOut(self.kernelOut, (self.kernelShape[0], *self.pooling.shape),
+ self.correlation.__class__, self.pooling.stride)
+ self.kernel = self.INITIALIZER(UniversalShape(self.SHAPE.HIDDEN, *self.kernelShape, self.SHAPE.OUTPUT))
+ self.biases = self.INITIALIZER(UniversalShape(self.SHAPE.HIDDEN, *self.poolOut, self.SHAPE.OUTPUT))
+ self.delta = None
+ self.activeDerivedDelta = None
+ return ["kernel", "biases"]
def _fire(self) -> "np.ndarray":
pass
@@ -98,6 +112,26 @@ def _fire(self) -> "np.ndarray":
def _wire(self) -> "np.ndarray":
pass
+ @staticmethod
+ def __findKernelPadOut(kernelInput, kernelBaseShape, correlation, stride):
+ kernel = kernelBaseShape[0], kernelInput[0], *kernelBaseShape[1:]
+ if correlation is Correlation.VALID:
+ out = np.ceil((np.array(kernelInput[-2:]) - kernel[-2:]) / stride) + 1
+ elif correlation is Correlation.FULL:
+ out = np.ceil((np.array(kernelInput[-2:]) - kernel[-2:] +
+ 2 * (kernel[-2:] - np.int16(1))) / stride) + 1 # noqa
+ elif correlation is Correlation.SAME:
+ out = np.array(kernelInput[-2:])
+ else:
+ raise ValueError("Invalid correlation type")
+ pad = ConvLayer.__findPadFromOut(kernelInput, out, stride, kernel)
+ out = kernel[0], *out.astype(np.int16)
+ return kernel, tuple(pad.tolist()), out
+
+ @staticmethod
+ def __findPadFromOut(inp, out, stride, kernel):
+ return ((out - 1) * stride + kernel[-2:] - inp[-2:]).astype(np.int16)
+
class ConvPlot(BasePlot):
"""
diff --git a/src/NeuralNetworks/dense.py b/src/NeuralNetworks/dense.py
index d62073b..06b02c1 100644
--- a/src/NeuralNetworks/dense.py
+++ b/src/NeuralNetworks/dense.py
@@ -35,13 +35,12 @@ def _initializeDepOptimizer(self):
self.biasesOptimizer = self.optimizer.__new_copy__()
def _defineDeps(self) -> list['str']:
- self.weights = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *(self.SHAPE.OUTPUT[0], self.SHAPE.INPUT[0]),
- self.SHAPE.OUTPUT))
+ self.weight = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *(self.SHAPE.OUTPUT[0], self.SHAPE.INPUT[0]),
+ self.SHAPE.OUTPUT))
self.biases = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *self.SHAPE.OUTPUT, self.SHAPE.OUTPUT))
self.delta = None
self.activeDerivedDelta = None
- self._initializeDepOptimizer()
- return ['weights', 'biases']
+ return ['weight', 'biases']
def __gradWeights(self, weights): # BottleNeck
self.delta = np.einsum('oi,...oj->...ij', weights, self.inputDelta, optimize='greedy')
@@ -55,10 +54,10 @@ def __gradBiases(self, _=None):
def _fire(self) -> "np.ndarray": # BottleNeck
return self.ACTIVATION_FUNCTION.activation(
- np.einsum('oi,...ij->...oj', self.weights, self.input, optimize='greedy') + self.biases)
+ np.einsum('oi,...ij->...oj', self.weight, self.input, optimize='greedy') + self.biases)
def _wire(self) -> "np.ndarray":
- self.weights -= self.weightOptimizer(self.__gradWeights, self.weights)
+ self.weight -= self.weightOptimizer(self.__gradWeights, self.weight)
self.biases -= self.biasesOptimizer(self.__gradBiases, self.biases)
return self.delta
@@ -74,9 +73,6 @@ class DenseNN(BaseNN):
"""
- def __save__(self):
- return super(DenseNN, self).__save__()
-
def __init__(self, shape: "DenseShape",
initializers: "Initializers" = None,
activators: "Activators" = None,
diff --git a/src/Topologies/__init__.py b/src/Topologies/__init__.py
index 7d5e38b..6d5cced 100644
--- a/src/Topologies/__init__.py
+++ b/src/Topologies/__init__.py
@@ -1,10 +1,16 @@
from ..tools import Collections
from .dataBase import DataBase, PlotDataBase
+from .activationFunction import BaseActivationFunction, \
+ Sigmoid, TanH, Relu, PRelu, Elu, SoftMax, SoftPlus
+from .initializer import BaseInitializer, \
+ Uniform, Normal, Xavier, NormalizedXavier
+from .optimizer import BaseOptimizer, \
+ GradientDecent, Decay, Momentum, NesterovMomentum, AdaGrad, RmsProp, AdaDelta, Adam
+from .lossFunction import BaseLossFunction, \
+ MeanSquare
class Activators(Collections):
- from .activationFunction import BaseActivationFunction, \
- Sigmoid, TanH, Relu, PRelu, Elu, SoftMax, SoftPlus
Base, Sigmoid, TanH, Relu, PRelu, Elu, SoftMax, SoftPlus = \
BaseActivationFunction, Sigmoid, TanH, Relu, PRelu, Elu, SoftMax, SoftPlus
@@ -13,18 +19,14 @@ def __init__(self, *activationFunctions: "Activators.Base"):
class Initializers(Collections):
- from .initializer import BaseInitializer, \
- Uniform, Normal, Xavier, NormalizedXavier
Base, Uniform, Normal, Xavier, NormalizedXavier = \
BaseInitializer, Uniform, Normal, Xavier, NormalizedXavier
- def __init__(self, *initializer: "Initializers.Base"):
+ def __init__(self, *initializer: "Initializers.Base"): # noqa
super(Initializers, self).__init__(*initializer)
class Optimizers(Collections):
- from .optimizer import BaseOptimizer, \
- GradientDecent, Decay, Momentum, NesterovMomentum, AdaGrad, RmsProp, AdaDelta, Adam
Base, GradientDecent, Decay, Momentum, NesterovMomentum, AdaGrad, RmsProp, AdaDelta, Adam = \
BaseOptimizer, GradientDecent, Decay, Momentum, NesterovMomentum, AdaGrad, RmsProp, AdaDelta, Adam
@@ -33,8 +35,6 @@ def __init__(self, *optimizers: "Optimizers.Base"):
class LossFunction:
- from .lossFunction import BaseLossFunction, \
- MeanSquare
Base, MeanSquare = BaseLossFunction, MeanSquare
diff --git a/src/Topologies/activationFunction.py b/src/Topologies/activationFunction.py
index efa7d05..7a1e749 100644
--- a/src/Topologies/activationFunction.py
+++ b/src/Topologies/activationFunction.py
@@ -1,4 +1,4 @@
-from typing import Union
+from typing import Union, Type
from abc import ABCMeta, abstractmethod
import numpy as np
@@ -11,8 +11,17 @@ class BaseActivationFunction(metaclass=ABCMeta):
def __repr__(self):
return f"<{self.__class__.__name__}>"
- def __save__(self):
- pass
+ def __new__(cls, *args, **kwargs):
+ obj = super(BaseActivationFunction, cls).__new__(cls)
+ obj.RAW_ARGS, obj.RAW_KWARGS = args, kwargs
+ return obj
+
+ def __save__(self) -> tuple["str", "tuple", "dict"]:
+ return self.__class__.__name__, self.RAW_ARGS, self.RAW_KWARGS
+
+ @staticmethod
+ def __load__(name, raw_args, raw_kwargs) -> "BaseActivationFunction":
+ return globals()[name](*raw_args, **raw_kwargs)
@abstractmethod
def activation(self, x: np.ndarray) -> "np.ndarray":
diff --git a/src/Topologies/initializer.py b/src/Topologies/initializer.py
index 8301b1d..c468879 100644
--- a/src/Topologies/initializer.py
+++ b/src/Topologies/initializer.py
@@ -1,24 +1,37 @@
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
+import numpy as np
+
if TYPE_CHECKING:
from ..NeuralNetworks import *
-import numpy as np
-
class BaseInitializer(metaclass=ABCMeta):
+ rnd = np.random.default_rng()
+
def __repr__(self):
return f"<{self.__class__.__name__}>"
- def __save__(self):
- pass
+ def __new__(cls, *args, **kwargs):
+ obj = super(BaseInitializer, cls).__new__(cls)
+ obj.RAW_ARGS, obj.RAW_KWARGS = args, kwargs
+ return obj
- @abstractmethod
- def __init__(self, *args, **kwargs):
- self.rnd = np.random.default_rng()
+ def __save__(self) -> tuple["str", "tuple", "dict"]:
+ return self.__class__.__name__, self.RAW_ARGS, self.RAW_KWARGS
+
+ @staticmethod
+ def __load__(name, raw_args, raw_kwargs) -> "BaseInitializer":
+ return globals()[name](*raw_args, **raw_kwargs)
def __call__(self, shape: "Base.Shape") -> "np.ndarray":
+ """
+ :param shape: a UniversalShape with UniversalShape(...).INPUT as the lower layer,
+ UniversalShape(...).HIDDEN as the desired shape for initialization,
+ UniversalShape(...).OUTPUT as the higher layer
+ :return:
+ """
return self._initialize(shape)
@abstractmethod
@@ -33,7 +46,6 @@ def __repr__(self):
return f"{super(Uniform, self).__repr__()[:-1]}: {start=}: {stop}>"
def __init__(self, start: "float" = -1, stop: "float" = 1):
- super(Uniform, self).__init__()
self.start = start
self.stop = stop
@@ -47,7 +59,6 @@ def __repr__(self):
return f"{super(Normal, self).__repr__()[:-1]}: {scale=}>"
def __init__(self, scale: "float" = 1):
- super(Normal, self).__init__()
self.scale = scale
def _initialize(self, shape: "Base.Shape") -> "np.ndarray":
@@ -60,7 +71,6 @@ def __repr__(self):
return f"{super(Xavier, self).__repr__()[:-1]}: {he=}>"
def __init__(self, he: "float" = 1):
- super(Xavier, self).__init__()
self.he = he
def _initialize(self, shape: "Base.Shape") -> "np.ndarray":
@@ -73,7 +83,6 @@ def __repr__(self):
return f"{super(NormalizedXavier, self).__repr__()[:-1]}: {he=}>"
def __init__(self, he: "float" = 6):
- super(NormalizedXavier, self).__init__()
self.he = he
def _initialize(self, shape: "Base.Shape"):
diff --git a/src/Topologies/lossFunction.py b/src/Topologies/lossFunction.py
index 80ecb90..c79507e 100644
--- a/src/Topologies/lossFunction.py
+++ b/src/Topologies/lossFunction.py
@@ -7,8 +7,17 @@ class BaseLossFunction(metaclass=ABCMeta):
def __repr__(self):
return f"{self.__class__.__name__}"
- def __save__(self):
- pass
+ def __new__(cls, *args, **kwargs):
+ obj = super(BaseLossFunction, cls).__new__(cls)
+ obj.RAW_ARGS, obj.RAW_KWARGS = args, kwargs
+ return obj
+
+ def __save__(self) -> tuple["str", "tuple", "dict"]:
+ return self.__class__.__name__, self.RAW_ARGS, self.RAW_KWARGS
+
+ @staticmethod
+ def __load__(name, raw_args, raw_kwargs) -> "BaseLossFunction":
+ return globals()[name](*raw_args, **raw_kwargs)
def __call__(self, output, target):
return self._eval(output, target)
diff --git a/src/Topologies/optimizer.py b/src/Topologies/optimizer.py
index af7b48a..7c45880 100644
--- a/src/Topologies/optimizer.py
+++ b/src/Topologies/optimizer.py
@@ -14,8 +14,19 @@ def __repr__(self):
lr = self.LEARNING_RATE
return f"<{self.__class__.__name__}:{lr=}>"
- def __save__(self):
- pass
+ def __new__(cls, *args, **kwargs):
+ obj = super(BaseOptimizer, cls).__new__(cls)
+ obj.RAW_ARGS, obj.RAW_KWARGS = args, kwargs
+ return obj
+
+ def __save__(self) -> tuple["str", "tuple", "dict", "dict"]:
+ return self.__class__.__name__, self.RAW_ARGS, self.RAW_KWARGS, self.__dict__
+
+ @staticmethod
+ def __load__(name, raw_args, raw_kwargs, __dict__) -> "BaseOptimizer":
+ _return = globals()[name](*raw_args, **raw_kwargs)
+ _return.__dict__.update(__dict__)
+ return _return
def __init__(self, learningRate: float):
self.LEARNING_RATE = np.float32(learningRate)
diff --git a/test2.py b/test2.py
index f758a13..dc927d0 100644
--- a/test2.py
+++ b/test2.py
@@ -1,7 +1,10 @@
-from src import Conv
+from src import *
+from DataSets import dataSet
+from Models import model
-shape = Conv.Shape((10, 3),
- (10, 4, 3),
- (5, 3), ..., (5, 4, 5), ..., (10, 3, 3),
- (10, 2))
-print(shape)
+cl = Conv.Layer(UniversalShape((3, 28, 28), (10, 3, 3), (5, 4, 4)),
+ Initializers.Xavier(),
+ Optimizers.AdaGrad(),
+ Activators.PRelu())
+
+print(cl)