This repository has been archived by the owner on Apr 4, 2023. It is now read-only.

ConvLayer idea initialized
amithm3 committed May 22, 2022
1 parent 4e4ae61 commit 2c09cb7
Showing 14 changed files with 102 additions and 53 deletions.
4 changes: 2 additions & 2 deletions DataSets/dataSet.py
@@ -1,8 +1,8 @@
class TrainSets:
EmnistBalanced = "emnist.balanced.train.112800s.(28,28)_i.(47,1)o.zdb"
EmnistBalanced = "emnist.balanced.train.112800s.(28,28)i.(47,1)o.zdb"
Xor = "xor3.train.8s.(3,1)_i.(1,1)o.zdb"


class TestSets:
EmnistBalanced = "emnist.balanced.test.18800s.(28,28)_i.(47,1)o.zdb"
EmnistBalanced = "emnist.balanced.test.18800s.(28,28)i.(47,1)o.zdb"
Xor = "xor3.test.8s.(3,1)_i.(1,1)o.zdb"
1 change: 1 addition & 0 deletions main.py
@@ -2,6 +2,7 @@
# todo: DataBase shaping using self.SHAPE
# todo: auto hyperparameter tuning: Grid search, Population-based natural selection
# todo: auto train stop, inf train
# todo: database save inputs, targets, labels separately
from src import *
from DataSets import dataSet
from Models import model
2 changes: 1 addition & 1 deletion src/NeuralNetworks/__init__.py
@@ -3,7 +3,7 @@
from .base import BaseShape, BaseLayer, BasePlot, BaseNN, \
UniversalShape, Network
from .dense import DenseShape, DenseLayer, DensePlot, DenseNN
from .conv import ConvShape, ConvLayer, ConvPlot, ConvNN
from .conv import ConvShape, ConvLayer, ConvPlot, ConvNN, Correlation, Pooling


class Base:
16 changes: 8 additions & 8 deletions src/NeuralNetworks/base.py
@@ -13,12 +13,12 @@

import numpy as np

from ..tools import MagicBase, MagicProperty, makeMetaMagicProperty, \
from ..tools import MagicProperty, makeMetaMagicProperty, \
PrintCols, iterable, secToHMS, statPrinter
from ..Topologies import Activators, Initializers, Optimizers, LossFunction, DataBase


class BaseShape(MagicBase, metaclass=makeMetaMagicProperty(ABCMeta)):
class BaseShape(metaclass=makeMetaMagicProperty(ABCMeta)):
"""
"""
@@ -69,7 +69,7 @@ def _formatShapes(shapes) -> tuple:
return shapes


class BaseLayer(MagicBase, metaclass=makeMetaMagicProperty(ABCMeta)):
class BaseLayer(metaclass=makeMetaMagicProperty(ABCMeta)):
"""
"""
@@ -153,7 +153,7 @@ def _wire(self) -> "np.ndarray":
"""


class BasePlot(MagicBase, metaclass=makeMetaMagicProperty(ABCMeta)):
class BasePlot(metaclass=makeMetaMagicProperty(ABCMeta)):
"""
"""
@@ -219,7 +219,7 @@ def backPropagation(self, _delta) -> "np.ndarray":
return self.INPUT_LAYER.backProp(_delta)


class BaseNN(MagicBase, metaclass=makeMetaMagicProperty(ABCMeta)):
class BaseNN(metaclass=makeMetaMagicProperty(ABCMeta)):
"""
"""
@@ -252,7 +252,7 @@ def __save__(self):
def __init__(self, shape: "BaseShape",
initializers: "Initializers" = None,
activators: "Activators" = None,
lossFunction: "LossFunction.Base" = None):
lossFunction: "LossFunction.Base" = None, *ntwArgs, **ntwKwargs):
if initializers is None: initializers = Initializers(Initializers.Xavier(2), ..., Initializers.Xavier())
if activators is None: activators = Activators(Activators.PRelu(), ..., Activators.SoftMax())
if lossFunction is None: lossFunction = LossFunction.MeanSquare()
@@ -269,12 +269,12 @@ def __init__(self, shape: "BaseShape",
self.training = self.profiling = False
self.trainDataBase = self.testDataBase = None

self.NETWORK = self._constructNetwork(initializers, activators, lossFunction)
self.NETWORK = self._constructNetwork(initializers, activators, lossFunction, *ntwArgs, **ntwKwargs)

@abstractmethod
def _constructNetwork(self, initializers: "Initializers" = None,
activators: "Activators" = None,
lossFunction: "LossFunction.Base" = None) -> "Network":
lossFunction: "LossFunction.Base" = None, *args, **kwargs) -> "Network":
pass

def process(self, _input) -> "np.ndarray":
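
The new *ntwArgs/**ntwKwargs parameters are passed through unchanged from BaseNN.__init__ to _constructNetwork, presumably so ConvNN can accept layer options that DenseNN does not need. A hypothetical call, assuming ConvNN exposes the correlation/pooling keywords that ConvLayer._defineDeps (below) consumes:

# Hypothetical usage, not from the commit: extra args flow
# BaseNN.__init__(..., *ntwArgs, **ntwKwargs) -> self._constructNetwork(...)
nn = ConvNN(shape, correlation=Correlation.VALID(1), pooling=Pooling.MAX(2))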
54 changes: 50 additions & 4 deletions src/NeuralNetworks/conv.py
@@ -6,6 +6,7 @@
import numpy as np

from .base import BaseShape, BaseLayer, BasePlot, BaseNN, UniversalShape, Network
from ..tools import Collections


class ConvShape(BaseShape):
@@ -32,6 +33,43 @@ def _formatShapes(shapes) -> tuple:
return tuple(formattedShape)


def formatStride(stride) -> tuple:
if isinstance(stride, int):
assert stride > 0, "integer args of *strides must be > 0"
return stride, stride
assert isinstance(stride, tuple) and len(stride) == 2 and all(isinstance(s, int) for s in stride), \
"non integer args of *strides must be integer tuple of length == 2"
return stride
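
For reference, formatStride collapses the two accepted spellings into one canonical pair:

formatStride(2)       # -> (2, 2)
formatStride((1, 2))  # -> (1, 2)
formatStride(0)       # AssertionError: integer args of *strides must be > 0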


class Pooling(Collections):
def __init__(self, *pooling: "Pooling.Base"):
super(Pooling, self).__init__(*pooling)

class Base:
def __init__(self, stride: Union[int, tuple[int, int]]):
self.stride = formatStride(stride)

class MAX(Base): pass

class MEAN(Base): pass
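
Pooling.MAX and Pooling.MEAN carry only a stride so far. A minimal sketch of the max variant they presumably stand for, assuming non-overlapping windows the size of the stride (an assumption, since the layer math is still unimplemented):

import numpy as np

# Sketch only: window == stride; trailing rows/cols that do not fill a
# whole window are dropped. Uses formatStride from above.
def maxPool2d(x: "np.ndarray", stride) -> "np.ndarray":
    sH, sW = formatStride(stride)
    H, W = x.shape
    trimmed = x[:H - H % sH, :W - W % sW]
    return trimmed.reshape(H // sH, sH, W // sW, sW).max(axis=(1, 3))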


class Correlation(Collections):
def __init__(self, *correlation: "Correlation.Base"):
super(Correlation, self).__init__(*correlation)

class Base:
def __init__(self, stride: Union[int, tuple[int, int]]):
self.stride = formatStride(stride)

class VALID(Base): pass

class FULL(Base): pass

class SAME(Base): pass
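
VALID, FULL, and SAME mirror the usual correlation padding schemes. Since ConvLayer._fire is still a stub, the intended semantics are a guess; the standard output sizes for an (H, W) input and a (kH, kW) kernel at unit stride are VALID (H-kH+1, W-kW+1), SAME (H, W), and FULL (H+kH-1, W+kW-1). A minimal VALID-mode sketch:

import numpy as np

# Plain valid cross-correlation (no kernel flipping, unit stride);
# illustrative only, not the commit's implementation.
def correlate2dValid(x: "np.ndarray", k: "np.ndarray") -> "np.ndarray":
    (H, W), (kH, kW) = x.shape, k.shape
    out = np.empty((H - kH + 1, W - kW + 1), dtype=x.dtype)
    for i in range(out.shape[0]):
        for j in range(out.shape[1]):
            out[i, j] = (x[i:i + kH, j:j + kW] * k).sum()
    return out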


class ConvLayer(BaseLayer):
"""
@@ -41,10 +79,18 @@ def __repr__(self):
return super(ConvLayer, self).__repr__()

def _initializeDepOptimizer(self):
pass

def _defineDeps(self, *depArgs, **depKwargs) -> list['str']:
pass
self.kernelOptimizer = self.optimizer.__new_copy__()
self.biasesOptimizer = self.optimizer.__new_copy__()

def _defineDeps(self, correlation: "Correlation.Base" = None, pooling: "Pooling.Base" = None) -> list['str']:
if correlation is None: correlation = Correlation.VALID(1)
if pooling is None: pooling = Pooling.MAX(1)
self.pooling = pooling
self.correlation = correlation
# todo: how will shape be?
self.kernels = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *self.SHAPE.HIDDEN, self.SHAPE.OUTPUT))
self.biases = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *self.SHAPE.OUTPUT, self.SHAPE.OUTPUT))
return ["kernels", "biases"]

def _fire(self) -> "np.ndarray":
pass
4 changes: 2 additions & 2 deletions src/NeuralNetworks/dense.py
@@ -37,7 +37,7 @@ def _initializeDepOptimizer(self):
def _defineDeps(self) -> list['str']:
self.weights = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *(self.SHAPE.OUTPUT[0], self.SHAPE.INPUT[0]),
self.SHAPE.OUTPUT))
self.biases = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *(self.SHAPE.OUTPUT[0], 1), self.SHAPE.OUTPUT))
self.biases = self.INITIALIZER(UniversalShape(self.SHAPE.INPUT, *self.SHAPE.OUTPUT, self.SHAPE.OUTPUT))
self.delta = None
self.activeDerivedDelta = None
self._initializeDepOptimizer()
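
(Inference from the shapes, not stated in the commit: with a dense OUTPUT of (n, 1), the old (self.SHAPE.OUTPUT[0], 1) and the new *self.SHAPE.OUTPUT hand the initializer the same (n, 1) bias shape, matching the ConvLayer biases line added above.)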
@@ -83,7 +83,7 @@ def __init__(self, shape: "DenseShape",
lossFunction: "LossFunction.Base" = None):
super(DenseNN, self).__init__(shape, initializers, activators, lossFunction)

def _constructNetwork(self, initializers: "Initializers" = None,
def _constructNetwork(self, initializers: "Initializers" = None, # noqa
activators: "Activators" = None,
lossFunction: "LossFunction.Base" = None) -> "Network":
layers = []
3 changes: 3 additions & 0 deletions src/Topologies/activationFunction.py
@@ -11,6 +11,9 @@ class BaseActivationFunction(metaclass=ABCMeta):
def __repr__(self):
return f"<{self.__class__.__name__}>"

def __save__(self):
pass

@abstractmethod
def activation(self, x: np.ndarray) -> "np.ndarray":
pass
3 changes: 3 additions & 0 deletions src/Topologies/initializer.py
@@ -11,6 +11,9 @@ class BaseInitializer(metaclass=ABCMeta):
def __repr__(self):
return f"<{self.__class__.__name__}>"

def __save__(self):
pass

@abstractmethod
def __init__(self, *args, **kwargs):
self.rnd = np.random.default_rng()
3 changes: 3 additions & 0 deletions src/Topologies/lossFunction.py
@@ -7,6 +7,9 @@ class BaseLossFunction(metaclass=ABCMeta):
def __repr__(self):
return f"{self.__class__.__name__}"

def __save__(self):
pass

def __call__(self, output, target):
return self._eval(output, target)

16 changes: 5 additions & 11 deletions src/Topologies/optimizer.py
@@ -4,24 +4,18 @@
import numpy as np
import numexpr as ne

from ..tools import NewCopy

class BaseOptimizer(metaclass=ABCMeta):
__args, __kwargs = (), {}

class BaseOptimizer(NewCopy, metaclass=ABCMeta):
ZERO, ONE = np.float32(0), np.float32(1)

def __repr__(self):
lr = self.LEARNING_RATE
return f"<{self.__class__.__name__}:{lr=}>"

def __new__(cls, *args, **kwargs):
cls.__args, cls.__kwargs = args, kwargs
obj = super(BaseOptimizer, cls).__new__(cls)
obj.__init__(*args, **kwargs)
return obj

@classmethod
def __new_copy__(cls):
return cls.__new__(cls, *cls.__args, *cls.__kwargs)
def __save__(self):
pass

def __init__(self, learningRate: float):
self.LEARNING_RATE = np.float32(learningRate)
9 changes: 5 additions & 4 deletions src/tools/__init__.py
@@ -1,10 +1,11 @@
from .base import BaseSave, BaseLoad, Plot
from .helperClass import NumpyDataCache, Collections, PrintCols
from .helperClass import NumpyDataCache, Collections, PrintCols, NewCopy
from .helperFunction import copyNumpyList, iterable, secToHMS, statPrinter, getSize
from .magicProperty import MagicBase, MagicProperty, makeMetaMagicProperty
from .magicProperty import MagicProperty, makeMetaMagicProperty

__all__ = [
"BaseSave", "BaseLoad", "Plot",
"NumpyDataCache", "Collections", "PrintCols", "copyNumpyList", "iterable", "secToHMS", "statPrinter", "getSize",
"MagicBase", "MagicProperty", "makeMetaMagicProperty",
"NumpyDataCache", "Collections", "PrintCols", "NewCopy",
"copyNumpyList", "iterable", "secToHMS", "statPrinter", "getSize",
"MagicProperty", "makeMetaMagicProperty",
]
25 changes: 18 additions & 7 deletions src/tools/helperClass.py
@@ -1,14 +1,11 @@
import tempfile
import ctypes
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from ..tools import *

import numpy as np
from numpy.lib import format as fm


# fixme: just make as function
class NumpyDataCache(np.ndarray):
def __new__(cls, array):
return cls.writeNpyCache(array)
@@ -25,11 +22,25 @@ def writeNpyCache(array: "np.ndarray") -> np.ndarray:
return memMap


class NewCopy:
__args, __kwargs = (), {}

def __new__(cls, *args, **kwargs):
cls.__args, cls.__kwargs = args, kwargs
obj = super(NewCopy, cls).__new__(cls)
obj.__init__(*args, **kwargs) # noqa
return obj

@classmethod
def __new_copy__(cls):
return cls.__new__(cls, *cls.__args, **cls.__kwargs)
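
NewCopy records the most recent constructor arguments on the class; __new_copy__ calls __new__ directly, bypassing the usual type __call__ machinery, so __new__ invokes __init__ itself. The result is a freshly re-initialized instance rather than a state copy, which is what BaseOptimizer (now a NewCopy subclass) relies on for the per-parameter optimizers in ConvLayer._initializeDepOptimizer. A hypothetical subclass (Sgd is an invented name):

class Sgd(NewCopy):
    def __init__(self, lr: float):
        self.lr = lr

opt = Sgd(0.01)
fresh = opt.__new_copy__()  # new Sgd, re-run through __init__ with lr=0.01
assert fresh is not opt and fresh.lr == opt.lr

Because __args/__kwargs live on the class, a copy replays the most recent construction of that class, not necessarily the one that produced the instance being copied.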


class Collections:
def __repr__(self):
return f"<{self.__class__.__name__}:{self.collectables}>"

# todo: make collectables Type[_<class>] itself, and/or create Collection class generator in general
# todo: make collectables Type[_<class>] itself, and/or use __copy__?
def __init__(self, *collectables):
self.collectables = collectables

@@ -58,9 +69,9 @@ def get(self, length):
try:
kernel32 = ctypes.windll.kernel32
kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7)
except: # noqa
except AttributeError:
pass
# noinspection SpellCheckingInspection
# todo: use NamedTuple
class PrintCols: # noqa
CEND = '\33[0m'
CBOLD = '\33[1m'
15 changes: 1 addition & 14 deletions src/tools/magicProperty.py
@@ -1,19 +1,6 @@
import inspect


class MagicBase:
def __new__(cls, *args, **kwargs):
obj = super(MagicBase, cls).__new__(cls)
obj.toMagicProperty = set()
return obj

def __magic_start__(self):
self.toMagicProperty = set(self.__dict__.keys())

def __magic_end__(self):
self.toMagicProperty = set(self.__dict__.keys()) - self.toMagicProperty


class MagicProperty(property):
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
super(MagicProperty, self).__init__(fget, self.makeMagicF(fset), self.makeMagicF(fdel), doc)
@@ -31,7 +18,7 @@ def f(*args, **kwargs):
def __magic__(self, stack=1):
caller = self.getCaller(stack + 1)
return any(c1 == c2 and c1 is not None for c1, c2 in zip(caller, self.__obj)) or \
(any(self.__obj[2] == base.__name__ for base in caller[1].__bases__)
(any(self.__obj[2] == base.__name__ for base in caller[1].__bases__)
if self.__obj[:2] == (None, None) and caller[1] is not None else 0)

@staticmethod
File renamed without changes.
