Commit b8be0db: stable saveload
amithm3 committed May 31, 2022 · 1 parent 38eee8c
Showing 8 changed files with 100 additions and 81 deletions.
6 changes: 3 additions & 3 deletions Tests/test.py
@@ -1,8 +1,8 @@
from src import *
import pickle

coo = Dense.Layer(shape=Dense.Shape(784, 398),
coo = Dense.Layer(shape=Dense.Shape(100, 200, 300),
initializer=Initializers.Xavier(5),
optimizer=Optimizers.Adam(epsilon=2),
activationFunction=Activators.PRelu())
print(coo, id(coo))
print(coo2 := load(*coo.__save__()), id(coo2))
print(coo.__save__()[2]["shape"].save)
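
The test above exercises the save/load round trip in memory: __save__() returns a plain tuple (the dotted class name, the raw constructor args, the raw kwargs, and, when _dict is set, a {'_dict': ...} mapping), and load(*saved) rebuilds an equivalent object; resolving the dotted name back to a class is load's job and is not shown in this diff. A minimal sketch of that contract, assuming the src package exposes load and the namespaces imported above:

from src import *

layer = Dense.Layer(shape=Dense.Shape(100, 200, 300),
                    initializer=Initializers.Xavier(5),
                    optimizer=Optimizers.Adam(epsilon=2),
                    activationFunction=Activators.PRelu())

saved = layer.__save__()            # (class name, raw args, raw kwargs[, {'_dict': ...}])
clone = load(*saved)                # a new, equivalent object
assert type(clone) is type(layer)   # same class, different id, as the prints show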
34 changes: 16 additions & 18 deletions main.py
@@ -6,6 +6,8 @@
# todo: string hyperparams
# todo: look into "NamedTuple"
# fixme: in DunderSaveLoad's __dict__ saves, save only the required entries, maybe by subtracting the vars of the base __dict__
import pickle

from src import *
from DataSets import dataSet
from Models import model
@@ -15,27 +17,23 @@
db2 = DataBase.load(dataSet.TestSets.EmnistBalanced, normalizeInp=1, reshapeInp=(-1, 1),
name='TestSets.EmnistBalanced')
# db2 = False
dense_nn = Dense.NN(shape=Dense.Shape(db.inpShape[0], *(392, 196), db.tarShape[0]),
initializers=None,
activators=None,
lossFunction=None)
# dense_nn = Dense.NN(shape=Dense.Shape(db.inpShape[0], *(392, 196), db.tarShape[0]),
# initializers=None,
# activators=None,
# lossFunction=None)
# dense_nn.train(epochs=1,
# batchSize=256,
# trainDataBase=db,
# optimizers=None,
# profile=False,
# test=db2)
# print(dense_nn)


def save():
import pickle as dill
dbb, dbb2 = dense_nn.trainDataBase, dense_nn.testDataBase
dense_nn.trainDataBase, dense_nn.testDataBase = None, None
dill.dump(dense_nn, open('t1.nntp', 'wb'))
dense_nn.trainDataBase, dense_nn.testDataBase = dbb, dbb2


def load():
import pickle as dill
return dill.load(open('t1.nntp', 'rb'))
# # print(dense_nn)
#
# coo = dense_nn
# print(coo, id(coo), sep='\n')
# save = coo.__save__()
# with open('temp.save', 'wb') as f:
# pickle.dump(save, f)
with open('temp.save', 'rb') as f:
save2 = pickle.load(f)
print(coo2 := load(*save2), id(coo2), sep='\n')
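
main.py now pickles the tuple returned by __save__() instead of dill-pickling the whole network, and BaseNN.__save__ (below) already detaches trainDataBase and testDataBase, which is what made the hand-rolled save()/load() helpers above redundant. A hedged sketch of the same flow as reusable helpers (saveModel and loadModel are illustrative names, not part of this repo):

import pickle

from src import load  # as used in Tests/test.py


def saveModel(nn, path):
    # BaseNN.__save__ already strips the attached databases
    with open(path, 'wb') as f:
        pickle.dump(nn.__save__(), f)


def loadModel(path):
    with open(path, 'rb') as f:
        return load(*pickle.load(f))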
64 changes: 36 additions & 28 deletions src/NeuralNetworks/base.py
@@ -1,23 +1,23 @@
from typing import TYPE_CHECKING, Union
if TYPE_CHECKING:
from ..tools import *
from ..Topologies import *

import time
import warnings
import cProfile
import traceback
import pstats
import os
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING, Union

import numpy as np

from ..tools import MagicProperty, makeMetaMagicProperty, PrintCols, iterable, secToHMS, statPrinter, DunderSaveLoad
from ..tools import PrintCols, iterable, secToHMS, statPrinter, DunderSaveLoad
from ..Topologies import Activators, Initializers, Optimizers, LossFunction, DataBase

if TYPE_CHECKING:
from ..tools import *
from ..Topologies import *


class BaseShape(DunderSaveLoad, metaclass=makeMetaMagicProperty(ABCMeta)):
class BaseShape(DunderSaveLoad, metaclass=ABCMeta):
"""
"""
@@ -27,7 +27,7 @@ def __repr__(self):

def __getitem__(self, item):
shapes = self.RAW_SHAPES[item]
return self.__class__(*shapes) if isinstance(item, slice) and shapes else self.SHAPES[item]
return type(self)(*shapes) if isinstance(item, slice) and shapes else self.SHAPES[item]

def __hash__(self):
return hash(self.SHAPES)
@@ -65,7 +65,7 @@ def _formatShapes(shapes) -> tuple:
return shapes


class BaseLayer(DunderSaveLoad, metaclass=makeMetaMagicProperty(ABCMeta)):
class BaseLayer(DunderSaveLoad, metaclass=ABCMeta):
"""
"""
@@ -79,6 +79,14 @@ def __str__(self):
DEPS = ': '.join(f"{dName}:shape{getattr(self, dName).shape}" for dName in self.DEPS)
return f"{self.__repr__()[:-1]}:\n{DEPS=}>"

# todo: add ignore to DunderSaveLoad
def __save__(self):
_input, _output, inputDelta, outputDelta = self.input, self.output, self.inputDelta, self.outputDelta
self.input = self.output = self.inputDelta = self.outputDelta = None
_return = super(BaseLayer, self).__save__()
self.input, self.output, self.inputDelta, self.outputDelta = _input, _output, inputDelta, outputDelta
return _return
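
BaseLayer.__save__ stashes the transient forward/backward buffers, delegates to DunderSaveLoad.__save__, and restores them afterwards, so a saved layer never carries activation data. The todo above asks for an ignore list on DunderSaveLoad to replace this boilerplate; one possible shape for that hook (a hypothetical sketch, not part of this commit):

class DunderSaveLoadWithIgnore(DunderSaveLoad):
    _ignore: tuple = ()  # attribute names to drop from a save

    def __save__(self):
        stash = {name: getattr(self, name) for name in self._ignore}
        for name in self._ignore:
            setattr(self, name, None)
        try:
            return super().__save__()
        finally:  # restore even if saving fails midway
            for name, value in stash.items():
                setattr(self, name, value)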

def __init__(self, shape: "BaseShape",
initializer: "Initializers.Base",
optimizer: "Optimizers.Base",
@@ -92,10 +100,10 @@ def __init__(self, shape: "BaseShape",
self.optimizer = optimizer
self.ACTIVATION_FUNCTION = activationFunction

self.input = np.zeros(self.SHAPE[0], dtype=np.float32)
self.output = np.zeros(self.SHAPE[-1], dtype=np.float32)
self.inputDelta = np.zeros(self.SHAPE[-1], dtype=np.float32)
self.outputDelta = np.zeros(self.SHAPE[0], dtype=np.float32)
self.input = None
self.output = None
self.inputDelta = None
self.outputDelta = None

self.DEPS = self._defineDeps(*depArgs, **depKwargs)
self._initializeDepOptimizer()
@@ -148,13 +156,13 @@ def _wire(self) -> "np.ndarray":
"""


class BasePlot(metaclass=makeMetaMagicProperty(ABCMeta)):
class BasePlot:
"""
"""


class Network:
class Network(DunderSaveLoad):
"""
"""
@@ -167,9 +175,6 @@ def __str__(self):
layers = "\n\t\t".join(repr(layer) for layer in self.LAYERS)
return f"{super(Network, self).__str__()}:\n\t\t{layers}"

def __save__(self):
pass

def __init__(self, inputLayer: "BaseLayer", *layers: "BaseLayer", lossFunction: "LossFunction.Base"):
assert len(layers) > 0
self.LAYERS = inputLayer, *layers
@@ -214,21 +219,25 @@ def backPropagation(self, _delta) -> "np.ndarray":
return self.INPUT_LAYER.backProp(_delta)


class BaseNN(metaclass=makeMetaMagicProperty(ABCMeta)):
class BaseNN(DunderSaveLoad, metaclass=ABCMeta):
"""
"""
STAT_PRINT_INTERVAL = 1
__optimizers = Optimizers(Optimizers.Adam(), ..., Optimizers.AdaGrad())
_dict = True

@MagicProperty
@property
def optimizers(self):
return self.__optimizers

@optimizers.setter
def optimizers(self, _optimizers: "Optimizers"):
self.__optimizers = _optimizers
self.NETWORK.changeOptimizer(self.__optimizers)
# todo: add ignore to DunderSaveLoad
def __save__(self):
trainDataBase, testDataBase = self.trainDataBase, self.testDataBase
self.trainDataBase = self.testDataBase = None
_return = super(BaseNN, self).__save__()
self.trainDataBase, self.testDataBase = trainDataBase, testDataBase
return _return

def __repr__(self):
Shape = self.SHAPE
@@ -237,13 +246,10 @@ def __repr__(self):
return f"<{self.__class__.__name__}:Acc={acc[0]}%,{acc[1]}%: {Cost=:07.4f}: {Time=}: {Epochs=}: {Shape=}>"

def __str__(self):
Optimizers = self.optimizers # noqa
Optimizers = self.__optimizers # noqa
TrainDataBase, TestDataBase = self.trainDataBase, self.testDataBase
return f"{self.__repr__()[1:-1]}:\n\t{Optimizers=}\n\t{TrainDataBase=}\n\t{TestDataBase=}\n\t{self.NETWORK}"

def __save__(self):
pass

def __init__(self, shape: "BaseShape",
initializers: "Initializers" = None,
activators: "Activators" = None,
@@ -341,6 +347,8 @@ def train(self, epochs: int = None,
profile: bool = False,
test: Union[bool, "DataBase"] = None):
# todo: implement "runs"
assert isinstance(epochs, int) or epochs is None
assert isinstance(batchSize, int) or batchSize is None
if epochs is not None: self.numEpochs = epochs
if batchSize is not None: self.batchSize = batchSize
if trainDataBase is not None: self.trainDataBase = trainDataBase
3 changes: 2 additions & 1 deletion src/NeuralNetworks/dense.py
@@ -59,7 +59,8 @@ def _fire(self) -> "np.ndarray": # BottleNeck
def _wire(self) -> "np.ndarray":
self.weight -= self.weightOptimizer(self.__gradWeight, self.weight)
self.biases -= self.biasesOptimizer(self.__gradBiases, self.biases)
return self.delta
delta, self.delta, self.activeDerivedDelta = self.delta, None, None
return delta


class DensePlot(BasePlot):
18 changes: 9 additions & 9 deletions src/Topologies/optimizer.py
@@ -36,7 +36,7 @@ def __init__(self, learningRate: float = None):

def _optimize(self, grad: Callable[["np.ndarray"], "np.ndarray"], theta: "np.ndarray") -> "np.ndarray":
delta = grad(theta)
(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
return ne.evaluate("delta * LEARNING_RATE", local_dict=local_dict)


@@ -56,7 +56,7 @@ def _optimize(self, grad: Callable[["np.ndarray"], "np.ndarray"], theta: "np.ndarray") -> "np.ndarray":
delta = grad(theta)
self.decayCounter += self.ONE
locals()['ONE'] = self.ONE
(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
return ne.evaluate("delta * LEARNING_RATE / (ONE + decayCounter * DECAY)", local_dict=local_dict)


@@ -74,7 +74,7 @@ def __init__(self, learningRate: float = None, moment: float = None):

def _optimize(self, grad: Callable[["np.ndarray"], "np.ndarray"], theta: "np.ndarray") -> "np.ndarray":
delta = grad(theta)
(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
self.prevDelta = momentDelta = ne.evaluate("LEARNING_RATE * delta + MOMENT * prevDelta", local_dict=local_dict)
return momentDelta

@@ -93,7 +93,7 @@ def __init__(self, learningRate: float = None, moment: float = None):

def _optimize(self, grad: Callable[["np.ndarray"], "np.ndarray"], theta: "np.ndarray") -> "np.ndarray":
delta = grad(theta - self.MOMENT * self.prevDelta)
(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
self.prevDelta = momentDelta = ne.evaluate("LEARNING_RATE * delta + MOMENT * prevDelta", local_dict=local_dict)
return momentDelta

@@ -112,9 +112,9 @@ def __init__(self, learningRate: float = None, epsilon: float = None):

def _optimize(self, grad: Callable[["np.ndarray"], "np.ndarray"], theta: "np.ndarray") -> "np.ndarray":
delta = grad(theta)
(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
self.summationSquareDelta = ne.evaluate('summationSquareDelta + delta * delta', local_dict=local_dict)
(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
return ne.evaluate('delta * LEARNING_RATE / sqrt(summationSquareDelta + EPSILON)', local_dict=local_dict)


@@ -154,20 +154,20 @@ def __init__(self, learningRate: float = None, beta1: float = None, beta2: float

def _optimize(self, grad: Callable[["np.ndarray"], "np.ndarray"], theta: "np.ndarray") -> "np.ndarray":
delta = grad(theta)
(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
self.weightedSummationDelta = ne.evaluate(
"BETA1 * weightedSummationDelta + BETA1_BAR * delta", local_dict=local_dict)
self.weightedSummationSquareDelta = ne.evaluate(
"BETA2 * weightedSummationSquareDelta + BETA2_BAR * delta * delta", local_dict=local_dict)

(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
weightedSummationDeltaHat = ne.evaluate(
"weightedSummationDelta / (1 - BETA1 ** decayCounter)", local_dict=local_dict)
weightedSummationSquareDeltaHat = ne.evaluate(
"weightedSummationSquareDelta / (1 - BETA2 ** decayCounter)", local_dict=local_dict)

self.decayCounter += self.ONE
(local_dict := vars(self)).update(locals())
(local_dict := vars(self).copy()).update(locals())
return ne.evaluate(
"LEARNING_RATE * weightedSummationDeltaHat / sqrt(weightedSummationSquareDeltaHat + EPSILON)",
local_dict=local_dict)
39 changes: 27 additions & 12 deletions src/tools/helperClass.py
@@ -62,24 +62,39 @@ class DunderSaveLoad:
_dict = False

def __new__(cls, *args, **kwargs):
cls.__RAW_ARGS = [arg if not isinstance(arg, DunderSaveLoad) else Dunder(arg.__save__())
for arg in args]
cls.__RAW_KWARGS = {key: arg if not isinstance(arg, DunderSaveLoad) else Dunder(arg.__save__())
for key, arg in kwargs.items()}
return super().__new__(cls)
self = super().__new__(cls)
self.__RAW_ARGS = args
self.__RAW_KWARGS = kwargs
return self

def __save__(self):
cls_name = f"{self.__module__}.{type(self).__name__}"
return cls_name, self.__RAW_ARGS, self.__RAW_KWARGS, *([] if not self._dict else [{'_dict': self.__dict__}])
return cls_name, self.checkForDunderObjects(self.__RAW_ARGS, "encode"), \
self.checkForDunderObjects(self.__RAW_KWARGS, "encode"), \
*([] if not self._dict else [{'_dict': self.checkForDunderObjects(self.__dict__, "encode")}])

@classmethod
def __load__(cls, raw_args, raw_kwargs, **kwargs):
raw_args = [load(*arg.save) if isinstance(arg, Dunder) else arg for arg in raw_args]
raw_kwargs = {key: load(*arg.save) if isinstance(arg, Dunder) else arg for key, arg in raw_kwargs.items()}
raw_args = cls.checkForDunderObjects(raw_args, "decode")
raw_kwargs = cls.checkForDunderObjects(raw_kwargs, "decode")
self = cls(*raw_args, **raw_kwargs)
if self._dict: self.__dict__.update(kwargs['_dict'])
if self._dict and '_dict' in kwargs: self.__dict__.update(self.checkForDunderObjects(kwargs['_dict'], "decode"))
return self

# todo:
def checkForDunderObjects(self, _obj):
raise NotImplementedError
@classmethod
def checkForDunderObjects(cls, _obj, _type):
assert _type in (types := ("encode", "decode"))
if isinstance(_obj, dict):
keys, vals = _obj.keys(), _obj.values()
return {key: item for item, key in zip(cls.checkForDunderObjects(list(vals), _type), keys)}
elif isinstance(_obj, list):
return [cls.checkForDunderObjects(ob, _type) for ob in _obj]
elif isinstance(_obj, tuple):
return tuple([cls.checkForDunderObjects(ob, _type) for ob in _obj])
else:
if _type == types[0] and isinstance(_obj, DunderSaveLoad):
return Dunder(_obj.__save__())
elif _type == types[1] and isinstance(_obj, Dunder):
return load(*_obj.save)
else:
return _obj
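
checkForDunderObjects is now a real recursive walker: with "encode" it descends dicts, lists and tuples, wrapping every DunderSaveLoad instance in a Dunder record; with "decode" it reverses the wrap via load(*dunder.save). A hedged usage sketch (Point is a stand-in class, and decoding assumes load can re-import it from its recorded module path):

class Point(DunderSaveLoad):
    def __init__(self, x, y):
        self.x, self.y = x, y

nested = {"points": [Point(0, 1), Point(2, 3)], "meta": ("adam", 0.001)}
encoded = DunderSaveLoad.checkForDunderObjects(nested, "encode")   # builtins plus Dunder records
decoded = DunderSaveLoad.checkForDunderObjects(encoded, "decode")  # Points rebuilt via load()
print(type(decoded["points"][0]).__name__)                         # Point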
17 changes: 7 additions & 10 deletions src/tools/helperFunction.py
@@ -34,26 +34,23 @@ def statPrinter(key, value, *, prefix='', suffix=PrintCols.CEND, end=' '):


# fixme: improve
def getSize(obj, seen=None, ref=''):
def getSize(obj, seen=None, depth=0):
"""Recursively finds size of objects"""
size = sys.getsizeof(obj)
if seen is None: seen = set()
if (obj_id := id(obj)) in seen: return 0
# Important: mark as seen *before* entering recursion to gracefully handle
# self-referential objects
seen.add(obj_id)
ref += str(obj.__class__)
if isinstance(obj, dict):
size += sum([getSize(obj[k], seen, ref + str(k)) for k in obj.keys()])
size += sum([getSize(k, seen, ref) for k in obj.keys()])
for k in obj.keys():
siz = getSize(obj[k], seen, depth + 1) + getSize(k, seen, depth + 1)
print('\t' * depth, 'dict', k, siz, sep=': ')
size += siz
elif hasattr(obj, '__dict__'):
size += getSize(obj.__dict__, seen, ref)
size += getSize(obj.__dict__, seen, depth + 1)
elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)):
size += sum([getSize(i, seen, ref) for i in obj])

if size > 1024 * 10: # show files > 10Mb
print(obj.__class__, size)
print(ref, '\n')
size += sum([getSize(i, seen, depth + 1) for i in obj])

return size
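
getSize now threads a depth counter instead of the old ref string, printing an indented per-key size breakdown for dicts, while the seen set keeps shared objects from being counted twice. A quick usage sketch:

import sys

data = {"weights": list(range(1000)), "name": "dense-1"}
total = getSize(data)               # deep size in bytes, with a 'dict: key: size' line per entry
print(total > sys.getsizeof(data))  # True: the deep size exceeds the shallow size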

Binary file added temp.save
