Skip to content

Commit

Permalink
Refactored a bit; separated logic for calculating number of parameter…
Browse files Browse the repository at this point in the history
…s required into a class method that can be called by container classes with the same arguments as the constructor.
  • Loading branch information
dwf committed Nov 26, 2009
1 parent 6b6300c commit 2cb5ed9
Showing 1 changed file with 26 additions and 6 deletions.
32 changes: 26 additions & 6 deletions convolupy/layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,9 +78,10 @@ def _common_bprop_grad(self, meth, dout, inputs):
class ConvolutionalFeatureMapLayer(AbstractFeatureMapLayer):
"""A layer of ConvolutionalFeatureMaps."""
def __init__(self, fsize, imsize, num, **kwargs):
params_per = np.prod(fsize) + 1
numparams = self.numparams_required(fsize, imsize, num, **kwargs)
params_per = numparams / num
super(ConvolutionalFeatureMapLayer, self).__init__(
nparams=params_per * num,
nparams=numparams,
**kwargs
)
self._create_maps(ConvolutionalFeatureMap,
Expand All @@ -89,12 +90,17 @@ def __init__(self, fsize, imsize, num, **kwargs):
fsize,
imsize,
**kwargs)

@classmethod
def numparams_required(cls, fsize, imsize, num, **kwargs):
    """Total parameter count for a layer of `num` convolutional maps.

    Takes the same arguments as the constructor so container classes
    can query the requirement without instantiating the layer.
    NOTE(review): `imsize` is unused here; presumably kept to mirror
    the constructor signature.
    """
    # One kernel (prod(fsize) weights) plus a bias per map.
    per_map = np.prod(fsize) + 1
    return num * per_map


class AveragePoolingFeatureMapLayer(AbstractFeatureMapLayer):
"""A layer of AveragePoolingFeatureMaps."""
def __init__(self, ratio, imsize, num, **kwargs):
params_per = 2

numparams = self.numparams_required(ratio, imsize, num, **kwargs)
super(AveragePoolingFeatureMapLayer, self).__init__(
nparams=params_per * num,
**kwargs
Expand All @@ -105,13 +111,16 @@ def __init__(self, ratio, imsize, num, **kwargs):
ratio,
imsize,
**kwargs)

@classmethod
def numparams_required(cls, ratio, imsize, num, **kwargs):
    """Total parameter count for the whole layer: two parameters
    for each of the `num` average-pooling maps.

    NOTE(review): `ratio` and `imsize` are unused here; presumably
    kept so the signature matches the constructor's — confirm.
    """
    return 2 * num

class MultiConvolutionalFeatureMapLayer(AbstractFeatureMapLayer):
"""A layer of MultiConvolutionalFeatureMaps."""
def __init__(self, fsize, imsize, nummaps, connections, **kwargs):
numparams = np.array([np.prod(fsize) * len(conn) + 1
for conn in connections])
numparams = self._params_per(fsize, imsize, nummaps,
connections, **kwargs)
super(MultiConvolutionalFeatureMapLayer, self).__init__(
nparams=np.sum(numparams),
**kwargs
Expand Down Expand Up @@ -139,6 +148,16 @@ def __init__(self, fsize, imsize, nummaps, connections, **kwargs):
)
self.maps.append(thismap)

@classmethod
def numparams_required(cls, fsize, imsize, nummaps, connections, **kwargs):
    """Total parameter count for the layer.

    Sums the per-map counts produced by `_params_per`, using the same
    arguments as the constructor.
    """
    per_map = cls._params_per(fsize, imsize, nummaps,
                              connections, **kwargs)
    return np.sum(per_map)

@classmethod
def _params_per(cls, fsize, imsize, nummaps, connections, **kwargs):
    """Per-map parameter counts, one array entry per connection spec.

    Each map needs one kernel of prod(fsize) weights per connected
    input, plus a single bias term.
    NOTE(review): `imsize` and `nummaps` are unused here; presumably
    kept to mirror the constructor signature.
    """
    counts = [np.prod(fsize) * len(connected) + 1
              for connected in connections]
    return np.array(counts)

def fprop(self, inputs):
"""Forward propagate input through this module."""
Expand Down Expand Up @@ -187,4 +206,5 @@ def grad(self, dout, inputs):
self.connections[index]]
out.append(fmap.grad(dout[index], theseinputs))
return out


0 comments on commit 2cb5ed9

Please sign in to comment.