
Commit

Merge branch 'DeepTrackAI:develop' into develop
Henrik-KM authored Dec 5, 2023
2 parents 18318c6 + eda2d02 commit d8ad092
Showing 8 changed files with 255 additions and 55 deletions.
73 changes: 45 additions & 28 deletions deeplay/components/cnn/cnn.py
@@ -86,20 +86,35 @@ class ConvolutionalNeuralNetwork(DeeplayModule):
blocks: LayerList[PoolLayerActivationNormalization]

@property
def input_block(self):
def input(self):
"""Return the input layer of the network. Equivalent to `.blocks[0]`."""
return self.blocks[0]

@property
def hidden_blocks(self):
def hidden(self):
"""Return the hidden layers of the network. Equivalent to `.blocks[:-1]`"""
return self.blocks[:-1]

@property
def output_block(self):
def output(self):
"""Return the last layer of the network. Equivalent to `.blocks[-1]`."""
return self.blocks[-1]

@property
def layer(self) -> LayerList[Layer]:
"""Return the layers of the network. Equivalent to `.blocks.layer`."""
return self.blocks.layer

@property
def activation(self) -> LayerList[Layer]:
"""Return the activations of the network. Equivalent to `.blocks.activation`."""
return self.blocks.activation

@property
def normalization(self) -> LayerList[Layer]:
"""Return the normalizations of the network. Equivalent to `.blocks.normalization`."""
return self.blocks.normalization

def __init__(
self,
in_channels: Optional[int],
@@ -130,40 +145,42 @@ def __init__(
elif isinstance(out_activation, type) and issubclass(out_activation, nn.Module):
out_activation = Layer(out_activation)

if pool is None:
pool = Layer(nn.Identity)
elif isinstance(pool, type) and issubclass(pool, nn.Module):
pool = Layer(pool)

self.blocks = LayerList()

c_out = in_channels

for i, c_out in enumerate(self.hidden_channels):
for i, c_out in enumerate([*self.hidden_channels, out_channels]):
c_in = self.in_channels if i == 0 else self.hidden_channels[i - 1]

self.blocks.append(
PoolLayerActivationNormalization(
pool if i > 0 else Layer(nn.Identity),
Layer(nn.Conv2d, c_in, c_out, 3, 1, 1)
if c_in
else Layer(nn.LazyConv2d, c_out, 3, 1, 1),
Layer(nn.ReLU),
# We can give num_features as an argument to nn.Identity
# because it is ignored. This means that users do not have
# to specify the number of features for nn.Identity.
Layer(nn.Identity, num_features=out_channels),
)
if i == 0:
pool_layer = Layer(nn.Identity)
elif pool is None:
pool_layer = Layer(nn.Identity)
elif isinstance(pool, type) and issubclass(pool, nn.Module):
pool_layer = Layer(pool)
elif isinstance(pool, DeeplayModule):
pool_layer = pool.new()
else:
pool_layer = pool

layer = (
Layer(nn.Conv2d, c_in, c_out, 3, 1, 1)
if c_in
else Layer(nn.LazyConv2d, c_out, 3, 1, 1)
)
activation = (
Layer(nn.ReLU) if i < len(self.hidden_channels) else out_activation
)
normalization = Layer(nn.Identity, num_features=out_channels)

self.blocks.append(
PoolLayerActivationNormalization(
pool if len(self.hidden_channels) > 0 else Layer(nn.Identity),
Layer(nn.Conv2d, c_out, self.out_channels, 3, 1, 1),
out_activation,
Layer(nn.Identity, num_channels=self.out_channels),
block = PoolLayerActivationNormalization(
pool=pool_layer,
layer=layer,
activation=activation,
normalization=normalization,
)
)

self.blocks.append(block)

def forward(self, x):
for block in self.blocks:
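Note: the following is a minimal usage sketch, not part of the diff, of the renamed accessors and the pooling behaviour introduced above; the calls mirror the tests added in this commit, and the exact sequence is an assumption.

import torch
from deeplay import ConvolutionalNeuralNetwork

# The output block is now produced by the same loop as the hidden blocks,
# so `.blocks` holds len(hidden_channels) + 1 entries.
cnn = ConvolutionalNeuralNetwork(3, [4, 4], 1).build()

cnn.input           # first block, same object as cnn.blocks[0]
cnn.hidden          # every block except the last, cnn.blocks[:-1]
cnn.output          # last block, cnn.blocks[-1]

# The new pass-through properties collect the corresponding sub-layer
# from every block.
cnn.layer           # conv layers of all blocks
cnn.activation      # activations of all blocks
cnn.normalization   # normalizations of all blocks

# Pooling: a class (e.g. nn.MaxPool2d), a Layer, or any DeeplayModule passed
# as `pool` is re-instantiated for each block after the first; an already
# constructed nn.Module instance is shared between those blocks.
y = cnn(torch.randn(2, 3, 5, 5))   # -> shape (2, 1, 5, 5)
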
53 changes: 49 additions & 4 deletions deeplay/external/external.py
@@ -16,9 +16,15 @@ def kwargs(self):
full_kwargs = super().kwargs
classtype = full_kwargs.pop("classtype")

# If classtype accepts **kwargs, we can pass all the kwargs to it.
argspec = self.get_argspec()
if argspec.varkw is not None:
kwargs = full_kwargs
kwargs["classtype"] = classtype
return kwargs

# Since the classtype can be configured by the user, we need to
# remove kwargs that are not part of the classtype's signature.

signature = self.get_signature()
signature_args = signature.parameters.keys()
kwargs = {}
@@ -33,15 +39,54 @@ def __pre_init__(self, classtype: type, *args, **kwargs):
# Hack
self.classtype = classtype
super().__pre_init__(*args, classtype=classtype, **kwargs)
self.assert_not_positional_only_and_variadic()

def __init__(self, classtype, *args, **kwargs):
super().__init__()
self.classtype = classtype
self.assert_not_positional_only_and_variadic()

def assert_not_positional_only_and_variadic(self):
argspec = self.get_argspec()
signature = self.get_signature()

positional_only_args = [
param
for param in signature.parameters.values()
if param.kind == param.POSITIONAL_ONLY
]

has_variadic = argspec.varargs is not None

if positional_only_args and has_variadic:
raise TypeError(
f"Cannot use both positional only arguments and *args with {self.__class__.__name__}. Consider wrapping the classtype in a wrapper class."
)


def build(self) -> nn.Module:
args = self.kwargs
args.pop("classtype", None)
return self.classtype(**args)
kwargs = self.kwargs
kwargs.pop("classtype", None)

args = ()

# check if classtype has *args variadic
argspec = self.get_argspec()
signature = self.get_signature()


positional_only_args = [
param.name
for param in signature.parameters.values()
if param.kind == param.POSITIONAL_ONLY
]

# Any positional only arguments should be moved from kwargs to args
for arg in positional_only_args:
args = args + (kwargs.pop(arg),)

if argspec.varargs is not None:
args = args + self._actual_init_args["args"]

return self.classtype(*args, **kwargs)

create = build

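Note: a rough, self-contained illustration (not deeplay's own code) of the dispatch rule the new build() implements: positional-only parameters cannot be passed by keyword, so they are moved from the stored kwargs into a positional tuple. The helper name and the `stored_kwargs` argument are hypothetical.

import inspect

def call_respecting_positional_only(cls, stored_kwargs):
    """Instantiate cls from a kwargs dict, honouring positional-only parameters."""
    signature = inspect.signature(cls)
    positional_only = [
        param.name
        for param in signature.parameters.values()
        if param.kind is param.POSITIONAL_ONLY
    ]
    kwargs = dict(stored_kwargs)
    # Positional-only parameters must be supplied positionally, not by keyword.
    args = tuple(kwargs.pop(name) for name in positional_only)
    return cls(*args, **kwargs)

# In CPython, abs() has the positional-only signature (x, /), so:
# call_respecting_positional_only(abs, {"x": -3}) -> 3
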
2 changes: 1 addition & 1 deletion deeplay/external/optimizers/rmsprop.py
@@ -9,7 +9,7 @@


class RMSprop(Optimizer):
def __pre_init__(self, **optimzer_kwargs):
def __pre_init__(self, classtype=None, **optimzer_kwargs):
super().__pre_init__(torch.optim.RMSprop, **optimzer_kwargs)

def __init__(self, **optimzer_kwargs):
12 changes: 5 additions & 7 deletions deeplay/list.py
@@ -12,10 +12,9 @@ def __pre_init__(self, *layers: Union[T, List[T]], _args: Tuple[T, ...] = ()):
if len(layers) == 1 and isinstance(layers[0], list):
input_layers: Tuple[T] = layers[0]
else:
input_layers: Tuple[T] = layers
layers = layers + _args

super().__pre_init__(_args=input_layers)
input_layers: tuple[T] = layers
layers = tuple(input_layers) + _args
super().__pre_init__(_args=layers)

def __init__(self, *layers: T):
super().__init__()
@@ -25,7 +24,7 @@ def __init__(self, *layers: T):

for idx, layer in enumerate(layers):
super().append(layer)
if isinstance(layer, DeeplayModule):
if isinstance(layer, DeeplayModule) and not layer._has_built:
self._give_user_configuration(layer, self._get_abs_string_index(idx))
layer.__construct__()

@@ -90,8 +89,7 @@ def __getattr__(self, name: str) -> "LayerList[T]":
submodules = [
getattr(layer, name)
for layer in self
if hasattr(layer, name)
and isinstance(getattr(layer, name), DeeplayModule)
if hasattr(layer, name) and isinstance(getattr(layer, name), nn.Module)
]
if len(submodules) > 0:
return LayerList(*submodules)
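Note: a short sketch, not part of the diff, of the attribute broadcasting that __getattr__ provides; with the check widened from DeeplayModule to nn.Module, the broadcast keeps working after the list elements have been built into plain torch modules. The usage follows the `.blocks.layer` equivalence documented in cnn.py above.

from deeplay import ConvolutionalNeuralNetwork

cnn = ConvolutionalNeuralNetwork(3, [4, 4], 1).build()

# Accessing a shared attribute on the list collects it from every element
# that has it and returns a new LayerList.
convs = cnn.blocks.layer         # the conv module of each block
acts = cnn.blocks.activation     # the activation module of each block
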
6 changes: 3 additions & 3 deletions deeplay/module.py
@@ -298,8 +298,8 @@ def new(self):
kwargs = self._actual_init_args["kwargs"]

# Make sure that we don't modify the original arguments
args = (a.new() if isinstance(a, DeeplayModule) else a for a in args)
_args = (_a.new() if isinstance(_a, DeeplayModule) else _a for _a in _args)
args = tuple(a.new() if isinstance(a, DeeplayModule) else a for a in args)
_args = tuple(_a.new() if isinstance(_a, DeeplayModule) else _a for _a in _args)
kwargs = {
k: v.new() if isinstance(v, DeeplayModule) else v for k, v in kwargs.items()
}
@@ -308,7 +308,7 @@ def new(self):
type(self),
*args,
__user_config=user_config,
_args=_args,
_args=tuple(_args),
**kwargs,
)
# obj._take_user_configuration(user_config)
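Note: a plain-Python aside, not deeplay code, on why new() now materialises tuples instead of keeping generator expressions: a generator can be consumed only once, so any later re-read of the stored arguments would see an empty sequence.

args_gen = (a * 2 for a in [1, 2, 3])
list(args_gen)    # [2, 4, 6]
list(args_gen)    # [] -- the generator is already exhausted

args_tup = tuple(a * 2 for a in [1, 2, 3])
list(args_tup)    # [2, 4, 6]
list(args_tup)    # [2, 4, 6] -- tuples can be iterated repeatedly
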
54 changes: 43 additions & 11 deletions deeplay/tests/test_cnn.py
@@ -3,20 +3,24 @@
import torch.nn as nn
from deeplay import ConvolutionalNeuralNetwork, Layer, LayerList

import itertools


class TestComponentCNN(unittest.TestCase):
...

def test_cnn_defaults(self):
cnn = ConvolutionalNeuralNetwork(3, [4], 1)
cnn.build()
cnn.create()

self.assertEqual(len(cnn.blocks), 2)

self.assertEqual(cnn.blocks[0].layer.in_channels, 3)
self.assertEqual(cnn.blocks[0].layer.out_channels, 4)

self.assertEqual(cnn.output_block.layer.in_channels, 4)
self.assertEqual(cnn.output_block.layer.out_channels, 1)
self.assertEqual(cnn.output.layer.in_channels, 4)
self.assertEqual(cnn.output.layer.out_channels, 1)

# test on a batch of 2
x = torch.randn(2, 3, 5, 5)
@@ -27,42 +31,46 @@ def test_cnn_lazy_input(self):
cnn = ConvolutionalNeuralNetwork(None, [4], 1).build()
self.assertEqual(len(cnn.blocks), 2)

self.assertEqual(cnn.blocks[0].layer.in_channels, 0)
self.assertEqual(cnn.input.layer.in_channels, 0)
self.assertEqual(cnn.blocks[0].layer.out_channels, 4)
self.assertEqual(cnn.output_block.layer.in_channels, 4)
self.assertEqual(cnn.output_block.layer.out_channels, 1)
self.assertEqual(cnn.output.layer.in_channels, 4)
self.assertEqual(cnn.output.layer.out_channels, 1)

# test on a batch of 2
x = torch.randn(2, 3, 5, 5)
y = cnn(x)
self.assertEqual(y.shape, (2, 1, 5, 5))
self.assertEqual(y.shape, (2, 1, 5, 5))

def test_cnn_change_depth(self):
cnn = ConvolutionalNeuralNetwork(2, [4], 3)
cnn.configure(hidden_channels=[4, 4])
cnn.create()
cnn.build()
self.assertEqual(len(cnn.blocks), 3)

def test_change_act(self):
cnn = ConvolutionalNeuralNetwork(2, [4], 3)
cnn.configure(out_activation=nn.Sigmoid)
cnn.create()
cnn.build()
self.assertEqual(len(cnn.blocks), 2)
self.assertIsInstance(cnn.output_block.activation, nn.Sigmoid)
self.assertIsInstance(cnn.output.activation, nn.Sigmoid)

def test_change_out_act_Layer(self):
cnn = ConvolutionalNeuralNetwork(2, [4], 3)
cnn.configure(out_activation=Layer(nn.Sigmoid))
cnn.create()
cnn.build()
self.assertEqual(len(cnn.blocks), 2)
self.assertIsInstance(cnn.output_block.activation, nn.Sigmoid)
self.assertIsInstance(cnn.output.activation, nn.Sigmoid)

def test_change_out_act_instance(self):
cnn = ConvolutionalNeuralNetwork(2, [4], 3)
cnn.configure(out_activation=nn.Sigmoid())
cnn.create()
cnn.build()
self.assertEqual(len(cnn.blocks), 2)
self.assertIsInstance(cnn.output_block.activation, nn.Sigmoid)
self.assertIsInstance(cnn.output.activation, nn.Sigmoid)

def test_default_values_initialization(self):
cnn = ConvolutionalNeuralNetwork(
@@ -80,11 +88,35 @@ def test_empty_hidden_channels(self):
self.assertEqual(cnn.blocks[0].layer.in_channels, 3)
self.assertEqual(cnn.blocks[0].layer.out_channels, 1)

self.assertIs(cnn.blocks[0], cnn.output_block)
self.assertIs(cnn.blocks[0], cnn.input_block)
self.assertIs(cnn.blocks[0], cnn.input)
self.assertIs(cnn.blocks[0], cnn.output)

def test_zero_out_channels(self):
with self.assertRaises(ValueError):
ConvolutionalNeuralNetwork(
in_channels=3, hidden_channels=[32, 64], out_channels=0
)

def test_all_cnn_blocks_are_not_same_object(self):
cnn_with_default = ConvolutionalNeuralNetwork(3, [4, 4, 4], 1)
cnn_with_pool_module = ConvolutionalNeuralNetwork(
3, [4, 4, 4], 1, pool=nn.MaxPool2d
)
cnn_with_pool_class = ConvolutionalNeuralNetwork(
3, [4, 4, 4], 1, pool=nn.MaxPool2d(2)
)
cnn_with_pool_layer = ConvolutionalNeuralNetwork(
3, [4, 4, 4], 1, pool=Layer(nn.MaxPool2d)
)

for a, b in itertools.combinations(cnn_with_default.blocks, 2):
self.assertIsNot(a.pool, b.pool)

for a, b in itertools.combinations(cnn_with_pool_module.blocks, 2):
self.assertIsNot(a.pool, b.pool)

for a, b in itertools.combinations(cnn_with_pool_class.blocks[1:], 2):
self.assertIs(a.pool, b.pool)

for a, b in itertools.combinations(cnn_with_pool_layer.blocks, 2):
self.assertIsNot(a.pool, b.pool)
