Commit bbb0bcc3 authored by Dennis Noll

[keras] Layers: added implementation for common DNN Layers/Networks

parent 48431ab4
@@ -728,3 +728,151 @@ def classification_metrics():
        tf.keras.metrics.Recall(name="recall"),
        tf.keras.metrics.AUC(name="auc"),
    ]

class DenseLayer(tf.keras.layers.Layer):
    """
    The DenseLayer object is an extended implementation of tf.keras.layers.Dense.
    It features:
        * L2 regularization
        * the dense weights (the actual layer)
        * batch normalization
        * activation function
        * dynamically chosen dropout

    Parameters
    ----------
    nodes : int
        The number of nodes.
    activation : str or one of tf.keras.activations
        The used activation function.
    dropout : float
        The used dropout rate.
        If "selu" is used as activation function, dropout becomes AlphaDropout.
    l2 : float
        The used factor of the L2 regularization.
    batch_norm : bool
        Whether to use batch normalization or not.
        If batch_norm is used, dropout is forced off.
    """

    def __init__(self, nodes=0, activation=None, dropout=0.0, l2=0, batch_norm=False):
        super().__init__()
        parts = []
        # dense weights with L2 kernel regularization
        regularizer = tf.keras.regularizers.l2(l2)
        weights = tf.keras.layers.Dense(nodes, kernel_regularizer=regularizer)
        parts.append(weights)
        # batch normalization forces dropout off
        if batch_norm:
            dropout = 0.0
            bn = tf.keras.layers.BatchNormalization()
            parts.append(bn)
        # activation function
        act = tf.keras.layers.Activation(activation)
        parts.append(act)
        # AlphaDropout preserves the self-normalizing property of "selu"
        if activation == "selu":
            dropout = tf.keras.layers.AlphaDropout(dropout)
        else:
            dropout = tf.keras.layers.Dropout(dropout)
        parts.append(dropout)
        self.parts = parts

    def call(self, input_tensor, training=False):
        x = input_tensor
        for part in self.parts:
            x = part(x, training=training)
        return x
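
# Usage sketch (illustrative only): how a DenseLayer might be applied to a
# batch of feature vectors. Assumes `tf` is the module-level
# `import tensorflow as tf`; all sizes and hyperparameters are arbitrary examples.
def _example_dense_layer():
    layer = DenseLayer(nodes=64, activation="relu", dropout=0.1, l2=1e-4)
    inputs = tf.ones((8, 32))  # batch of 8 examples with 32 features
    return layer(inputs, training=True)  # -> tensor of shape (8, 64)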

class ResNetBlock(tf.keras.layers.Layer):
    """
    The ResNetBlock object is an implementation of one residual DNN block.
    The residual connection adds the block input to the block output, so the
    number of nodes has to match the number of input features.

    Parameters
    ----------
    jump : int
        The number of layers to bypass.
    kwargs :
        Arguments for DenseLayer.
    """

    def __init__(self, jump=2, **kwargs):
        super().__init__(name="ResNetBlock")
        layers = []
        # all layers but the last one keep their activation
        for i in range(jump - 1):
            layers.append(DenseLayer(**kwargs))
        # the last layer is linear; its activation is applied after the
        # residual connection
        activation = kwargs.pop("activation", None)
        layers.append(DenseLayer(**kwargs))
        self.layers = layers
        self.out_activation = tf.keras.layers.Activation(activation)

    def call(self, input_tensor, training=False):
        x = input_tensor
        for layer in self.layers:
            x = layer(x, training=training)
        # residual (skip) connection; requires matching feature dimensions
        x += input_tensor
        x = self.out_activation(x)
        return x
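
# Usage sketch (illustrative only): a single residual block. Because of the
# skip connection (`x += input_tensor`), `nodes` has to match the number of
# input features. Assumes `tf` is the module-level tensorflow import.
def _example_resnet_block():
    block = ResNetBlock(jump=2, nodes=32, activation="relu", dropout=0.1)
    inputs = tf.ones((8, 32))  # 32 input features == nodes
    return block(inputs, training=True)  # -> tensor of shape (8, 32)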

class FullyConnected(tf.keras.layers.Layer):
    """
    The FullyConnected object is an implementation of a fully connected DNN.

    Parameters
    ----------
    number_layers : int
        The number of layers.
    kwargs :
        Arguments for DenseLayer.
    """

    def __init__(self, number_layers=0, **kwargs):
        super().__init__(name="FullyConnected")
        layers = []
        for _ in range(number_layers):
            layers.append(DenseLayer(**kwargs))
        self.layers = layers

    def call(self, input_tensor, training=False):
        x = input_tensor
        for layer in self.layers:
            x = layer(x, training=training)
        return x
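
# Usage sketch (illustrative only): a three-layer fully connected network on a
# batch of 16 input features. Assumes `tf` is the module-level tensorflow import.
def _example_fully_connected():
    network = FullyConnected(number_layers=3, nodes=64, activation="relu", dropout=0.1)
    inputs = tf.ones((8, 16))
    return network(inputs, training=True)  # -> tensor of shape (8, 64)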

class ResNet(tf.keras.layers.Layer):
    """
    The ResNet object is an implementation of a Residual Neural Network.

    Parameters
    ----------
    number_layers : int
        The number of residual blocks.
    kwargs :
        Arguments for ResNetBlock.
    """

    def __init__(self, number_layers=1, **kwargs):
        super().__init__(name="ResNet")
        layers = []
        for _ in range(number_layers):
            layers.append(ResNetBlock(**kwargs))
        self.layers = layers

    def call(self, input_tensor, training=False):
        x = input_tensor
        for layer in self.layers:
            x = layer(x, training=training)
        return x
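
# Usage sketch (illustrative only): stacking residual blocks. As above, `nodes`
# has to match the input dimension so the skip connections add up; one way to
# achieve this is to project the inputs with a DenseLayer first. Assumes `tf`
# is the module-level tensorflow import.
def _example_resnet():
    projection = DenseLayer(nodes=32, activation="relu")
    resnet = ResNet(number_layers=3, jump=2, nodes=32, activation="relu", dropout=0.1)
    inputs = tf.ones((8, 16))
    return resnet(projection(inputs, training=True), training=True)  # -> shape (8, 32)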