
Commit 163a6da

add Laetitia as ctb
davidruegamer committed Feb 28, 2024
1 parent 84d6791 commit 163a6da
Showing 3 changed files with 39 additions and 7 deletions.
3 changes: 2 additions & 1 deletion DESCRIPTION
@@ -1,9 +1,10 @@
 Package: deepregression
 Title: Fitting Deep Distributional Regression
-Version: 2.0.0
+Version: 2.1.0
 Authors@R: c(
     person("David", "Ruegamer", , "david.ruegamer@gmail.com", role = c("aut", "cre")),
     person("Christopher", "Marquardt", , "ch.marquardt@campus.lmu.de", role = c("ctb")),
+    person("Laetitia", "Frost", , "lae.frost@campus.lmu.de", role = c("ctb")),
     person("Florian", "Pfisterer", , "florian.pfisterer@stat.uni-muenchen.de", role = c("ctb")),
     person("Philipp", "Baumann", , "baumann@kof.ethz.ch", role = c("ctb")),
     person("Chris", "Kolb", , "chris.kolb@stat.uni-muenchen.de", role = c("ctb")),
8 changes: 8 additions & 0 deletions R/layers.R
@@ -24,6 +24,14 @@ pen_layer = function(units, P, ...) {
   layers$CombinedModel(units = units, P = P, ...)
 }
 
+update_factor_callback = function(model, weightnr = -1L, ...) {
+  python_path <- system.file("python", package = "deepregression")
+  layers <- reticulate::import_from_path("psplines", path = python_path)
+  layers$UpdateMultiplicationFactorFromWeight(model = model,
+                                              weightnr = weightnr,
+                                              ...)
+}
+
 #' Hadamard-type layers
 #'
 #' @param units integer; number of units
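For orientation, a hedged sketch (not part of this commit) of how update_factor_callback() might be used from R. It assumes mod is a compiled deepregression model that exposes the underlying Keras model as mod$model, that the scale parameter to track is the model's last weight (the weightnr = -1L default), and that fit() forwards a callbacks list to Keras; these are assumptions, not details taken from the commit.

# Hypothetical usage sketch; the mod$model slot and the callbacks pass-through are assumptions
library(deepregression)
cb <- update_factor_callback(model = mod$model, weightnr = -1L)
mod %>% fit(epochs = 100, callbacks = list(cb))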
35 changes: 29 additions & 6 deletions inst/python/psplines/psplines.py
@@ -250,29 +250,31 @@ class LambdaLayer(tf.keras.layers.Layer):
     def __init__(self, units, P, damping = 1.0, scale = 1.0, **kwargs):
         super(LambdaLayer, self).__init__(**kwargs)
         self.units = units
-        self.lambdasqrt = self.add_weight(name='lambdasqrt',
+        self.trafolambda = self.add_weight(name='trafolambda',
                                           shape=(units,len(P)),
-                                          initializer=tf.keras.initializers.RandomNormal,
+                                          initializer=tf.keras.initializers.Constant(value=0),
                                           trainable=True)
+        self.phi = tf.Variable(1.0, name = 'phimultiplier', trainable=False, dtype=tf.float32)
         self.damping = damping
         self.scale = scale
         self.P = P
 
     def call(self, inputs, w):
         for i in range(len(self.P)):
-            lmbda = tf.reshape(tf.math.square(self.lambdasqrt[:,i]), [])
+            lmbda = tf.reshape(tf.math.exp(self.trafolambda[:,i]), [])
             inf = 0.5 * tf.reduce_sum(vecmatvec(w, tf.cast(self.P[i], dtype="float32")))
             damp_term = self.damping * inf**2 / 2
-            l_term = lmbda * inf
+            l_term = lmbda * inf / self.phi
             self.add_loss(self.scale * (l_term + damp_term))
         return inputs
 
     def get_config(self):
         config = super().get_config().copy()
         config.update({
             'units': self.units,
-            'lambda': self.lambdasqrt.numpy(),
-            'P': self.P
+            'trafolambda': self.trafolambda.numpy(),
+            'P': self.P,
+            'phi': self.phi
         })
         return config
 
@@ -290,6 +292,27 @@ def call(self, inputs):
     def compute_output_shape(self, input_shape):
         output_shape = input_shape[:-1] + (self.units,)
         return output_shape
 
+class UpdateMultiplicationFactorFromWeight(tf.keras.callbacks.Callback):
+    def __init__(self, model, weightnr = -1, trafo = lambda x: tf.math.square(tf.math.exp(x))):
+        super().__init__()
+        self.model = model
+        self.weightnr = weightnr
+        self.trafo = trafo
+
+    def on_batch_begin(self, epoch, logs=None):
+        # Extract the value of the last weight of the model
+        new_phi_value = self.model.weights[self.weightnr].numpy()
+
+        # Iterate through all layers of the model
+        for layer in self.model.layers:
+            # Check if the layer is an instance of CombinedModel
+            if isinstance(layer, CombinedModel):
+                # Access the LambdaLayer within the CombinedModel
+                lambda_layer = layer.lambda_layer
+
+                # Update the phi variable within the LambdaLayer
+                tf.keras.backend.set_value(lambda_layer.phi, tf.reshape(self.trafo(new_phi_value), []))
+
 def get_masks(mod):
     masks = []
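Taken together, the two hunks above change how the smoothing penalty is parametrised and scaled: LambdaLayer now uses lambda = exp(trafolambda), initialised at zero so training starts from lambda = 1, and divides the penalty term by a non-trainable multiplier phi, which the new callback refreshes before every batch from one model weight (by default the last one) passed through trafo. Below is a plain-R mirror of both pieces, illustrative only and not part of the commit; it assumes vecmatvec(w, P) amounts to the quadratic form t(w) %*% P %*% w.

# Illustrative R transcription of the loss term that LambdaLayer adds for each
# penalty matrix P after this change (assumed reading of the Python code)
lambda_penalty <- function(w, P, trafolambda, phi, damping = 1, scale = 1) {
  inf   <- 0.5 * as.numeric(t(w) %*% P %*% w)   # 0.5 * w' P w
  lmbda <- exp(trafolambda)                     # previously lambdasqrt^2
  scale * (lmbda * inf / phi + damping * inf^2 / 2)
}

# Default mapping the callback applies to the tracked weight w_phi before
# writing it into phi: square(exp(w_phi)) == exp(2 * w_phi)
trafo_default <- function(w_phi) exp(w_phi)^2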
