Commit f7da208a authored by Dennis Noll

[keras] Importance: fixed grad importance + adds sample_weights

parent 3784b6bd
@@ -1315,18 +1315,21 @@ class LBNLayer(tf.keras.layers.Layer):

 def feature_importance_grad(model, x=None, **kwargs):
-    inp = [tf.Variable(v) for v in x]
+    inp = [tf.constant(v) for v in x]
     with tf.GradientTape() as tape:
         tape.watch(inp)
         pred = model(inp, training=False)
         ix = np.argsort(pred, axis=-1)[:, -1]
         decision = tf.gather(pred, ix, batch_dims=1)
     gradients = tape.gradient(decision, inp)  # gradients for decision nodes
-    normed_gradients = [_g * _x for (_g, _x) in zip(gradients, x)]  # normed to input values
-    mean_gradients = np.concatenate(
-        [np.abs(g.numpy()).mean(axis=0).flatten() for g in normed_gradients]
-    )
+    gradients = [grad if grad is not None else 0 for grad in gradients]  # categorical tensors
+    gradients = [
+        np.array(_g) * _x.std(axis=0) for (_g, _x) in zip(gradients, x)
+    ]  # norm to value ranges
+    if "sample_weight" in kwargs:
+        gradients = [(_g.T * kwargs["sample_weight"]).T for _g in gradients]  # apply ev weight
+    mean_gradients = np.concatenate([np.abs(g).mean(axis=0).flatten() for g in gradients])
     return mean_gradients / mean_gradients.max()
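For context, a minimal usage sketch of the patched function (not part of the commit): it builds a toy two-input Keras model and calls feature_importance_grad with per-event sample weights. The model architecture, input shapes, and weight values below are illustrative assumptions, and feature_importance_grad is assumed to be importable from the patched module.

    # Illustrative only: toy model and random data, assuming the patched
    # feature_importance_grad shown in the diff above is in scope.
    import numpy as np
    import tensorflow as tf

    # toy two-input model, mirroring the list-of-arrays interface used above
    inp_a = tf.keras.Input(shape=(4,))
    inp_b = tf.keras.Input(shape=(3,))
    merged = tf.keras.layers.Concatenate()([inp_a, inp_b])
    out = tf.keras.layers.Dense(2, activation="softmax")(merged)
    model = tf.keras.Model(inputs=[inp_a, inp_b], outputs=out)

    x = [
        np.random.rand(8, 4).astype(np.float32),  # first input block, 4 features
        np.random.rand(8, 3).astype(np.float32),  # second input block, 3 features
    ]
    sample_weight = np.random.rand(8)  # hypothetical per-event weights

    # returns one importance per input feature (4 + 3 = 7 here),
    # normalized so the largest value is 1
    importances = feature_importance_grad(model, x=x, sample_weight=sample_weight)
    print(importances)

The normalization change is worth noting: the old code scaled gradients by the raw input values, while the new code scales by the per-feature standard deviation of x, so the importances reflect how much the decision output moves over each feature's typical value range.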