summaryrefslogtreecommitdiff
path: root/ml_exp/kernels.py
diff options
context:
space:
mode:
authorDavid Luevano Alvarado <55825613+luevano@users.noreply.github.com>2020-03-07 10:11:53 -0700
committerDavid Luevano Alvarado <55825613+luevano@users.noreply.github.com>2020-03-07 10:11:53 -0700
commitf57f38c2886052bef92b3885acc2790ac5bc340d (patch)
treee7df0fa546df2de6552ff0378edecd929ea87dba /ml_exp/kernels.py
parent00301d1a9a8a7f975b64fe4ef85458f6a40776f7 (diff)
Add laplacian kernel
Diffstat (limited to 'ml_exp/kernels.py')
-rw-r--r--ml_exp/kernels.py55
1 file changed, 55 insertions, 0 deletions
diff --git a/ml_exp/kernels.py b/ml_exp/kernels.py
index abc71f7af..488232392 100644
--- a/ml_exp/kernels.py
+++ b/ml_exp/kernels.py
@@ -83,3 +83,58 @@ def gaussian_kernel(X1,
norm = np.linalg.norm(X2 - X1[i], axis=-1)
K[i, :] = np.exp(i_sigma * np.square(norm))
return K
+
+
def laplacian_kernel(X1,
                     X2,
                     sigma,
                     use_tf=True):
    """
    Calculates the Laplacian Kernel: K_ij = exp(-||X1_i - X2_j||_1 / sigma).
    X1: first representations.
    X2: second representations.
    sigma: kernel width.
    use_tf: if tensorflow should be used.
    """
    # If tf is to be used but couldn't be imported, don't try to use it.
    if use_tf and not TF_AV:
        use_tf = False
    # Without a GPU the tf branch below never assigns K, which made the
    # final 'return K' raise UnboundLocalError; fall back to NumPy instead.
    if use_tf and not tf.config.experimental.list_physical_devices('GPU'):
        use_tf = False

    X1_size = X1.shape[0]
    X2_size = X2.shape[0]
    # Laplacian kernel: no 1/2 factor (that belongs to the Gaussian kernel),
    # and the distance is the L1 (Manhattan) norm, not the Euclidean norm.
    i_sigma = -1.0 / sigma

    if use_tf:
        with tf.device('GPU:0'):
            X1 = tf.convert_to_tensor(X1)
            X2 = tf.convert_to_tensor(X2)
            X2r = tf.rank(X2)

            def cond(i, _):
                return tf.less(i, X1_size)

            def body(i, K):
                # Entrywise L1 distance. tf.norm(ord=1) over two axes would
                # compute the matrix 1-norm, so sum |.| explicitly instead.
                if X2r == 3:
                    norm = tf.reduce_sum(tf.abs(X2 - X1[i]), axis=(1, 2))
                else:
                    norm = tf.reduce_sum(tf.abs(X2 - X1[i]), axis=-1)

                return (i + 1,
                        K.write(i, tf.exp(i_sigma * norm)))

            K = tf.TensorArray(dtype=tf.float64,
                               size=X1_size)
            i_state = (0, K)
            n, K = tf.while_loop(cond, body, i_state)
            K = K.stack()
    else:
        K = np.zeros((X1_size, X2_size), dtype=np.float64)
        # Faster way of calculating the kernel (no numba support).
        for i in range(X1_size):
            if X2.ndim == 3:
                norm = np.sum(np.abs(X2 - X1[i]), axis=(1, 2))
            else:
                norm = np.sum(np.abs(X2 - X1[i]), axis=-1)
            K[i, :] = np.exp(i_sigma * norm)
    return K