-
diff --git a/lossless-triplet-loss.html b/lossless-triplet-loss.html
index b93a1cf..825e9f1 100644
--- a/lossless-triplet-loss.html
+++ b/lossless-triplet-loss.html
@@ -139,7 +139,7 @@
THE PROBLEM
loss = K.maximum(basic_loss,0.0)
return loss
-
+
def create_base_network(in_dims, out_dims):
"""
Base network to be shared.
@@ -155,7 +155,7 @@ THE PROBLEM
model.add(BatchNormalization())
return model
-
+
in_dims = (N_MINS, n_feat)
out_dims = N_FACTORS
@@ -240,7 +240,7 @@ THE PROBLEM
OTHER LOSSES
-Another famous loss function the contrastive loss describe by Yan LeCun and his team in their paper Dimensionality Reduction by Learning an Invariant Mapping is also maxing the negative result, which creates the same issue.
+Another famous loss function, the contrastive loss, described by Yann LeCun and his team in their paper Dimensionality Reduction by Learning an Invariant Mapping, is also maxing the negative result, which creates the same issue.