Commits

Grzegorz Chrupała committed eb36c71

Adds Eq constraints to the signatures of kullbackLeibler and jensenShannon.

Comments (0)

Files changed (1)

nlp-scores/NLP/Scores.hs

 -- | Kullback-Leibler divergence: KL(X,Y) = SUM_i P(X=i) log_2(P(X=i)/P(Y=i)). 
 -- The distributions can be unnormalized.
         
-kullbackLeibler :: (Floating a, F.Foldable f, T.Traversable t) => t a -> f a -> a
+kullbackLeibler :: (Eq a, Floating a, F.Foldable f, T.Traversable t) => t a -> f a -> a
 kullbackLeibler xs ys = sum . zipWithTF f xs $ ys
   where f !x !y = let px = x / sx in px `mult` logBase 2 (px/(y/sy))
         sx = sum xs
 
 -- | Jensen-Shannon divergence: JS(X,Y) = 1/2 KL(X,(X+Y)/2) + 1/2 KL(Y,(X+Y)/2).
 -- The distributions can be unnormalized.
-jensenShannon :: (Floating a, T.Traversable t, T.Traversable u) => t a -> u a -> a
+jensenShannon :: (Eq a, Floating a, T.Traversable t, T.Traversable u) => t a -> u a -> a
 jensenShannon xs ys = 0.5 * kullbackLeibler xs zs + 0.5 * kullbackLeibler ys zs
   where zs = zipWithTF (+) xs ys
           
 -- | The sum of a sequence of numbers
 sum :: (F.Foldable t, Num a) => t a -> a
 sum = F.foldl' (+) 0
-{-# SPECIALIZE sum :: [Double] -> Double #-}
-{-# SPECIALIZE sum :: [Int] -> Int #-}
 {-# INLINE sum #-}
 
 -- | The mean of a sequence of numbers.