[Scipy-svn] r4897 - in trunk/scipy/stats: . tests
scipy-svn at scipy.org
Sat Nov 1 12:04:26 EDT 2008
Author: stefan
Date: 2008-11-01 11:04:04 -0500 (Sat, 01 Nov 2008)
New Revision: 4897
Modified:
trunk/scipy/stats/distributions.py
trunk/scipy/stats/tests/test_distributions.py
Log:
Fix a sign in `entropy` [patch by Herman Engelbrecht].
Modified: trunk/scipy/stats/distributions.py
===================================================================
--- trunk/scipy/stats/distributions.py 2008-11-01 15:17:33 UTC (rev 4896)
+++ trunk/scipy/stats/distributions.py 2008-11-01 16:04:04 UTC (rev 4897)
@@ -3180,10 +3180,10 @@
"""S = entropy(pk,qk=None)
calculate the entropy of a distribution given the p_k values
- S = -sum(pk * log(pk),axis=0)
+ S = -sum(pk * log(pk), axis=0)
If qk is not None, then compute a relative entropy
- S = -sum(pk * log(pk / qk),axis=0)
+ S = sum(pk * log(pk / qk), axis=0)
Routine will normalize pk and qk if they don't sum to 1
"""
@@ -3200,7 +3200,7 @@
# too, the relative entropy is infinite.
if any(take(pk,nonzero(qk==0.0),axis=0)!=0.0, 0):
return inf
- vec = where (pk == 0, 0.0, pk*log(pk / qk))
+ vec = where (pk == 0, 0.0, -pk*log(pk / qk))
return -sum(vec,axis=0)
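The corrected hunk now computes the relative entropy (Kullback-Leibler divergence) S = sum(pk * log(pk / qk)), which is non-negative and zero exactly when pk == qk; before this revision the routine returned its negation. A minimal sketch of the corrected computation, assuming only NumPy (the rel_entropy helper below is illustrative, not the scipy.stats API; normalization follows the routine's docstring):

    import numpy as np

    def rel_entropy(pk, qk):
        # Illustrative helper mirroring the patched logic in distributions.py.
        pk = np.asarray(pk, dtype=float)
        qk = np.asarray(qk, dtype=float)
        pk, qk = pk / pk.sum(), qk / qk.sum()     # normalize, per the docstring
        if np.any(pk[qk == 0.0] != 0.0):
            return np.inf                         # pk has mass where qk has none
        with np.errstate(divide='ignore', invalid='ignore'):
            vec = np.where(pk == 0, 0.0, pk * np.log(pk / qk))
        return vec.sum()                          # S = sum(pk * log(pk / qk)) >= 0

The where() call enforces the usual 0 * log(0) = 0 convention: terms with pk == 0 are set to 0.0 instead of propagating a NaN.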
Modified: trunk/scipy/stats/tests/test_distributions.py
===================================================================
--- trunk/scipy/stats/tests/test_distributions.py 2008-11-01 15:17:33 UTC (rev 4896)
+++ trunk/scipy/stats/tests/test_distributions.py 2008-11-01 16:04:04 UTC (rev 4897)
@@ -224,5 +224,15 @@
if stats.bernoulli.__doc__ is not None:
self.failUnless("bernoulli" in stats.bernoulli.__doc__.lower())
+class TestEntropy(TestCase):
+ def test_entropy_positive(self):
+ """See ticket #497"""
+ pk = [0.5,0.2,0.3]
+ qk = [0.1,0.25,0.65]
+ eself = stats.entropy(pk,pk)
+ edouble = stats.entropy(pk,qk)
+ assert(0.0 == eself)
+ assert(edouble >= 0.0)
+
if __name__ == "__main__":
run_module_suite()
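A quick interactive check of the new TestEntropy case, using the vectors from ticket #497 (assumes the patched scipy.stats; printed values are approximate):

    from scipy import stats

    pk = [0.5, 0.2, 0.3]
    qk = [0.1, 0.25, 0.65]
    print(stats.entropy(pk, pk))   # 0.0 -- a distribution diverges from itself by nothing
    print(stats.entropy(pk, qk))   # ~0.53, non-negative after the sign fix (negated before r4897)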