Merged
Adding LogCosh, Poisson, KLDivergence metrics.
pavithrasv committed Aug 30, 2019
commit 7a0dfd6b6fa8acde05908327c5ef0b9b465ef07e
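
In short, the commit adds three `MeanMetricWrapper` subclasses around the existing `logcosh`, `poisson`, and `kullback_leibler_divergence` functions. Per the docstrings below, all three can be tracked together during training; a minimal sketch (the `'mse'` loss here is an arbitrary choice for illustration):

```python
model.compile('sgd',
              loss='mse',
              metrics=[keras.metrics.LogCoshError(),
                       keras.metrics.Poisson(),
                       keras.metrics.KLDivergence()])
```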
52 changes: 52 additions & 0 deletions keras/metrics.py
@@ -618,6 +618,58 @@ def __init__(self, k=5, name='sparse_top_k_categorical_accuracy', dtype=None):
        sparse_top_k_categorical_accuracy, name, dtype=dtype, k=k)


class LogCoshError(MeanMetricWrapper):
    """Computes the logarithm of the hyperbolic cosine of the prediction error.

    `metric = log((exp(x) + exp(-x))/2)`, where x is the error (y_pred - y_true)

    Usage with the compile API:

    ```python
    model = keras.Model(inputs, outputs)
    model.compile('sgd', metrics=[keras.metrics.LogCoshError()])
    ```
    """

    def __init__(self, name='logcosh', dtype=None):
        super(LogCoshError, self).__init__(logcosh, name, dtype=dtype)

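To see what the formula computes, here is a NumPy-only sanity check (illustrative, not part of the diff): for small errors log-cosh behaves like `x**2 / 2`, and for large errors like `abs(x) - log(2)`, so it acts roughly like MSE but is less sensitive to outliers.

```python
import numpy as np

for x in (0.1, 10.0):
    # The metric's per-element value, straight from the docstring formula.
    logcosh = np.log((np.exp(x) + np.exp(-x)) / 2)
    print(x, logcosh, x ** 2 / 2, abs(x) - np.log(2))
# x = 0.1  -> logcosh ~ 0.0050, matching x**2 / 2      ~ 0.0050
# x = 10.0 -> logcosh ~ 9.3069, matching abs(x)-log(2) ~ 9.3069
```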

class Poisson(MeanMetricWrapper):
    """Computes the Poisson metric between `y_true` and `y_pred`.

    `metric = y_pred - y_true * log(y_pred)`

    Usage with the compile API:

    ```python
    model = keras.Model(inputs, outputs)
    model.compile('sgd', metrics=[keras.metrics.Poisson()])
    ```
    """

    def __init__(self, name='poisson', dtype=None):
        super(Poisson, self).__init__(poisson, name, dtype=dtype)

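The formula is the Poisson negative log-likelihood with the terms that do not depend on `y_pred` dropped. A hand evaluation on the same values the unweighted test below uses (a standalone NumPy sketch, not part of the diff):

```python
import numpy as np

y_true = np.asarray([[4., 8., 12.], [8., 1., 3.]])
y_pred = np.asarray([[1., 9., 2.], [5., 2., 6.]])

# metric = y_pred - y_true * log(y_pred), averaged over all six elements.
per_element = y_pred - y_true * np.log(y_pred)
print(per_element.mean())  # ~ -3.3066, the tests' sum(...) / batch_size of 6
```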

class KLDivergence(MeanMetricWrapper):
    """Computes Kullback-Leibler divergence metric between `y_true` and `y_pred`.

    `metric = y_true * log(y_true / y_pred)`

    Usage with the compile API:

    ```python
    model = keras.Model(inputs, outputs)
    model.compile('sgd', metrics=[keras.metrics.KLDivergence()])
    ```
    """

    def __init__(self, name='kullback_leibler_divergence', dtype=None):
        super(KLDivergence, self).__init__(
            kullback_leibler_divergence, name, dtype=dtype)

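Similarly for the KL formula, evaluated by hand on the probability-like values from the tests below (a NumPy sketch; `y_true` and `y_pred` are assumed strictly positive so the log is defined):

```python
import numpy as np

y_true = np.asarray([[.5, .8, .12], [.7, .43, .8]])
y_pred = np.asarray([[.4, .9, .12], [.36, .3, .4]])

# metric = y_true * log(y_true / y_pred), summed over the last axis,
# then averaged over the batch dimension.
per_sample = np.sum(y_true * np.log(y_true / y_pred), axis=-1)
print(per_sample.mean())  # ~ 0.596, the tests' sum(...) / batch_size of 2
```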

def accuracy(y_true, y_pred):
    if not K.is_tensor(y_pred):
        y_pred = K.constant(y_pred)
110 changes: 110 additions & 0 deletions tests/keras/metrics_test.py
@@ -519,3 +519,113 @@ def test_weighted(self):
        sample_weight = (1.0, 0.0, 1.0)
        result = a_obj(y_true, y_pred, sample_weight=sample_weight)
        assert np.allclose(1.0, K.eval(result), atol=1e-5)


class TestLogCoshError(object):

    def setup(self):
        self.y_pred = np.asarray([1, 9, 2, -5, -2, 6]).reshape((2, 3))
        self.y_true = np.asarray([4, 8, 12, 8, 1, 3]).reshape((2, 3))
        self.batch_size = 6
        error = self.y_pred - self.y_true
        self.expected_results = np.log((np.exp(error) + np.exp(-error)) / 2)

    def test_config(self):
        logcosh_obj = metrics.LogCoshError(name='logcosh', dtype='int32')
        assert logcosh_obj.name == 'logcosh'
        assert logcosh_obj.dtype == 'int32'

    def test_unweighted(self):
        self.setup()
        logcosh_obj = metrics.LogCoshError()

        result = logcosh_obj(self.y_true, self.y_pred)
        expected_result = np.sum(self.expected_results) / self.batch_size
        assert np.allclose(K.eval(result), expected_result, atol=1e-3)

    def test_weighted(self):
        self.setup()
        logcosh_obj = metrics.LogCoshError()
        sample_weight = [[1.2], [3.4]]
        result = logcosh_obj(self.y_true, self.y_pred, sample_weight=sample_weight)

        # The (2, 1) weights broadcast across the last axis to shape (2, 3).
        sample_weight = np.asarray([1.2, 1.2, 1.2, 3.4, 3.4, 3.4]).reshape((2, 3))
        expected_result = np.multiply(self.expected_results, sample_weight)
        expected_result = np.sum(expected_result) / np.sum(sample_weight)
        assert np.allclose(K.eval(result), expected_result, atol=1e-3)


class TestPoisson(object):

    def setup(self):
        self.y_pred = np.asarray([1, 9, 2, 5, 2, 6]).reshape((2, 3))
        self.y_true = np.asarray([4, 8, 12, 8, 1, 3]).reshape((2, 3))
        self.batch_size = 6
        self.expected_results = self.y_pred - np.multiply(
            self.y_true, np.log(self.y_pred))

    def test_config(self):
        poisson_obj = metrics.Poisson(name='poisson', dtype='int32')
        assert poisson_obj.name == 'poisson'
        assert poisson_obj.dtype == 'int32'

        poisson_obj2 = metrics.Poisson.from_config(poisson_obj.get_config())
        assert poisson_obj2.name == 'poisson'
        assert poisson_obj2.dtype == 'int32'

    def test_unweighted(self):
        self.setup()
        poisson_obj = metrics.Poisson()

        result = poisson_obj(self.y_true, self.y_pred)
        expected_result = np.sum(self.expected_results) / self.batch_size
        assert np.allclose(K.eval(result), expected_result, atol=1e-3)

    def test_weighted(self):
        self.setup()
        poisson_obj = metrics.Poisson()
        sample_weight = [[1.2], [3.4]]

        result = poisson_obj(self.y_true, self.y_pred, sample_weight=sample_weight)
        sample_weight = np.asarray([1.2, 1.2, 1.2, 3.4, 3.4, 3.4]).reshape((2, 3))
        expected_result = np.multiply(self.expected_results, sample_weight)
        expected_result = np.sum(expected_result) / np.sum(sample_weight)
        assert np.allclose(K.eval(result), expected_result, atol=1e-3)


class TestKLDivergence(object):

    def setup(self):
        self.y_pred = np.asarray([.4, .9, .12, .36, .3, .4]).reshape((2, 3))
        self.y_true = np.asarray([.5, .8, .12, .7, .43, .8]).reshape((2, 3))
        self.batch_size = 2
        self.expected_results = np.multiply(
            self.y_true, np.log(self.y_true / self.y_pred))

    def test_config(self):
        k_obj = metrics.KLDivergence(name='kld', dtype='int32')
        assert k_obj.name == 'kld'
        assert k_obj.dtype == 'int32'

        k_obj2 = metrics.KLDivergence.from_config(k_obj.get_config())
        assert k_obj2.name == 'kld'
        assert k_obj2.dtype == 'int32'

    def test_unweighted(self):
        self.setup()
        k_obj = metrics.KLDivergence()

        result = k_obj(self.y_true, self.y_pred)
        expected_result = np.sum(self.expected_results) / self.batch_size
        assert np.allclose(K.eval(result), expected_result, atol=1e-3)

    def test_weighted(self):
        self.setup()
        k_obj = metrics.KLDivergence()
        sample_weight = [[1.2], [3.4]]
        result = k_obj(self.y_true, self.y_pred, sample_weight=sample_weight)

        sample_weight = np.asarray([1.2, 1.2, 1.2, 3.4, 3.4, 3.4]).reshape((2, 3))
        expected_result = np.multiply(self.expected_results, sample_weight)
        # KL divergence is summed over the last axis before weighting, so the
        # weights act per sample and the normalizer is 1.2 + 3.4 rather than
        # the sum of the broadcast weights.
        expected_result = np.sum(expected_result) / (1.2 + 3.4)
        assert np.allclose(K.eval(result), expected_result, atol=1e-3)