Convenience method for conditional mutual information (#86)
kahaaga authored May 23, 2022
1 parent f5aa11c commit bc7d668
Showing 6 changed files with 61 additions and 5 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,9 @@
 # Changelog
 
+## Release v1.8
+
+- New function `conditional_mutualinfo`, which computes conditional mutual information.
+
 ## Release v1.7
 
 - Bug fix in `mutualinfo` for naive estimators from Entropies.jl.
2 changes: 1 addition & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "TransferEntropy"
uuid = "ea221983-52f3-5440-99c7-13ea201cd633"
repo = "https://github.com/kahaaga/TransferEntropy.jl.git"
version = "1.7.0"
version = "1.8.0"

[deps]
DSP = "717857b8-e6f2-59f4-9121-6e50c889abd2"
1 change: 1 addition & 0 deletions docs/src/mutualinfo.md
@@ -1,4 +1,5 @@
 
 ```@docs
 mutualinfo
+conditional_mutualinfo
 ```
28 changes: 24 additions & 4 deletions src/mutualinfo/interface.jl
@@ -1,18 +1,19 @@
-export mutualinfo, Kraskov1, Kraskov2
+export mutualinfo, conditional_mutualinfo, Kraskov1, Kraskov2
 
 abstract type MutualInformationEstimator <: EntropyEstimator end
 
 """
     mutualinfo(x, y, est; base = 2, q = 1)
 
 Estimate mutual information between `x` and `y`, ``I^{q}(x; y)``, using the provided
-entropy/probability estimator `est` from Entropies.jl, and Rényi entropy of order `q`
+entropy/probability estimator `est` from Entropies.jl or specialized estimator from
+TransferEntropy.jl (e.g. [`Kraskov1`](@ref)), and Rényi entropy of order `q`
 (defaults to `q = 1`, which is the Shannon entropy), with logarithms to the given `base`.
 
 Both `x` and `y` can be vectors or (potentially multivariate) [`Dataset`](@ref)s.
 
 Worth highlighting here are the estimators that compute entropies _directly_, e.g.
-nearest-neighbor based methhods. The choice is between naive
+nearest-neighbor based methods. The choice is between naive
 estimation using the [`KozachenkoLeonenko`](@ref) or [`Kraskov`](@ref) entropy estimators,
 or the improved [`Kraskov1`](@ref) and [`Kraskov2`](@ref) dedicated ``I`` estimators. The
 latter estimators reduce bias compared to the naive estimators.
@@ -49,6 +50,25 @@ function mutualinfo(x::Vector_or_Dataset, y::Vector_or_Dataset, est; base = 2, q
     Y = genentropy(Dataset(y), est; base = base, q = q)
     XY = genentropy(Dataset(x, y), est; base = base, q = q)
     MI = X + Y - XY
-end
+end
+
+"""
+    conditional_mutualinfo(x, y, z, est; base = 2, q = 1)
+
+Estimate ``I^{q}(x; y | z)``, the conditional mutual information between `x` and `y`
+given `z`, using the provided entropy/probability estimator `est` from Entropies.jl or a
+specialized estimator from TransferEntropy.jl (e.g. [`Kraskov1`](@ref)), and Rényi entropy
+of order `q` (defaults to `q = 1`, which is the Shannon entropy), with logarithms to the
+given `base`.
+
+As for [`mutualinfo`](@ref), the variables `x`, `y` and `z` can be vectors or (potentially
+multivariate) [`Dataset`](@ref)s, and the keyword `q` cannot be provided for
+nearest-neighbor estimators (it is hard-coded to `q = 1`).
+"""
+function conditional_mutualinfo(x::Vector_or_Dataset, y::Vector_or_Dataset, z::Vector_or_Dataset, est;
+        base = 2, q = 1)
+    mutualinfo(x, Dataset(y, z), est; base = base, q = q) -
+        mutualinfo(x, z, est; base = base, q = q)
+end
 
 include("nearestneighbor.jl")
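For orientation, here is a minimal usage sketch of the new convenience method, mirroring the calls exercised in the test suite further down. The toy data, series length, and binning parameter are illustrative only; `RectangularBinning` and `Dataset` come from the package's dependencies (Entropies.jl and DelayEmbeddings.jl), which are in scope in the test environment:

```julia
using TransferEntropy

# Illustrative toy series; in practice x, y, z are observed data.
x, y, z = rand(1000), rand(1000), rand(1000)

# Histogram-based estimator, as used in the test suite.
est = RectangularBinning(3)

# I(x; y | z) with Shannon entropy (q = 1) and logarithms to base 2.
cmi = conditional_mutualinfo(x, y, z, est; base = 2)

# The convenience method is exactly the chain-rule decomposition
#   I(x; y | z) = I(x; y, z) - I(x; z),
# so this holds up to floating-point error:
cmi ≈ mutualinfo(x, Dataset(y, z), est; base = 2) - mutualinfo(x, z, est; base = 2)
```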
7 changes: 7 additions & 0 deletions src/mutualinfo/nearestneighbor.jl
@@ -169,4 +169,11 @@ function mutualinfo(x::Vector_or_Dataset{D1, T}, y::Vector_or_Dataset{D2, T}, es
     else
         return MI
     end
 end
+
+# knn estimators don't have the `q` keyword, so need specialized method
+function conditional_mutualinfo(x::Vector_or_Dataset, y::Vector_or_Dataset, z::Vector_or_Dataset,
+        est::KNNMutualInformationEstimator; base = MathConstants.e)
+    mutualinfo(x, Dataset(y, z), est; base = base) -
+        mutualinfo(x, z, est; base = base)
+end
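A corresponding sketch for the kNN path, under the same illustrative assumptions as above: the dedicated nearest-neighbor estimators accept no `q` keyword, and their default `base` is `MathConstants.e`, so results come in nats unless a base is passed explicitly.

```julia
using TransferEntropy

x, y, z = rand(1000), rand(1000), rand(1000)

# Kraskov-type kNN mutual information estimator with k = 2 neighbors,
# as used in the test suite.
est = Kraskov1(2)

cmi_nats = conditional_mutualinfo(x, y, z, est)            # base defaults to e (nats)
cmi_bits = conditional_mutualinfo(x, y, z, est; base = 2)  # bits
```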
24 changes: 24 additions & 0 deletions test/runtests.jl
@@ -52,6 +52,30 @@ BruteForce = Entropies.BruteForce
     @test mutualinfo(z, w, est_k2) isa Real
 end
 
+@testset "Conditional mutual information" begin
+    s, t, c = rand(100), rand(100), rand(100)
+    est_knn = Kraskov1(2)
+    est_bin = RectangularBinning(3)
+    # binning estimator yields non-negative values
+    @test conditional_mutualinfo(s, t, c, est_bin, q = 2) isa Real
+    @test conditional_mutualinfo(s, t, c, est_bin, q = 2) >= 0.0
+    # verify formula I(X; Y | Z) = I(X; Y, Z) - I(X; Z)
+    @test conditional_mutualinfo(s, t, c, est_bin, base = 2) ≈
+        mutualinfo(s, Dataset(t, c), est_bin, base = 2) - mutualinfo(s, c, est_bin, base = 2)
+
+    @test conditional_mutualinfo(s, t, c, est_knn) isa Real
+    @test conditional_mutualinfo(s, t, c, est_knn, base = 2) ≈
+        mutualinfo(s, Dataset(t, c), est_knn, base = 2) - mutualinfo(s, c, est_knn, base = 2)
+
+    # Different types of input
+    @test conditional_mutualinfo(s, Dataset(t, c), c, est_bin) isa Real
+    @test conditional_mutualinfo(Dataset(s, t), Dataset(t, c), c, est_bin) isa Real
+    @test conditional_mutualinfo(Dataset(s, t), Dataset(t, c), Dataset(c, s), est_bin) isa Real
+    @test conditional_mutualinfo(s, Dataset(t, c), Dataset(c, s), est_bin) isa Real
+    @test conditional_mutualinfo(s, t, Dataset(c, s), est_bin) isa Real
+    @test conditional_mutualinfo(Dataset(s, t), t, c, est_bin) isa Real
+end
+
 @testset "Transfer entropy" begin
     s, t, c = rand(100), rand(100), rand(100)
 

2 comments on commit bc7d668

@kahaaga (Member Author)

@JuliaRegistrator

Registration pull request created: JuliaRegistries/General/60844

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the GitHub interface, or via:

git tag -a v1.8.0 -m "<description of version>" bc7d668e37f3da97d515e169de6ea423c7a1b63a
git push origin v1.8.0
