@@ -19,8 +19,8 @@ entropy is compared with the largest Lyapunov exponents from time series of the
 logistic map. Entropy estimates using [`SymbolicWeightedPermutation`](@ref)
 and [`SymbolicAmplitudeAwarePermutation`](@ref) are added here for comparison.

-```@example
-using Entropies, DynamicalSystems, PyPlot
+```@example MAIN
+using DynamicalSystems, CairoMakie

 ds = Systems.logistic()
 rs = 3.4:0.001:4
@@ -34,48 +34,35 @@ hs_wtperm = Float64[]
 hs_ampperm = Float64[]

 base = Base.MathConstants.e
-
-# Original paper doesn't use random assignment for ties, here: sort after order of occurrence
-lt = Base.isless
-est = SymbolicPermutation(m = m, τ = τ, lt = lt)
-est_aa = SymbolicAmplitudeAwarePermutation(m = m, τ = τ, lt = lt)
-est_wt = SymbolicWeightedPermutation(m = m, τ = τ, lt = lt)
-
 for r in rs
     ds.p[1] = r
     push!(lyaps, lyapunov(ds, N_lyap))

     x = trajectory(ds, N_ent) # time series
-    hperm = Entropies.genentropy(x, est, base = base)
-    hampperm = Entropies.genentropy(x, est_aa, base = base)
+    hperm = Entropies.genentropy(x, SymbolicPermutation(m = m, τ = τ), base = base)
     hwtperm = Entropies.genentropy(x, SymbolicWeightedPermutation(m = m, τ = τ), base = base)
-    push!(hs_perm, hperm); push!(hs_ampperm, hampperm); push!(hs_wtperm, hwtperm);
+    hampperm = Entropies.genentropy(x, SymbolicAmplitudeAwarePermutation(m = m, τ = τ), base = base)
+
+    push!(hs_perm, hperm); push!(hs_wtperm, hwtperm); push!(hs_ampperm, hampperm)
 end

-f = figure(figsize = (6, 8))
-a1 = subplot(411)
-plot(rs, lyaps); ylim(-2, log(2)); ylabel("\$\\lambda\$")
-a1.axes.get_xaxis().set_ticklabels([])
-xlim(rs[1], rs[end]);
-
-a2 = subplot(412)
-plot(rs, hs_perm; color = "C2"); xlim(rs[1], rs[end]);
-xlabel(""); ylabel("\$h_6 (SP)\$")
-
-a3 = subplot(413)
-plot(rs, hs_wtperm; color = "C3"); xlim(rs[1], rs[end]);
-xlabel(""); ylabel("\$h_6 (SWP)\$")
-
-a4 = subplot(414)
-plot(rs, hs_ampperm; color = "C4"); xlim(rs[1], rs[end]);
-xlabel("\$r\$"); ylabel("\$h_6 (SAAP)\$")
-tight_layout()
-savefig("permentropy.png")
+fig = Figure()
+a1 = Axis(fig[1,1]; ylabel = L"\lambda")
+lines!(a1, rs, lyaps); ylims!(a1, (-2, log(2)))
+a2 = Axis(fig[2,1]; ylabel = L"h_6 (SP)")
+lines!(a2, rs, hs_perm; color = Cycled(2))
+a3 = Axis(fig[3,1]; ylabel = L"h_6 (SWP)")
+lines!(a3, rs, hs_wtperm; color = Cycled(3))
+a4 = Axis(fig[4,1]; ylabel = L"h_6 (SAAP)")
+lines!(a4, rs, hs_ampperm; color = Cycled(4))
+a4.xlabel = L"r"
+
+for a in (a1, a2, a3)
+    hidexdecorations!(a, grid = false)
+end
+fig
 ```

-![](permentropy.png)
-
-
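+For intuition, the counting that `SymbolicPermutation` performs can be sketched by
+hand (an illustration of the idea only, not the library's implementation; the helper
+`naive_permentropy` is hypothetical):
+
+```julia
+# Map each length-m window of x to its ordinal pattern (the permutation that
+# sorts the window), count pattern frequencies, and take their Shannon entropy.
+function naive_permentropy(x; m = 3, τ = 1, base = Base.MathConstants.e)
+    counts = Dict{Vector{Int}, Int}()
+    for i in 1:(length(x) - (m - 1)*τ)
+        pattern = sortperm(x[i:τ:(i + (m - 1)*τ)])  # ordinal pattern of this window
+        counts[pattern] = get(counts, pattern, 0) + 1
+    end
+    probs = collect(values(counts)) ./ sum(values(counts))
+    return -sum(p * log(base, p) for p in probs)
+end
+
+naive_permentropy(sin.(0.05:0.05:50))  # regular signal ⇒ few patterns ⇒ low entropy
+```
+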
 ## Visitation frequency (binning)

 ```@docs
@@ -116,20 +103,19 @@ points are within radius `1.5` of `p`. Plotting the actual points, along with the
 associated probabilities estimated by the KDE procedure, we get the following surface
 plot.

-```@example
-using Distributions, PyPlot, DelayEmbeddings, Entropies
+```@example MAIN
+using DynamicalSystems, CairoMakie, Distributions
 𝒩 = MvNormal([1, -4], 2)
 N = 500
 D = Dataset(sort([rand(𝒩) for i = 1:N]))
 x, y = columns(D)
 p = probabilities(D, NaiveKernel(1.5))
-surf(x, y, p.p)
-xlabel("x"); ylabel("y")
-savefig("kernel_surface.png")
+fig, ax = surface(x, y, p.p; axis = (type = Axis3,))
+ax.zlabel = "P"
+ax.zticklabelsvisible = false
+fig
 ```

-![](kernel_surface.png)
-
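+The brute-force idea behind `NaiveKernel` can be sketched as follows (an illustration
+only, not the library's faster implementation; `naive_kernel_probs` is a hypothetical
+helper):
+
+```julia
+# Each point's probability weight is proportional to how many points of the
+# dataset fall within the chosen radius ε of it (here including the point itself).
+function naive_kernel_probs(pts, ε)
+    counts = [count(q -> sqrt(sum(abs2, p .- q)) ≤ ε, pts) for p in pts]
+    return counts ./ sum(counts)
+end
+
+naive_kernel_probs([randn(2) for _ in 1:100], 1.5)
+```
+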
 ## Time-scale (wavelet)

 ```@docs
@@ -138,41 +124,36 @@ TimeScaleMODWT

 ### Example

-The scale-resolved wavelet entropy should be lower for very regular signals (most of the
+The scale-resolved wavelet entropy should be lower for very regular signals (most of the
 energy is contained at one scale) and higher for very irregular signals (energy is
 spread out more across scales).

-```@example
-using Entropies, PyPlot
+```@example MAIN
+using DynamicalSystems, CairoMakie
 N, a = 1000, 10
 t = LinRange(0, 2*a*π, N)

 x = sin.(t);
-y = sin.(t .+ cos.(t/0.5));
+y = sin.(t .+ cos.(t/0.5));
 z = sin.(rand(1:15, N) ./ rand(1:10, N))

 est = TimeScaleMODWT()
-h_x = Entropies.genentropy(x, est)
-h_y = Entropies.genentropy(y, est)
-h_z = Entropies.genentropy(z, est)
-
-f = figure(figsize = (10,6))
-ax = subplot(311)
-px = plot(t, x; color = "C1", label = "h=$(h=round(h_x, sigdigits = 5))");
-ylabel("x"); legend()
-ay = subplot(312)
-py = plot(t, y; color = "C2", label = "h=$(h=round(h_y, sigdigits = 5))");
-ylabel("y"); legend()
-az = subplot(313)
-pz = plot(t, z; color = "C3", label = "h=$(h=round(h_z, sigdigits = 5))");
-ylabel("z"); xlabel("Time"); legend()
-tight_layout()
-savefig("waveletentropy.png")
+h_x = genentropy(x, est)
+h_y = genentropy(y, est)
+h_z = genentropy(z, est)
+
+fig = Figure()
+ax = Axis(fig[1,1]; ylabel = "x")
+lines!(ax, t, x; color = Cycled(1), label = "h=$(round(h_x, sigdigits = 5))")
+ay = Axis(fig[2,1]; ylabel = "y")
+lines!(ay, t, y; color = Cycled(2), label = "h=$(round(h_y, sigdigits = 5))")
+az = Axis(fig[3,1]; ylabel = "z", xlabel = "time")
+lines!(az, t, z; color = Cycled(3), label = "h=$(round(h_z, sigdigits = 5))")
+for a in (ax, ay, az); axislegend(a); end
+for a in (ax, ay); hidexdecorations!(a; grid=false); end
+fig
 ```

-![](waveletentropy.png)
-
-
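+The quantity in miniature (a hand-worked illustration with made-up per-scale
+energies): normalizing energies to probabilities and taking their Shannon entropy
+gives a low value when energy sits at one scale and a high value when it is spread out.
+
+```julia
+concentrated = [0.97, 0.01, 0.01, 0.01]  # hypothetical energy per scale
+spread       = [0.25, 0.25, 0.25, 0.25]
+H(p) = -sum(x * log(x) for x in p)       # Shannon entropy (nats)
+H(concentrated), H(spread)               # ≈ (0.17, 1.39)
+```
+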
 ## Nearest neighbor estimators

 ### Kraskov
@@ -189,20 +170,20 @@ KozachenkoLeonenko

 #### Example

-This example reproduces Figure in Charzyńska & Gambin (2016)[^Charzyńska2016]. Both
-estimators nicely converge to the true entropy with increasing time series length.
-For a uniform 1D distribution ``U(0, 1)``, the true entropy is `0` (red line).
+This example reproduces a figure from Charzyńska & Gambin (2016)[^Charzyńska2016]. Both
+estimators nicely converge to the true entropy with increasing time series length.
+For a uniform 1D distribution ``U(0, 1)``, the true entropy is `0`.
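+(For a uniform distribution ``U(a, b)`` the differential entropy is ``\log(b - a)``,
+which indeed vanishes for ``U(0, 1)``.)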

-```@example
-using Entropies, DelayEmbeddings, StatsBase
-import Distributions: Uniform, Normal
+```@example MAIN
+using DynamicalSystems, CairoMakie, Statistics
+using Distributions: Uniform, Normal

 Ns = [100:100:500; 1000:1000:10000]
 Ekl = Vector{Vector{Float64}}(undef, 0)
 Ekr = Vector{Vector{Float64}}(undef, 0)

 est_nn = KozachenkoLeonenko(w = 0)
-# with k = 1, Kraskov is virtually identical to KozachenkoLeonenko, so pick a higher
+# with k = 1, Kraskov is virtually identical to KozachenkoLeonenko, so pick a higher
 # number of neighbors
 est_knn = Kraskov(w = 0, k = 3)
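+# (In both estimators w is the Theiler window, the temporal exclusion radius for
+# neighbor searches; w = 0 excludes only the point itself.)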

@@ -220,25 +201,18 @@ for N in Ns
     push!(Ekr, kr)
 end

-# Plot
-using PyPlot, StatsBase
-f = figure(figsize = (5,6))
-ax = subplot(211)
-px = PyPlot.plot(Ns, mean.(Ekl); color = "C1", label = "KozachenkoLeonenko");
-PyPlot.plot(Ns, mean.(Ekl) .+ StatsBase.std.(Ekl); color = "C1", label = "");
-PyPlot.plot(Ns, mean.(Ekl) .- StatsBase.std.(Ekl); color = "C1", label = "");
-
-xlabel("Time step"); ylabel("Entropy (nats)"); legend()
-ay = subplot(212)
-py = PyPlot.plot(Ns, mean.(Ekr); color = "C2", label = "Kraskov");
-PyPlot.plot(Ns, mean.(Ekr) .+ StatsBase.std.(Ekr); color = "C2", label = "");
-PyPlot.plot(Ns, mean.(Ekr) .- StatsBase.std.(Ekr); color = "C2", label = "");
-
-xlabel("Time step"); ylabel("Entropy (nats)"); legend()
-tight_layout()
-PyPlot.savefig("nn_entropy_example.png")
-```
+fig = Figure()
+ax = Axis(fig[1,1]; ylabel = "entropy (nats)", title = "KozachenkoLeonenko")
+lines!(ax, Ns, mean.(Ekl); color = Cycled(1))
+band!(ax, Ns, mean.(Ekl) .+ std.(Ekl), mean.(Ekl) .- std.(Ekl);
+    color = (Main.COLORS[1], 0.5))
+
+ay = Axis(fig[2,1]; xlabel = "time step", ylabel = "entropy (nats)", title = "Kraskov")
+lines!(ay, Ns, mean.(Ekr); color = Cycled(2))
+band!(ay, Ns, mean.(Ekr) .+ std.(Ekr), mean.(Ekr) .- std.(Ekr);
+    color = (Main.COLORS[2], 0.5))

-![](nn_entropy_example.png)
+fig
+```

 [^Charzyńska2016]: Charzyńska, A., & Gambin, A. (2016). Improvement of the k-NN entropy estimator with applications in systems biology. Entropy, 18(1), 13.