theorem zero_entropy_deterministic (proved)
Lean source: IndisputableMonolith.Information.ShannonEntropy on GitHub, at line 111.
depends on
ProbDist
shannonEntropy
uniform
probability
Cost
used by
formal source
/-- **THEOREM**: Entropy is 0 for deterministic distribution. -/
theorem zero_entropy_deterministic {n : ℕ} (d : ProbDist n) (i : Fin n)
    (hdet : d.probs i = 1) (hother : ∀ j ≠ i, d.probs j = 0) :
    shannonEntropy d = 0 := by
  unfold shannonEntropy
  simp only [neg_eq_zero]
  apply Finset.sum_eq_zero
  intro j _
  by_cases heq : j = i
  · simp [heq, hdet, Real.log_one]
  · simp [hother j heq]
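
-- (Illustrative sketch, not part of the source file above.) The theorem says
-- that any point-mass distribution has zero Shannon entropy: if `d` puts all
-- of its probability on a single outcome `i`, the sum collapses to
-- 1 * log 1 = 0. Restated as an `example` that simply reuses the theorem:
example {n : ℕ} (d : ProbDist n) (i : Fin n)
    (hdet : d.probs i = 1) (hother : ∀ j ≠ i, d.probs j = 0) :
    shannonEntropy d = 0 :=
  zero_entropy_deterministic d i hdet hother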

/-! ## J-Cost Connection -/

/-- The J-cost of a probability (relative to uniform).
    For p ∈ (0,1], this measures how "surprising" the probability is. -/
noncomputable def probJCost (p : ℝ) (hp : p > 0) (hp1 : p ≤ 1) : ℝ :=
  -Real.log p
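
-- (Illustrative sketch, not part of the source file.) A certain event carries
-- no surprise: probJCost 1 = -log 1 = 0.
example : probJCost 1 one_pos le_rfl = 0 := by
  simp [probJCost, Real.log_one]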

/-- **THEOREM**: J-cost of uniform probability is log(n).
    -log(1/n) = log(n) -/
theorem jcost_uniform (n : ℕ) (hn : n > 0) :
    -Real.log (1 / (n : ℝ)) = Real.log n := by
  rw [one_div, Real.log_inv, neg_neg]
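
-- (Illustrative sketch, not part of the source file.) Specializing to a fair
-- coin, n = 2: each outcome has probability 1/2 and J-cost -log(1/2) = log 2.
example : -Real.log (1 / ((2 : ℕ) : ℝ)) = Real.log ((2 : ℕ) : ℝ) :=
  jcost_uniform 2 (by norm_num)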

/-- The total J-cost of a distribution.
    This equals the Shannon entropy! -/
noncomputable def totalJCost {n : ℕ} (d : ProbDist n) : ℝ :=
  Finset.univ.sum fun i =>
    if h : d.probs i > 0 then
      d.probs i * probJCost (d.probs i) h (by
        have := d.normalized