pith. machine review for the scientific record. sign in

IndisputableMonolith.Foundation.CostAxioms

IndisputableMonolith/Foundation/CostAxioms.lean · 372 lines · 19 declarations

show as:
view math explainer →

open module explainer GitHub source

Explainer status: pending

   1import Mathlib
   2import IndisputableMonolith.Cost
   3import IndisputableMonolith.Cost.FunctionalEquation
   4import IndisputableMonolith.CostUniqueness
   5
/-!
# Cost Axioms: The Primitive Foundation

This module formalizes the **three primitive axioms** from which the entire
Recognition Science framework derives. These axioms are more primitive than
logic itself—they describe the structure of "cheap configurations" from which
logical coherence emerges.

## Axiomatic Hierarchy

```
LEVEL 0 (Primitive Axioms):
├── A1: Normalization: F(1) = 0
├── A2: Recognition Composition (RCL): F(xy) + F(x/y) = 2F(x)·F(y) + 2F(x) + 2F(y)
└── A3: Calibration: F''_{log}(0) = 1

LEVEL 1 (First Derived):
└── J(x) = ½(x + x⁻¹) - 1 is UNIQUE (T5 Uniqueness)

LEVEL 2 (Existence Criterion):
└── Law of Existence: "x exists" ⟺ J(x) = 0

LEVEL 3 (Derived Meta-Principle):
└── MP: "Nothing cannot recognize itself" because J(0) → ∞
```

## Economic Interpretation

The axioms are not arbitrary—they encode an **economic inevitability**:
- J(x) measures the "cost" of being at ratio x relative to unity
- J(1) = 0: unity costs nothing (perfect balance)
- J(0) → ∞: approaching nothingness costs infinity
- The d'Alembert functional equation forces multiplicative consistency

This is more primitive than logic because:
- Contradiction has high cost (J >> 0)
- Consistency has low cost (J ≈ 0)
- Logic emerges as the structure of cheap configurations
-/
  45
  46namespace IndisputableMonolith
  47namespace Foundation
  48namespace CostAxioms
  49
  50open Real
  51
  52/-! ## The Three Primitive Axioms -/
  53
  54/-- **Axiom 1 (Normalization)**: The cost at unity is zero.
  55
  56This encodes that "perfect balance" (ratio = 1) has no cost.
  57Any cost functional measuring deviation must vanish at the reference point. -/
  58class Normalization (F : ℝ → ℝ) : Prop where
  59  unit_zero : F 1 = 0
  60
  61/-- **Axiom 2 (Recognition Composition Law)**: Multiplicative consistency.
  62
  63For all positive x, y:
  64  F(xy) + F(x/y) = 2·F(x)·F(y) + 2·F(x) + 2·F(y)
  65
  66This is the d'Alembert functional equation in multiplicative form.
  67It forces F to be compatible with the multiplicative structure of ℝ₊. -/
  68class Composition (F : ℝ → ℝ) : Prop where
  69  dAlembert : ∀ x y : ℝ, 0 < x → 0 < y →
  70    F (x * y) + F (x / y) = 2 * F x * F y + 2 * F x + 2 * F y
  71
  72/-- **Axiom 3 (Calibration)**: The second derivative at the origin (in log coordinates) equals 1.
  73
  74Let G(t) = F(exp(t)). Then G''(0) = 1.
  75
  76This normalizes the "curvature" of the cost functional at unity,
  77ensuring a unique solution rather than a family. -/
  78class Calibration (F : ℝ → ℝ) : Prop where
  79  second_deriv_at_zero : deriv (deriv (fun t => F (exp t))) 0 = 1
  80
  81/-- The complete axiom bundle for a cost functional. -/
  82class CostFunctionalAxioms (F : ℝ → ℝ) extends
  83  Normalization F, Composition F, Calibration F : Prop
  84
  85/-! ## The Canonical Cost Functional J -/
  86
  87/-- The canonical cost functional:
  88  J(x) = ½(x + x⁻¹) - 1
  89
  90This is the **unique** solution to the three axioms (proven below). -/
  91noncomputable def J (x : ℝ) : ℝ := (x + x⁻¹) / 2 - 1
  92
  93/-- J equals the Cost.Jcost defined in the Cost module. -/
  94lemma J_eq_Jcost : J = Cost.Jcost := by
  95  ext x; rfl
  96
  97/-! ## Verification: J satisfies the axioms -/
  98
  99/-- J satisfies Axiom 1 (Normalization). -/
 100instance : Normalization J where
 101  unit_zero := by simp [J]
 102
 103/-- J satisfies Axiom 2 (Recognition Composition Law). -/
 104instance : Composition J where
 105  dAlembert := by
 106    intro x y hx hy
 107    have hx0 : x ≠ 0 := hx.ne'
 108    have hy0 : y ≠ 0 := hy.ne'
 109    simp only [J]
 110    field_simp
 111    ring
 112
 113/-- J satisfies Axiom 3 (Calibration). -/
 114instance : Calibration J where
 115  second_deriv_at_zero := by
 116    -- G(t) = J(exp(t)) = cosh(t) - 1 (from Jlog_as_cosh)
 117    -- G'(t) = sinh(t), G''(t) = cosh(t), G''(0) = cosh(0) = 1
 118    -- First, show the function is Jlog
 119    have h_eq : (fun t => J (exp t)) = Cost.Jlog := by
 120      ext t; rfl
 121    rw [h_eq]
 122    -- deriv Jlog = sinh (from hasDerivAt_Jlog)
 123    have h_deriv : deriv Cost.Jlog = Real.sinh := by
 124      ext t
 125      exact (Cost.hasDerivAt_Jlog t).deriv
 126    rw [h_deriv]
 127    -- deriv sinh 0 = cosh 0 = 1
 128    have h_sinh_deriv : deriv Real.sinh 0 = Real.cosh 0 := by
 129      exact (Real.hasDerivAt_sinh 0).deriv
 130    rw [h_sinh_deriv, Real.cosh_zero]
 131
 132/-- J satisfies all three axioms. -/
 133instance : CostFunctionalAxioms J where
 134
 135/-! ## Key Properties of J -/
 136
 137/-- J is symmetric: J(x) = J(1/x) for positive x. -/
 138theorem J_symmetric {x : ℝ} (hx : 0 < x) : J x = J x⁻¹ := by
 139  have hx0 : x ≠ 0 := hx.ne'
 140  simp only [J]
 141  field_simp
 142  ring
 143
 144/-- J is non-negative for positive x (AM-GM inequality). -/
 145theorem J_nonneg {x : ℝ} (hx : 0 < x) : 0 ≤ J x := by
 146  simp only [J]
 147  have h : 0 ≤ (x - 1)^2 / x := by positivity
 148  calc (x + x⁻¹) / 2 - 1 = ((x - 1)^2 / x) / 2 := by field_simp; ring
 149    _ ≥ 0 := by positivity
 150
 151/-- J equals zero exactly at x = 1. -/
 152theorem J_eq_zero_iff {x : ℝ} (hx : 0 < x) : J x = 0 ↔ x = 1 := by
 153  constructor
 154  · intro hJ
 155    simp only [J] at hJ
 156    -- (x + 1/x)/2 - 1 = 0  ⟹  x + 1/x = 2  ⟹  x² - 2x + 1 = 0  ⟹  x = 1
 157    have h1 : x + x⁻¹ = 2 := by linarith
 158    have hx0 : x ≠ 0 := hx.ne'
 159    have h2 : x^2 + 1 = 2 * x := by
 160      field_simp at h1
 161      linarith
 162    have h3 : (x - 1)^2 = 0 := by ring_nf; linarith
 163    have h4 : x - 1 = 0 := by nlinarith [sq_nonneg (x - 1)]
 164    linarith
 165  · intro hx1
 166    simp [J, hx1]
 167
 168/-! ## The Economic Inevitability: J(0) → ∞ -/
 169
 170/-- For any bound M, there exists ε > 0 such that J(x) > M for all 0 < x < ε.
 171
 172    Direct proof: Choose ε = 1/(2(M + 2)). For 0 < x < ε, we have
 173    x⁻¹ > 2(M + 2), so J(x) ≥ x⁻¹/2 - 1 > M + 2 - 1 = M + 1 > M. -/
 174theorem J_arbitrarily_large_near_zero (M : ℝ) :
 175    ∃ ε > 0, ∀ x, 0 < x → x < ε → M < J x := by
 176  -- Choose ε = 1/(2(max M 0 + 2))
 177  let M' := max M 0 + 2
 178  have hM'_pos : M' > 0 := by positivity
 179  refine ⟨1 / (2 * M'), by positivity, ?_⟩
 180  intro x hx_pos hx_small
 181  simp only [J]
 182  -- Key computation: for 0 < x < 1/(2*M'), we have J(x) > M
 183  -- J(x) = (x + 1/x)/2 - 1 ≥ 1/(2x) - 1
 184  -- Since x < 1/(2*M'), we have 1/x > 2*M', so 1/(2x) > M', thus J(x) > M' - 1 > M
 185  have hx_ne : x ≠ 0 := hx_pos.ne'
 186  have h2M'_pos : 2 * M' > 0 := by positivity
 187  -- From x < 1/(2*M'), get 2*M' < 1/x
 188  have h_key : 2 * M' * x < 1 := by
 189    calc 2 * M' * x = x * (2 * M') := by ring
 190      _ < (1 / (2 * M')) * (2 * M') := mul_lt_mul_of_pos_right hx_small h2M'_pos
 191      _ = 1 := by field_simp
 192  have h_inv : 2 * M' < 1 / x := by
 193    rw [div_eq_mul_inv, lt_mul_inv_iff₀ hx_pos]
 194    exact h_key
 195  -- Now J(x) ≥ (1/x)/2 - 1 > M' - 1 > M
 196  have hJ_lower : (x + x⁻¹) / 2 - 1 > (1/x) / 2 - 1 := by
 197    -- (x + 1/x)/2 - 1 > (1/x)/2 - 1 ⟺ x/2 > 0, which follows from x > 0
 198    rw [one_div]
 199    have hx_half_pos : x / 2 > 0 := by linarith
 200    linarith
 201  have hJ_bound : (1/x) / 2 - 1 > M' - 1 := by
 202    -- From h_inv: 2*M' < 1/x, so M' < (1/x)/2
 203    nlinarith [h_inv]
 204  have hM_lt : M < M' - 1 := by simp only [M']; linarith [le_max_left M 0]
 205  linarith
 206
 207/-- As x → 0⁺, J(x) → +∞.
 208
 209This is the **core economic principle**: approaching "nothing" costs infinity.
 210This is why existence is inevitable—non-existence is infinitely expensive. -/
 211theorem J_tendsto_atTop_as_x_to_zero :
 212    Filter.Tendsto J (nhdsWithin 0 (Set.Ioi 0)) Filter.atTop := by
 213  rw [Filter.tendsto_atTop]
 214  intro M
 215  obtain ⟨ε, hε_pos, hε⟩ := J_arbitrarily_large_near_zero M
 216  -- We need {x : M ≤ J x} ∈ nhdsWithin 0 (Ioi 0)
 217  rw [Filter.Eventually, mem_nhdsWithin_iff_exists_mem_nhds_inter]
 218  use Set.Iio ε
 219  refine ⟨Iio_mem_nhds hε_pos, ?_⟩
 220  intro x ⟨hx_lt, hx_pos⟩
 221  exact le_of_lt (hε x hx_pos hx_lt)
 222
 223/-! ## Law of Existence -/
 224
 225/-- **Law of Existence**: A ratio x "exists" (is realizable) iff J(x) = 0.
 226
 227In the RS framework, existence corresponds to being at a cost minimum.
 228The only minimum is at x = 1 (perfect balance/golden ratio fixed point). -/
 229def Exists (x : ℝ) : Prop := 0 < x ∧ J x = 0
 230
 231/-- The Law of Existence: x exists ⟺ x = 1. -/
 232theorem law_of_existence {x : ℝ} (hx : 0 < x) : Exists x ↔ x = 1 := by
 233  simp only [Exists, J_eq_zero_iff hx, and_iff_right hx]
 234
 235/-- Unity is the unique existent. -/
 236theorem unity_is_unique_existent : ∀ x : ℝ, Exists x ↔ x = 1 := by
 237  intro x
 238  by_cases hx : 0 < x
 239  · exact law_of_existence hx
 240  · simp only [Exists]
 241    constructor
 242    · intro ⟨hpos, _⟩; exact absurd hpos hx
 243    · intro heq; subst heq; exact ⟨one_pos, by simp [J]⟩
 244
 245/-! ## Deriving MP from Cost -/
 246
 247/-- **Meta-Principle (Derived)**: "Nothing cannot recognize itself."
 248
 249In the cost framework, "Nothing" corresponds to the limit x → 0.
 250Recognition requires a finite cost, but J(0) → ∞, so recognition
 251of "Nothing" by "Nothing" would require infinite cost—impossible.
 252
 253This makes MP a **derived theorem**, not a primitive axiom. -/
 254theorem mp_from_cost :
 255    ∀ M : ℝ, ∃ ε > 0, ∀ x, 0 < x → x < ε → J x > M := by
 256  exact J_arbitrarily_large_near_zero
 257
 258/-- Alternative formulation: No finite-cost state can approach Nothing. -/
 259theorem nothing_costs_infinity :
 260    ¬∃ C : ℝ, ∀ x, 0 < x → J x ≤ C := by
 261  push_neg
 262  intro C
 263  obtain ⟨ε, hε, hJ⟩ := J_arbitrarily_large_near_zero C
 264  use ε / 2
 265  constructor
 266  · linarith
 267  · exact hJ (ε / 2) (by linarith) (by linarith)
 268
 269/-! ## Uniqueness Theorem (T5) -/
 270
 271/-- Composition axiom implies CoshAddIdentity in log coordinates. -/
 272theorem Composition_implies_CoshAddIdentity (F : ℝ → ℝ) [Composition F] :
 273    Cost.FunctionalEquation.CoshAddIdentity F := by
 274  intro t u
 275  -- G F (t+u) + G F (t-u) = F(exp(t+u)) + F(exp(t-u))
 276  -- = F(exp(t) * exp(u)) + F(exp(t) / exp(u))
 277  -- = 2 * F(exp(t)) * F(exp(u)) + 2 * F(exp(t)) + 2 * F(exp(u))  (by Composition)
 278  -- = 2 * (G F t * G F u) + 2 * (G F t + G F u)
 279  simp only [Cost.FunctionalEquation.G]
 280  have hpos_t : 0 < Real.exp t := Real.exp_pos t
 281  have hpos_u : 0 < Real.exp u := Real.exp_pos u
 282  have h1 : Real.exp (t + u) = Real.exp t * Real.exp u := Real.exp_add t u
 283  have h2 : Real.exp (t - u) = Real.exp t / Real.exp u := by
 284    rw [sub_eq_add_neg, Real.exp_add, Real.exp_neg]
 285    ring
 286  rw [h1, h2]
 287  have h_dAlembert := Composition.dAlembert (F := F) (Real.exp t) (Real.exp u) hpos_t hpos_u
 288  -- The RHS needs regrouping: 2 * F x * F y + 2 * F x + 2 * F y = 2 * (F x * F y) + 2 * (F x + F y)
 289  convert h_dAlembert using 1
 290  ring
 291
 292/-- Composition + Normalization implies symmetry: F(x) = F(1/x).
 293
 294    Proof: Apply Composition with x = 1:
 295    F(1 * y) + F(1 / y) = 2F(1)F(y) + 2F(1) + 2F(y)
 296    F(y) + F(1/y) = 2 * 0 * F(y) + 2 * 0 + 2F(y)  (by Normalization: F(1) = 0)
 297    F(y) + F(1/y) = 2F(y)
 298    F(1/y) = F(y)
 299
 300    Therefore F(y) = F(1/y) for all y > 0, which is symmetry. -/
 301theorem Composition_Normalization_implies_symmetry (F : ℝ → ℝ) [Composition F] [Normalization F] :
 302    ∀ {x : ℝ}, 0 < x → F x = F x⁻¹ := by
 303  intro x hx
 304  -- Apply Composition with x = 1, y = x
 305  have h := Composition.dAlembert (F := F) 1 x one_pos hx
 306  -- F(1 * x) + F(1 / x) = 2F(1)F(x) + 2F(1) + 2F(x)
 307  -- Simplify: F(1) = 0, 1 * x = x, 1 / x = x⁻¹
 308  simp only [one_mul, one_div, Normalization.unit_zero, zero_mul, add_zero, mul_zero] at h
 309  -- h is now: F(x) + F(x⁻¹) = 2F(x)
 310  -- Subtracting F(x) from both sides: F(x⁻¹) = F(x)
 311  have h_symm : F x⁻¹ = F x := by
 312    have h_sub : F x⁻¹ = (F x + F x⁻¹) - F x := by ring
 313    rw [h_sub, h]
 314    ring
 315  exact h_symm.symm
 316
 317/-- **T5 Uniqueness (Specification)**:
 318    Any function F satisfying the three cost axioms with regularity equals J.
 319
 320    This is the central uniqueness theorem of Recognition Science.
 321    The complete proof is in CostUniqueness.lean via T5_uniqueness_complete.
 322
 323    The proof structure is:
 324    1. CostFunctionalAxioms.composition gives d'Alembert: F(xy) + F(x/y) = 2F(x)F(y) + 2F(x) + 2F(y)
 325    2. Substituting G(t) = F(exp(t)) transforms to cosh-additive: G(s+t) + G(s-t) = 2G(s)G(t) + 2G(s) + 2G(t)
 326    3. Shifting H = G + 1 gives standard d'Alembert: H(s+t) + H(s-t) = 2H(s)H(t)
 327    4. The unique continuous solution is H(t) = cosh(t), so G(t) = cosh(t) - 1
 328    5. Therefore F(x) = cosh(log(x)) - 1 = ½(x + x⁻¹) - 1 = J(x)
 329
 330    The regularity hypotheses (Aczél theory for d'Alembert equations) are stated
 331    explicitly. These are standard results from functional equation theory:
 332    - Continuous d'Alembert solutions are smooth (Aczél 1966)
 333    - Smooth d'Alembert solutions satisfy ODE H'' = H
 334    - Linear ODE regularity bootstrap
 335
 336    See `IndisputableMonolith.T5_uniqueness_complete` for the rigorous proof. -/
 337theorem uniqueness_specification (F : ℝ → ℝ) [CostFunctionalAxioms F]
 338    (hCont : ContinuousOn F (Set.Ioi 0))
 339    (hConvex : StrictConvexOn ℝ (Set.Ioi 0) F)
 340    -- Regularity hypotheses from Aczél's theorem on d'Alembert equations
 341    (h_smooth : Cost.FunctionalEquation.dAlembert_continuous_implies_smooth_hypothesis
 342        (Cost.FunctionalEquation.H F))
 343    (h_ode : Cost.FunctionalEquation.dAlembert_to_ODE_hypothesis
 344        (Cost.FunctionalEquation.H F))
 345    (h_cont : Cost.FunctionalEquation.ode_regularity_continuous_hypothesis
 346        (Cost.FunctionalEquation.H F))
 347    (h_diff : Cost.FunctionalEquation.ode_regularity_differentiable_hypothesis
 348        (Cost.FunctionalEquation.H F))
 349    (h_boot : Cost.FunctionalEquation.ode_linear_regularity_bootstrap_hypothesis
 350        (Cost.FunctionalEquation.H F)) :
 351    ∀ x, 0 < x → F x = J x := by
 352  intro x hx
 353  -- Bridge from CostFunctionalAxioms to T5_uniqueness_complete hypotheses
 354  -- 1. Symmetry: F(x) = F(1/x)
 355  have hSymm : ∀ {x : ℝ}, 0 < x → F x = F x⁻¹ :=
 356    Composition_Normalization_implies_symmetry F
 357  -- 2. Unit normalization: F(1) = 0
 358  have hUnit : F 1 = 0 := Normalization.unit_zero
 359  -- 3. Calibration: deriv (deriv (F ∘ exp)) 0 = 1
 360  have hCalib : deriv (deriv (F ∘ exp)) 0 = 1 := Calibration.second_deriv_at_zero
 361  -- 4. CoshAddIdentity: from Composition axiom
 362  have hCoshAdd : Cost.FunctionalEquation.CoshAddIdentity F :=
 363    Composition_implies_CoshAddIdentity F
 364  -- Apply T5_uniqueness_complete with all hypotheses
 365  unfold J
 366  exact CostUniqueness.T5_uniqueness_complete F hSymm hUnit hConvex hCalib hCont hCoshAdd
 367    h_smooth h_ode h_cont h_diff h_boot hx
 368
 369end CostAxioms
 370end Foundation
 371end IndisputableMonolith
 372

source mirrored from github.com/jonwashburn/shape-of-logic