theorem
proved
gradient_nonincreasing
show as:
view math explainer →
open explainer
Generate a durable explainer page for this declaration.
open lean source
IndisputableMonolith.NavierStokes.DiscreteMaximumPrinciple on GitHub at line 88.
browse module
Browse all declarations in this module on Recognition.
explainer page
depends on
used by
formal source
/-- The gradient maximum is non-increasing under one NS time step when
the sub-Kolmogorov condition holds.

`h_update` bounds the updated gradient maximum by the one-step growth
factor `1 + dt * (advectionBound - viscousRate)` applied to the current
maximum; under `subKolmogorov` that factor is at most one
(`one_step_factor_le_one`), so the bound collapses to `gradMax`. -/
theorem gradient_nonincreasing (data : OneStepData)
    (hsubK : data.subKolmogorov)
    (newGradMax : ℝ)
    (h_update : newGradMax ≤
      data.gradMax * (1 + data.dt * (data.advectionBound - data.viscousRate))) :
    newGradMax ≤ data.gradMax := by
  -- The one-step growth factor is ≤ 1 under the sub-Kolmogorov hypothesis.
  have hfactor := one_step_factor_le_one data hsubK
  -- Chain: newGradMax ≤ gradMax * factor ≤ gradMax * 1 = gradMax,
  -- using gradMax ≥ 0 (`data.gradMax_nonneg`).
  exact le_trans h_update (mul_le_of_le_one_right data.gradMax_nonneg hfactor)
/-- The sub-Kolmogorov condition is preserved by one step.

Given a successor state `newData` whose `viscousRate` agrees with `data`
and whose `gradMax` equals the updated bound `newGradMax`, the condition
`subKolmogorov` transfers from `data` to `newData` via
`gradient_nonincreasing`. -/
theorem subK_preserved (data : OneStepData)
    (hsubK : data.subKolmogorov)
    (newGradMax : ℝ)
    (h_update : newGradMax ≤
      data.gradMax * (1 + data.dt * (data.advectionBound - data.viscousRate)))
    (newData : OneStepData)
    (h_same_params : newData.viscousRate = data.viscousRate)
    (h_new_grad : newData.gradMax = newGradMax) :
    newData.subKolmogorov := by
  -- Reduce the goal to an inequality on newData's fields, then rewrite
  -- them to the corresponding fields of `data`.
  unfold OneStepData.subKolmogorov
  rw [h_new_grad, h_same_params]
  -- newGradMax ≤ data.gradMax (by gradient_nonincreasing), and
  -- data.gradMax satisfies the sub-Kolmogorov bound (hsubK).
  exact le_trans (gradient_nonincreasing data hsubK newGradMax h_update) hsubK
/-! ## Inductive Propagation -/

113/-- The sub-Kolmogorov condition propagates through arbitrarily many
114time steps by induction. -/
115theorem subK_propagation
116 (gradients : ℕ → ℝ) (bound : ℝ)
117 (h_init : gradients 0 ≤ bound)
118 (h_step : ∀ n, gradients (n + 1) ≤ gradients n) :