diff --git a/coq/NeuralNetworks/derivlemmas.v b/coq/NeuralNetworks/derivlemmas.v
index 11548dd4..6cc48150 100644
--- a/coq/NeuralNetworks/derivlemmas.v
+++ b/coq/NeuralNetworks/derivlemmas.v
@@ -3,50 +3,6 @@ Require Import Lra.
 
 Set Bullet Behavior "Strict Subproofs".
 
-(*
-Lemma is_derive_comp :
-  forall (f : R -> R) (g : R -> R) (x : R) (df : R) (dg : R),
-    is_derive f (g x) df ->
-    is_derive g x dg ->
-    is_derive (fun x => f (g x)) x (dg * df).
-
-Lemma Derive_comp (f g : R -> R) (x : R) :
-  ex_derive f (g x) -> ex_derive g x
-  -> Derive (fun x => f (g x)) x = Derive g x * Derive f (g x).
-
-Lemma is_derive_plus :
-  forall (f g : K -> V) (x : K) (df dg : V),
-    is_derive f x df ->
-    is_derive g x dg ->
-    is_derive (fun x => plus (f x) (g x)) x (plus df dg).
-
-Lemma is_derive_minus :
-  forall (f g : K -> V) (x : K) (df dg : V),
-    is_derive f x df ->
-    is_derive g x dg ->
-    is_derive (fun x => minus (f x) (g x)) x (minus df dg).
-
-Lemma is_derive_mult :
-  forall (f g : R -> R) (x : R) (df dg : R),
-    is_derive f x df ->
-    is_derive g x dg ->
-    is_derive (fun t : R => f t * g t) x (df * g x + f x * dg) .
-
-Lemma is_derive_div :
-  forall (f g : R -> R) (x : R) (df dg : R),
-    is_derive f x df ->
-    is_derive g x dg ->
-    g x <> 0 ->
-    is_derive (fun t : R => f t / g t) x ((df * g x - f x * dg) / (g x ^ 2)).
-
-Lemma is_derive_Rabs (f : R -> R) (x df : R) :
-  is_derive f x df -> f x <> 0
-  -> is_derive (fun x => Rabs (f x)) x (sign (f x) * df).
-
-Lemma is_derive_unique f x l :
-  is_derive f x l -> Derive f x = l.
- *)
-
 Lemma ball_abs (x y:R_AbsRing) (eps : posreal):
   ball x eps y <-> Rabs(y - x) < eps.
 Proof.
@@ -399,6 +355,13 @@ Proof.
   apply is_derive_exp.
 Qed.
 
+Lemma ex_derive_exp (x : R) :
+  ex_derive exp x.
+Proof.
+  eexists.
+  apply is_derive_exp.
+Qed.
+
 Lemma is_derive_ln (x:R) : 0 < x -> is_derive ln x (/ x).
 Proof.
   rewrite is_derive_Reals.
@@ -412,6 +375,13 @@ Proof.
   now apply is_derive_ln.
 Qed.
 
+Lemma ex_derive_ln (x : R) :
+  0 < x -> ex_derive ln x.
+Proof.
+  eexists.
+  now apply is_derive_ln.
+Qed.
+
 Lemma is_derive_abs_pos (x:R): 0 < x -> is_derive Rabs x 1.
 Proof.
   rewrite is_derive_Reals.
@@ -425,6 +395,13 @@ Proof.
   now apply is_derive_abs_pos.
 Qed.
 
+Lemma ex_derive_abs_pos (x : R) :
+  0 < x -> ex_derive Rabs x.
+Proof.
+  eexists.
+  now apply is_derive_abs_pos.
+Qed.
+
 Lemma is_derive_abs_neg (x:R): 0 > x -> is_derive Rabs x (-1).
 Proof.
   rewrite is_derive_Reals.
diff --git a/coq/QLearn/Dvoretzky.v b/coq/QLearn/Dvoretzky.v
index 79ac45ac..d96da093 100644
--- a/coq/QLearn/Dvoretzky.v
+++ b/coq/QLearn/Dvoretzky.v
@@ -275,13 +275,6 @@ Lemma Dvoretzky_rel (n:nat) (theta:R) (X Y : nat -> Ts -> R)
     now apply exp_increasing.
   Qed.
 
-  Lemma ex_derive_exp (x : R) :
-    ex_derive exp x.
-  Proof.
-    eexists.
-    apply is_derive_exp.
-  Qed.
-
   Lemma xm1_exp :
     exists (x : posreal),
      forall y, 0 <= y < x ->
@@ -293,15 +286,13 @@ Lemma Dvoretzky_rel (n:nat) (theta:R) (X Y : nat -> Ts -> R)
     assert (df 0 = 1).
     {
       unfold df.
-      rewrite Rmult_0_r.
-      rewrite exp_0.
+      rewrite Rmult_0_r, exp_0.
       lra.
     }
     assert (f 0 = 0).
     {
       unfold f.
-      rewrite Rmult_0_r.
-      rewrite exp_0.
+      rewrite Rmult_0_r, exp_0.
       lra.
     }
     assert (derivable f).
@@ -324,13 +315,9 @@ Lemma Dvoretzky_rel (n:nat) (theta:R) (X Y : nat -> Ts -> R)
     unfold f, df.
     rewrite Derive_minus.
     - rewrite Derive_minus.
-      + rewrite Derive_const.
-        rewrite Derive_id.
-        rewrite Derive_comp.
+      + rewrite Derive_const, Derive_id, Derive_comp.
        * rewrite Derive_mult.
-          -- rewrite Derive_const.
-             rewrite Derive_id.
-             rewrite Derive_exp.
+          -- rewrite Derive_const, Derive_id, Derive_exp.
             lra.
         -- apply ex_derive_const.
         -- apply ex_derive_id.
@@ -394,13 +381,7 @@ Lemma Dvoretzky_rel (n:nat) (theta:R) (X Y : nat -> Ts -> R)
     specialize (H4 y).
     assert (Hierarchy.ball 0 x y).
     {
-      unfold Hierarchy.ball; simpl.
-      unfold AbsRing_ball; simpl.
-      unfold abs, minus; simpl.
-      unfold plus, opp; simpl.
-      rewrite Ropp_0.
-      rewrite Rplus_0_r.
-      rewrite Rabs_right; lra.
+      rewrite ball_abs, Rminus_0_r, Rabs_right; lra.
     }
     specialize (H4 H7).
     rewrite H in H4.
@@ -433,17 +414,13 @@ Lemma Dvoretzky_rel (n:nat) (theta:R) (X Y : nat -> Ts -> R)
     assert (0 <= y <= x) by lra.
     specialize (H9 H12 H13).
     unfold f in H9.
-    rewrite Rminus_0_r in H9.
-    rewrite Rmult_0_r in H9.
-    rewrite exp_0 in H9.
+    rewrite Rminus_0_r, Rmult_0_r, exp_0 in H9.
     replace (1 - 1) with 0 in H9 by lra.
     destruct (Rlt_dec 0 y).
     - specialize (H9 r).
       lra.
     - assert (y = 0) by lra.
-      rewrite H14.
-      rewrite Rmult_0_r.
-      rewrite exp_0.
+      rewrite H14, Rmult_0_r, exp_0.
       lra.
   Qed.
 
@@ -454,8 +431,7 @@ Lemma Dvoretzky_rel (n:nat) (theta:R) (X Y : nat -> Ts -> R)
     intros.
     induction h.
     - replace (n + 0)%nat with n by lia.
-      rewrite part_prod_n_k_k.
-      rewrite part_prod_n_k_k.
+      rewrite part_prod_n_k_k, part_prod_n_k_k.
      apply H.
      lia.
    - replace (n + S h)%nat with (S (n + h)) by lia.