section ‹Multivariate Taylor›
theory Multivariate_Taylor
imports
  "HOL-Analysis.Analysis"
  "../ODE_Auxiliarities"
begin

no_notation vec_nth (infixl "$" 90)
notation blinfun_apply (infixl "$" 999)
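
text ‹The first lemma below is a multivariate Taylor expansion with integral remainder:
  for a function @{text "f"} between real normed vector spaces whose iterated directional
  derivatives along the fixed direction @{text "H"} are collected in @{text "Df"}, it states
  @{text "f (X + H) = (∑i<n. (1 / fact i) *⇩R Df X i H H) + integral {0..1} i"},
  where @{text "i"} abbreviates the remainder integrand defined in the lemma.  The proof
  reduces to the one-dimensional Taylor theorem along the segment from @{text "X"}
  to @{text "X + H"}.›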

lemma
  fixes f::"'a::real_normed_vector ⇒ 'b::banach"
    and Df::"'a ⇒ nat ⇒ 'a ⇒ 'a ⇒ 'b"
  assumes "n > 0"
  assumes Df_Nil: "⋀a x. Df a 0 H H = f a"
  assumes Df_Cons: "⋀a i d. a ∈ closed_segment X (X + H) ⟹ i < n ⟹
      ((λa. Df a i H H) has_derivative (Df a (Suc i) H)) (at a within G)"
  assumes cs: "closed_segment X (X + H) ⊆ G"
  defines "i ≡ λx.
      ((1 - x) ^ (n - 1) / fact (n - 1)) *⇩R Df (X + x *⇩R H) n H H"
  shows multivariate_Taylor_has_integral:
    "(i has_integral f (X + H) - (∑i<n. (1 / fact i) *⇩R Df X i H H)) {0..1}"
  and multivariate_Taylor:
    "f (X + H) = (∑i<n. (1 / fact i) *⇩R Df X i H H) + integral {0..1} i"
  and multivariate_Taylor_integrable:
    "i integrable_on {0..1}"
proof goal_cases
  case 1
  let ?G = "closed_segment X (X + H)"
  define line where "line t = X + t *⇩R H" for t
  have segment_eq: "closed_segment X (X + H) = line ` {0 .. 1}"
    by (auto simp: line_def closed_segment_def algebra_simps)
  have line_deriv: "⋀x. (line has_derivative (λt. t *⇩R H)) (at x)"
    by (auto intro!: derivative_eq_intros simp: line_def [abs_def])
  define g where "g = f o line"
  define Dg where "Dg n t = Df (line t) n H H" for n :: nat and t :: real
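  txt ‹@{text "g"} is @{text "f"} restricted to the segment from @{text "X"} to @{text "X + H"},
    and @{text "Dg"} collects the corresponding derivatives along that segment, so the
    one-dimensional @{text "Taylor_has_integral"} becomes applicable.›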
  note ‹n > 0›
  moreover
  have Dg0: "Dg 0 = g" by (auto simp add: Dg_def Df_Nil g_def)
  moreover
  have DgSuc: "(Dg m has_vector_derivative Dg (Suc m) t) (at t within {0..1})"
    if "m < n" "0  t" "t  1" for m::nat and t::real
  proof -
    from that have [intro]: "line t  ?G" using assms
      by (auto simp: segment_eq)
    note [derivative_intros] = has_derivative_in_compose[OF _ has_derivative_subset[OF Df_Cons]]
    interpret Df: linear "(λd. Df (line t) (Suc m) H d)"
      by (auto intro!: has_derivative_linear derivative_intros ‹m < n›)
    note [derivative_intros] =
      has_derivative_compose[OF _ line_deriv]
    show ?thesis
      using Df.scaleR ‹m < n›
      by (auto simp: Dg_def [abs_def] has_vector_derivative_def g_def segment_eq
         intro!: derivative_eq_intros subsetD[OF cs])
  qed
  ultimately
  have g_Taylor: "(i has_integral g 1 - (∑i<n. ((1 - 0) ^ i / fact i) *⇩R Dg i 0)) {0 .. 1}"
    unfolding i_def Dg_def [abs_def] line_def
    by (rule Taylor_has_integral) auto
  then show c: ?case using ‹n > 0› by (auto simp: g_def line_def Dg_def)
  case 2 show ?case using c
    by (simp add: integral_unique add.commute)
  case 3 show ?case using c by force
qed


subsection ‹Symmetric second derivative›
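
text ‹The rest of this theory establishes symmetry of the second derivative: if @{text "f"}
  is differentiable on @{text "G"} and its derivative is differentiable at @{text "a"}, then
  @{text "f'' j i = f'' i j"}.  The auxiliary lemma follows the classical argument: both
  @{text "f'' j i"} and @{text "f'' i j"} approximate the same second difference of @{text "f"}
  over a small parallelogram spanned by @{text "i"} and @{text "j"}, with an error of order
  @{text "u * u * e"} for arbitrarily small @{text "e"}, so their difference must vanish.›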

lemma symmetric_second_derivative_aux:
  assumes first_fderiv[derivative_intros]:
    "a. a  G  (f has_derivative (f' a)) (at a within G)"
  assumes second_fderiv[derivative_intros]:
    "i. ((λx. f' x i) has_derivative (λj. f'' j i)) (at a within G)"
  assumes "i  j" "i  0" "j  0"
  assumes "a  G"
  assumes "s t. s  {0..1}  t  {0..1}  a + s *R i + t *R j  G"
  shows "f'' j i = f'' i j"
proof -
  let ?F = "at_right (0::real)"
  define B where "B i j = {a + s *R i + t *R j |s t. s  {0..1}  t  {0..1}}" for i j
  have "B i j  G" using assms by (auto simp: B_def)
  {
    fix e::real and i j::'a
    assume "e > 0"
    assume "i  j" "i  0" "j  0"
    assume "B i j  G"
    let ?ij' = "λs t. λu. a + (s * u) *R i + (t * u) *R j"
    let ?ij = "λt. λu. a + (t * u) *R i + u *R j"
    let ?i = "λt. λu. a + (t * u) *R i"
    let ?g = "λu t. f (?ij t u) - f (?i t u)"
    have filter_ij'I: "P. P a  eventually P (at a within G) 
      eventually (λx. s{0..1}. t{0..1}. P (?ij' s t x)) ?F"
    proof -
      fix P
      assume "P a"
      assume "eventually P (at a within G)"
      hence "eventually P (at a within B i j)" by (rule filter_leD[OF at_le[OF B i j  G]])
      then obtain d where d: "d > 0" and "x d2. x  B i j  x  a  dist x a < d  P x"
        by (auto simp: eventually_at)
      with P a have P: "x d2. x  B i j  dist x a < d  P x" by (case_tac "x = a") auto
      let ?d = "min (min (d/norm i) (d/norm j) / 2) 1"
      show "eventually (λx. s{0..1}. t{0..1}. P (?ij' s t x)) (at_right 0)"
        unfolding eventually_at
      proof (rule exI[where x="?d"], safe)
        show "0 < ?d" using 0 < d i  0 j  0 by simp
        fix x s t :: real assume *: "s  {0..1}" "t  {0..1}" "0 < x" "dist x 0 < ?d"
        show "P (?ij' s t x)"
        proof (rule P)
          have "x y::real. x  {0..1}  y  {0..1}  x * y  {0..1}"
            by (auto intro!: order_trans[OF mult_left_le_one_le])
          hence "s * x  {0..1}" "t * x  {0..1}" using * by (auto simp: dist_norm)
          thus "?ij' s t x  B i j" by (auto simp: B_def)
          have "norm (s *R x *R i + t *R x *R j)  norm (s *R x *R i) + norm (t *R x *R j)"
            by (rule norm_triangle_ineq)
          also have " < d / 2 + d / 2" using * i  0 j  0
            by (intro add_strict_mono) (auto simp: ac_simps dist_norm
              pos_less_divide_eq le_less_trans[OF mult_left_le_one_le])
          finally show "dist (?ij' s t x) a < d" by (simp add: dist_norm)
        qed
      qed
    qed
    have filter_ijI: "eventually (λx. ∀t∈{0..1}. P (?ij t x)) ?F"
      if "P a" "eventually P (at a within G)" for P
      using filter_ij'I[OF that]
        by eventually_elim (force dest: bspec[where x=1])
    have filter_iI: "eventually (λx. ∀t∈{0..1}. P (?i t x)) ?F"
      if "P a" "eventually P (at a within G)" for P
      using filter_ij'I[OF that] by eventually_elim force
    {
      from second_fderiv[of i, simplified has_derivative_iff_norm, THEN conjunct2,
        THEN tendstoD, OF ‹0 < e›]
      have "eventually (λx. norm (f' x i - f' a i - f'' (x - a) i) / norm (x - a) ≤ e)
          (at a within G)"
        by eventually_elim (simp add: dist_norm)
      from filter_ijI[OF _ this] filter_iI[OF _ this] ‹0 < e›
      have
        "eventually (λij. ∀t∈{0..1}. norm (f' (?ij t ij) i - f' a i - f'' (?ij t ij - a) i) /
          norm (?ij t ij - a) ≤ e) ?F"
        "eventually (λij. ∀t∈{0..1}. norm (f' (?i t ij) i - f' a i - f'' (?i t ij - a) i) /
          norm (?i t ij - a) ≤ e) ?F"
        by auto
      moreover
      have "eventually (λx. x ∈ G) (at a within G)" unfolding eventually_at_filter by simp
      hence eventually_in_ij: "eventually (λx. ∀t∈{0..1}. ?ij t x ∈ G) ?F" and
        eventually_in_i: "eventually (λx. ∀t∈{0..1}. ?i t x ∈ G) ?F"
        using ‹a ∈ G› by (auto dest: filter_ijI filter_iI)
      ultimately
      have "eventually (λu. norm (?g u 1 - ?g u 0 - (u * u) *R f'' j i) 
          u * u * e * (2 * norm i + 3 * norm j)) ?F"
      proof eventually_elim
        case (elim u)
        hence ijsub: "(λt. ?ij t u) ` {0..1}  G" and isub: "(λt. ?i t u) ` {0..1}  G" by auto
        note has_derivative_subset[OF _ ijsub, derivative_intros]
        note has_derivative_subset[OF _ isub, derivative_intros]
        let ?g' = "λt. (λua. u *R ua *R (f' (?ij t u) i - (f' (?i t u) i)))"
        have g': "((?g u) has_derivative ?g' t) (at t within {0..1})" if "t  {0..1}" for t::real
        proof -
          from elim that have linear_f': "⋀c x. f' (?ij t u) (c *⇩R x) = c *⇩R f' (?ij t u) x"
              "⋀c x. f' (?i t u) (c *⇩R x) = c *⇩R f' (?i t u) x"
            using linear_cmul[OF has_derivative_linear, OF first_fderiv] by auto
          show ?thesis
            using elim ‹t ∈ {0..1}›
            by (auto intro!: derivative_eq_intros has_derivative_in_compose[of  "λt. ?ij t u" _ _ _ f]
                has_derivative_in_compose[of  "λt. ?i t u" _ _ _ f]
              simp: linear_f' scaleR_diff_right mult.commute)
        qed
        from elim(1) ‹i ≠ 0› ‹j ≠ 0› ‹0 < e› have f'ij: "⋀t. t ∈ {0..1} ⟹
            norm (f' (a + (t * u) *⇩R i + u *⇩R j) i - f' a i - f'' ((t * u) *⇩R i + u *⇩R j) i) ≤
            e * norm ((t * u) *⇩R i + u *⇩R j)"
          using  linear_0[OF has_derivative_linear, OF second_fderiv]
          by (case_tac "u *⇩R j + (t * u) *⇩R i = 0") (auto simp: field_simps
            simp del: pos_divide_le_eq simp add: pos_divide_le_eq[symmetric])
        from elim(2) have f'i: "⋀t. t ∈ {0..1} ⟹ norm (f' (a + (t * u) *⇩R i) i - f' a i -
          f'' ((t * u) *⇩R i) i) ≤ e * abs (t * u) * norm i"
          using ‹i ≠ 0› ‹j ≠ 0› linear_0[OF has_derivative_linear, OF second_fderiv]
          by (case_tac "t * u = 0") (auto simp: field_simps simp del: pos_divide_le_eq
            simp add: pos_divide_le_eq[symmetric])
        have "norm (?g u 1 - ?g u 0 - (u * u) *⇩R f'' j i) =
          norm ((?g u 1 - ?g u 0 - u *⇩R (f' (a + u *⇩R j) i - (f' a i)))
            + u *⇩R (f' (a + u *⇩R j) i - f' a i - u *⇩R f'' j i))"
            (is "_ = norm (?g10 + ?f'i)")
          by (simp add: algebra_simps linear_cmul[OF has_derivative_linear, OF second_fderiv]
            linear_add[OF has_derivative_linear, OF second_fderiv])
        also have "  norm ?g10 + norm ?f'i"
          by (blast intro: order_trans add_mono norm_triangle_le)
        also
        have "0  {0..1::real}" by simp
        have "t  {0..1}. onorm ((λua. (u * ua) *R (f' (?ij t u) i - f' (?i t u) i)) -
              (λua. (u * ua) *R (f' (a + u *R j) i - f' a i)))
             2 * u * u * e * (norm i + norm j)" (is "t  _. onorm (?d t)  _")
        proof
          fix t::real assume "t  {0..1}"
          show "onorm (?d t)  2 * u * u * e * (norm i + norm j)"
          proof (rule onorm_le)
            fix x
            have "norm (?d t x) =
                norm ((u * x) *R (f' (?ij t u) i - f' (?i t u) i - f' (a + u *R j) i + f' a i))"
              by (simp add: algebra_simps)
            also have " =
                abs (u * x) * norm (f' (?ij t u) i - f' (?i t u) i - f' (a + u *R j) i + f' a i)"
              by simp
            also have " = abs (u * x) * norm (
                 f' (?ij t u) i - f' a i - f'' ((t * u) *R i + u *R j) i
               - (f' (?i t u) i - f' a i - f'' ((t * u) *R i) i)
               - (f' (a + u *R j) i - f' a i - f'' (u *R j) i))"
               (is "_ = _ * norm (?dij - ?di - ?dj)")
              using a  G
              by (simp add: algebra_simps
                linear_add[OF has_derivative_linear[OF second_fderiv]])
            also have "  abs (u * x) * (norm ?dij + norm ?di + norm ?dj)"
              by (rule mult_left_mono[OF _ abs_ge_zero]) norm
            also have "  abs (u * x) *
              (e * norm ((t * u) *R i + u *R j) + e * abs (t * u) * norm i + e * (¦u¦ * norm j))"
              using f'ij f'i f'ij[OF 0  {0..1}] t  {0..1}
              by (auto intro!: add_mono mult_left_mono)
            also have " = abs u * abs x * abs u *
              (e * norm (t *R i + j) + e * norm (t *R i) + e * (norm j))"
              by (simp add: algebra_simps norm_scaleR[symmetric] abs_mult del: norm_scaleR)
            also have " =
                u * u * abs x * (e * norm (t *R i + j) + e * norm (t *R i) + e * (norm j))"
              by (simp add: ac_simps)
            also have " = u * u * e * abs x * (norm (t *R i + j) + norm (t *R i) + norm j)"
              by (simp add: algebra_simps)
            also have "  u * u * e * abs x * ((norm (1 *R i) + norm j) + norm (1 *R i) + norm j)"
              using t  {0..1} 0 < e
              by (intro mult_left_mono add_mono) (auto intro!: norm_triangle_le add_right_mono
                mult_left_le_one_le zero_le_square)
            finally show "norm (?d t x)  2 * u * u * e * (norm i + norm j) * norm x"
              by (simp add: ac_simps)
          qed
        qed
        with differentiable_bound_linearization[where f="?g u" and f'="?g'", of 0 1 _ 0, OF _ g']
        have "norm ?g10  2 * u * u * e * (norm i + norm j)" by simp
        also have "norm ?f'i  abs u *
          norm ((f' (a + (u) *R j) i - f' a i - f'' (u *R j) i))"
          using linear_cmul[OF has_derivative_linear, OF second_fderiv]
          by simp
        also have "  abs u * (e * norm ((u) *R j))"
          using f'ij[OF 0  {0..1}] by (auto intro: mult_left_mono)
        also have " = u * u * e * norm j" by (simp add: algebra_simps abs_mult)
        finally show ?case by (simp add: algebra_simps)
      qed
    }
  } note wlog = this
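  txt ‹The estimate above was derived for arbitrary directions; it is now instantiated twice,
    once as stated and once with @{text "i"} and @{text "j"} exchanged, and the two resulting
    bounds are combined to estimate @{text "norm (f'' j i - f'' i j)"}.›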
  have e': "norm (f'' j i - f'' i j)  e * (5 * norm j + 5 * norm i)" if "0 < e" for e t::real
  proof -
    have "B i j = B j i" using i  j by (force simp: B_def)+
    with assms B i j  G have "j  i" "B j i  G" by (auto simp:)
    from wlog[OF 0 < e i  j i  0 j  0 B i j  G]
         wlog[OF 0 < e j  i j  0 i  0 B j i  G]
    have "eventually (λu. norm ((u * u) *R f'' j i - (u * u) *R f'' i j)
          u * u * e * (5 * norm j + 5 * norm i)) ?F"
    proof eventually_elim
      case (elim u)
      have "norm ((u * u) *R f'' j i - (u * u) *R f'' i j) =
        norm (f (a + u *R j + u *R i) - f (a + u *R j) -
         (f (a + u *R i) - f a) - (u * u) *R f'' i j
         - (f (a + u *R i + u *R j) - f (a + u *R i) -
         (f (a + u *R j) - f a) -
         (u * u) *R f'' j i))" by (simp add: field_simps)
      also have "  u * u * e * (2 * norm j + 3 * norm i) + u * u * e * (3 * norm j + 2 * norm i)"
        using elim by (intro order_trans[OF norm_triangle_ineq4]) (auto simp: ac_simps intro: add_mono)
      finally show ?case by (simp add: algebra_simps)
    qed
    hence "eventually (λu. norm ((u * u) *R (f'' j i - f'' i j)) 
        u * u * e * (5 * norm j + 5 * norm i)) ?F"
      by (simp add: algebra_simps)
    hence "eventually (λu. (u * u) * norm ((f'' j i - f'' i j)) 
        (u * u) * (e * (5 * norm j + 5 * norm i))) ?F"
      by (simp add: ac_simps)
    hence "eventually (λu. norm ((f'' j i - f'' i j))  e * (5 * norm j + 5 * norm i)) ?F"
      unfolding mult_le_cancel_left eventually_at_filter
      by eventually_elim auto
    then show ?thesis
      by (auto simp add:eventually_at dist_norm dest!: bspec[where x="d/2" for d])
  qed
  have e: "norm (f'' j i - f'' i j) < e" if "0 < e" for e::real
  proof -
    let ?e = "e/2/(5 * norm j + 5 * norm i)"
    have "?e > 0" using 0 < e i  0 j  0 by (auto intro!: divide_pos_pos add_pos_pos)
    from e'[OF this] have "norm (f'' j i - f'' i j)  ?e * (5 * norm j + 5 * norm i)" .
    also have " = e / 2" using i  0 j  0 by (auto simp: ac_simps add_nonneg_eq_0_iff)
    also have " < e" using 0 < e by simp
    finally show ?thesis .
  qed
  have "norm (f'' j i - f'' i j) = 0"
  proof (rule ccontr)
    assume "norm (f'' j i - f'' i j)  0"
    hence "norm (f'' j i - f'' i j) > 0" by simp
    from e[OF this] show False by simp
  qed
  thus ?thesis by simp
qed

locale second_derivative_within =
  fixes f f' f'' a G
  assumes first_fderiv[derivative_intros]:
    "a. a  G  (f has_derivative blinfun_apply (f' a)) (at a within G)"
  assumes in_G: "a  G"
  assumes second_fderiv[derivative_intros]:
    "(f' has_derivative blinfun_apply f'') (at a within G)"
begin

lemma symmetric_second_derivative_within:
  assumes "a  G"
  assumes "s t. s  {0..1}  t  {0..1}  a + s *R i + t *R j  G"
  shows "f'' i j = f'' j i"
  apply (cases "i = j  i = 0  j = 0")
    apply (force simp add: blinfun.zero_right blinfun.zero_left)
  using first_fderiv _ _ _ _ assms
  by (rule symmetric_second_derivative_aux[symmetric])
    (auto intro!: derivative_eq_intros simp: blinfun.bilinear_simps assms)

end
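
text ‹The locale above states the derivatives as bounded linear functions relative to the
  set @{text "G"}.  The following locale instead assumes Fréchet derivatives at @{text "a"}
  (filter @{text "at a"}) together with @{text "a ∈ interior G"}; symmetry is then obtained
  by rescaling @{text "i"} and @{text "j"} so that the parallelogram used in the proof fits
  into a ball around @{text "a"} that is contained in @{text "G"}.›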

locale second_derivative =
  fixes f::"'a::real_normed_vector ⇒ 'b::banach"
    and f' :: "'a ⇒ 'a →⇩L 'b"
    and f'' :: "'a →⇩L 'a →⇩L 'b"
    and a :: 'a
    and G :: "'a set"
  assumes first_fderiv[derivative_intros]:
    "a. a  G  (f has_derivative f' a) (at a)"
  assumes in_G: "a  interior G"
  assumes second_fderiv[derivative_intros]:
    "(f' has_derivative f'') (at a)"
begin

lemma symmetric_second_derivative:
  assumes "a  interior G"
  shows "f'' i j = f'' j i"
proof -
  from assms have "a  G"
    using interior_subset by blast
  interpret second_derivative_within
    by unfold_locales
      (auto intro!: derivative_intros intro: has_derivative_at_withinI ‹a ∈ G›)
  from assms open_interior[of G] interior_subset[of G]
  obtain e where e: "e > 0" "⋀y. dist y a < e ⟹ y ∈ G"
    by (force simp: open_dist)
  define e' where "e' = e / 3"
  define i' j' where "i' = e' *⇩R i /⇩R norm i" and "j' = e' *⇩R j /⇩R norm j"
  hence "norm i' ≤ e'" "norm j' ≤ e'"
    by (auto simp: field_simps e'_def ‹0 < e› less_imp_le)
  hence "¦s¦ ≤ 1 ⟹ ¦t¦ ≤ 1 ⟹ norm (s *⇩R i' + t *⇩R j') ≤ e' + e'" for s t
    by (intro norm_triangle_le[OF add_mono])
      (auto intro!: order_trans[OF mult_left_le_one_le])
  also have " < e" by (simp add: e'_def 0 < e)
  finally
  have "f'' $ i' $ j' = f'' $ j' $ i'"
    by (intro symmetric_second_derivative_within ‹a ∈ G› e)
      (auto simp add: dist_norm)
  thus ?thesis
    using e(1)
    by (auto simp: i'_def j'_def e'_def
      blinfun.zero_right blinfun.zero_left
      blinfun.scaleR_left blinfun.scaleR_right algebra_simps)
qed

end
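
text ‹The next lemma gives a first-order Taylor expansion with an explicit remainder
  @{text "R x z = f z - f x - f' x (z - x)"} that is uniform on a compact set @{text "J"}
  inside the open domain @{text "G"}: points of @{text "J"} that are closer than some
  @{text "d"} satisfy @{text "norm (R x y) ≤ e * dist x y"}, and @{text "R"} is continuous
  on @{text "G × G"}.  The proof combines the mean value theorem in integral form with
  uniform continuity of @{text "f'"} on a compact neighbourhood of @{text "J"}.›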

lemma
  uniform_explicit_remainder_Taylor_1:
  fixes f::"'a::{banach,heine_borel,perfect_space}  'b::banach"
  assumes f'[derivative_intros]: "x. x  G  (f has_derivative blinfun_apply (f' x)) (at x)"
  assumes f'_cont: "x. x  G  isCont f' x"
  assumes "open G"
  assumes "J  {}" "compact J" "J  G"
  assumes "e > 0"
  obtains d R
  where "d > 0"
    "x z. f z = f x + f' x (z - x) + R x z"
    "x y. x  J  y  J  dist x y < d  norm (R x y)  e * dist x y"
    "continuous_on (G × G) (λ(a, b). R a b)"
proof -
  from assms have "continuous_on G f'" by (auto intro!: continuous_at_imp_continuous_on)
  note [continuous_intros] = continuous_on_compose2[OF this]
  define R where "R x z = f z - f x - f' x (z - x)" for x z
  from compact_in_open_separated[OF ‹J ≠ {}› ‹compact J› ‹open G› ‹J ⊆ G›]
  obtain η where η: "0 < η" "{x. infdist x J ≤ η} ⊆ G" (is "?J' ⊆ _")
    by auto
  hence infdist_in_G: "infdist x J ≤ η ⟹ x ∈ G" for x
    by auto
  have dist_in_G: "⋀y. dist x y < η ⟹ y ∈ G" if "x ∈ J" for x
    by (auto intro!: infdist_in_G infdist_le2 that simp: dist_commute)

  have "compact ?J'" by (rule compact_infdist_le; fact)
  let ?seg = ?J'
  from ‹continuous_on G f'›
  have ucont: "uniformly_continuous_on ?seg f'"
    using ‹?seg ⊆ G›
    by (auto intro!: compact_uniformly_continuous ‹compact ?seg› intro: continuous_on_subset)

  define e' where "e' = e / 2"
  have "e' > 0" using e > 0 by (simp add: e'_def)
  from ucont[unfolded uniformly_continuous_on_def, rule_format, OF 0 < e']
  obtain du where du:
    "du > 0"
    "x y. x  ?seg  y  ?seg  dist x y < du  norm (f' x - f' y) < e'"
    by (auto simp: dist_norm)
  have "min η du > 0" using du > 0 η > 0 by simp
  moreover
  have "f z = f x + f' x (z - x) + R x z" for x z
    by (auto simp: R_def)
  moreover
  {
    fix x z::'a
    assume "x  J" "z  J"
    hence "x  G" "z  G" using assms by auto

    assume "dist x z < min η du"
    hence d_eta: "dist x z < η" and d_du: "dist x z < du"
      by (auto simp add: min_def split: if_split_asm)

    from ‹dist x z < η› have line_in:
      "⋀xa. 0 ≤ xa ⟹ xa ≤ 1 ⟹ x + xa *⇩R (z - x) ∈ G"
      "(λxa. x + xa *⇩R (z - x)) ` {0..1} ⊆ G"
      by (auto intro!: dist_in_G ‹x ∈ J› le_less_trans[OF mult_left_le_one_le]
        simp: dist_norm norm_minus_commute)

    have "R x z = f z - f x - f' x (z - x)"
      by (simp add: R_def)
    also have "f z - f x = f (x + (z - x)) - f x" by simp
    also have "f (x + (z - x)) - f x = integral {0..1} (λt. (f' (x + t *R (z - x))) (z - x))"
      using dist x z < η
      by (intro mvt_integral[of "ball x η" f f' x "z - x"])
        (auto simp: dist_norm norm_minus_commute at_within_ball 0 < η mem_ball
          intro!: le_less_trans[OF mult_left_le_one_le] derivative_eq_intros dist_in_G x  J)
    also have
      "(integral {0..1} (λt. (f' (x + t *R (z - x))) (z - x)) - (f' x) (z - x)) =
        integral {0..1} (λt. f' (x + t *R (z - x)) - f' x) (z - x)"
      by (simp add: Henstock_Kurzweil_Integration.integral_diff integral_linear[where h="λy. blinfun_apply y (z - x)", simplified o_def]
        integrable_continuous_real continuous_intros line_in
        blinfun.bilinear_simps[symmetric])
    finally have "R x z = integral {0..1} (λt. f' (x + t *R (z - x)) - f' x) (z - x)"
      .
    also have "norm   norm (integral {0..1} (λt. f' (x + t *R (z - x)) - f' x)) * norm (z - x)"
      by (auto intro!: order_trans[OF norm_blinfun])
    also have "  e' * (1 - 0) * norm (z - x)"
      using d_eta d_du 0 < η
      by (intro mult_right_mono integral_bound)
        (auto simp: dist_norm norm_minus_commute
          intro!: line_in du[THEN less_imp_le] infdist_le2[OF x  J] line_in continuous_intros
            order_trans[OF mult_left_le_one_le] le_less_trans[OF mult_left_le_one_le])
    also have "  e * dist x z" using 0 < e by (simp add: e'_def norm_minus_commute dist_norm)
    finally have "norm (R x z)  e * dist x z" .
  }
  moreover
  {
    from f' have f_cont: "continuous_on G f"
      by (rule has_derivative_continuous_on[OF has_derivative_at_withinI])
    note [continuous_intros] = continuous_on_compose2[OF this]
    from f'_cont have f'_cont: "continuous_on G f'"
      by (auto intro!: continuous_at_imp_continuous_on)

    note continuous_on_diff2=continuous_on_diff[OF continuous_on_compose[OF continuous_on_snd] continuous_on_compose[OF continuous_on_fst], where s="G × G", simplified]
    have "continuous_on (G × G) (λ(a, b). f b - f a)"
      by (auto intro!: continuous_intros simp: split_beta)
    moreover have "continuous_on (G × G) (λ(a, b). f' a (b - a))"
      by (auto intro!: continuous_intros simp: split_beta')
    ultimately have "continuous_on (G × G) (λ(a, b). R a b)"
      by (rule iffD1[OF continuous_on_cong[OF refl] continuous_on_diff, rotated], auto simp: R_def)
  }
  ultimately
  show thesis ..
qed


text ‹TODO: rename, duplication?›
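
text ‹The following locale is a variant of @{text "second_derivative_within"} that works
  with plain (curried) derivative functions instead of bounded linear operators; it proves
  the same symmetry statement, handling the degenerate cases @{text "i = j"}, @{text "i = 0"},
  and @{text "j = 0"} separately via linearity of the derivatives.›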

locale second_derivative_within' =
  fixes f f' f'' a G
  assumes first_fderiv[derivative_intros]:
    "a. a  G  (f has_derivative f' a) (at a within G)"
  assumes in_G: "a  G"
  assumes second_fderiv[derivative_intros]:
    "i. ((λx. f' x i) has_derivative f'' i) (at a within G)"
begin

lemma symmetric_second_derivative_within:
  assumes "a  G"  "open G"
  assumes "s t. s  {0..1}  t  {0..1}  a + s *R i + t *R j  G"
  shows "f'' i j = f'' j i"
proof (cases "i = j  i = 0  j = 0")
  case True
  interpret bounded_linear "f'' k" for k
    by (rule has_derivative_bounded_linear) (rule second_fderiv)
  have z1: "f'' j 0 = 0" "f'' i 0 = 0" by (simp_all add: zero)
  have f'z: "f' x 0 = 0" if "x  G" for x
  proof -
    interpret bounded_linear "f' x"
      by (rule has_derivative_bounded_linear) (rule first_fderiv that)+
    show ?thesis by (simp add: zero)
  qed
  note aw = at_within_open[OF ‹a ∈ G› ‹open G›]
  have "((λx. f' x 0) has_derivative (λ_. 0)) (at a within G)"
    apply (rule has_derivative_transform_within)
       apply (rule has_derivative_const[where c=0])
      apply (rule zero_less_one)
     apply fact
    by (simp add: f'z)
  from has_derivative_unique[OF second_fderiv[unfolded aw] this[unfolded aw]]
  have "f'' 0 = (λ_. 0)" .
  with True z1 show ?thesis
    by (auto)
next
  case False
  show ?thesis
    using first_fderiv _ _ _ _ assms(1,3-)
    by (rule symmetric_second_derivative_aux[])
       (use False in ‹auto intro!: derivative_eq_intros simp: blinfun.bilinear_simps assms›)
qed

end

locale second_derivative_on_open =
  fixes f::"'a::real_normed_vector ⇒ 'b::banach"
    and f' :: "'a ⇒ 'a ⇒ 'b"
    and f'' :: "'a ⇒ 'a ⇒ 'b"
    and a :: 'a
    and G :: "'a set"
  assumes first_fderiv[derivative_intros]:
    "a. a  G  (f has_derivative f' a) (at a)"
  assumes in_G: "a  G" and open_G: "open G"
  assumes second_fderiv[derivative_intros]:
    "((λx. f' x i) has_derivative f'' i) (at a)"
begin

lemma symmetric_second_derivative:
  assumes "a  G"
  shows "f'' i j = f'' j i"
proof -
  interpret second_derivative_within'
    by unfold_locales
      (auto intro!: derivative_intros intro: has_derivative_at_withinI ‹a ∈ G›)
  from ‹a ∈ G› open_G
  obtain e where e: "e > 0" "⋀y. dist y a < e ⟹ y ∈ G"
    by (force simp: open_dist)
  define e' where "e' = e / 3"
  define i' j' where "i' = e' *⇩R i /⇩R norm i" and "j' = e' *⇩R j /⇩R norm j"
  hence "norm i' ≤ e'" "norm j' ≤ e'"
    by (auto simp: field_simps e'_def ‹0 < e› less_imp_le)
  hence "¦s¦ ≤ 1 ⟹ ¦t¦ ≤ 1 ⟹ norm (s *⇩R i' + t *⇩R j') ≤ e' + e'" for s t
    by (intro norm_triangle_le[OF add_mono])
      (auto intro!: order_trans[OF mult_left_le_one_le])
  also have "… < e" by (simp add: e'_def ‹0 < e›)
  finally
  have "f'' i' j' = f'' j' i'"
    by (intro symmetric_second_derivative_within ‹a ∈ G› e)
      (auto simp add: dist_norm open_G)
  moreover
  interpret f'': bounded_linear "f'' k" for k
    by (rule has_derivative_bounded_linear) (rule second_fderiv)
  note aw = at_within_open[OF ‹a ∈ G› ‹open G›]
  have z1: "f'' j 0 = 0" "f'' i 0 = 0" by (simp_all add: f''.zero)
  have f'z: "f' x 0 = 0" if "x ∈ G" for x
  proof -
    interpret bounded_linear "f' x"
      by (rule has_derivative_bounded_linear) (rule first_fderiv that)+
    show ?thesis by (simp add: zero)
  qed
  have "((λx. f' x 0) has_derivative (λ_. 0)) (at a within G)"
    apply (rule has_derivative_transform_within)
       apply (rule has_derivative_const[where c=0])
      apply (rule zero_less_one)
     apply fact
    by (simp add: f'z)
  from has_derivative_unique[OF second_fderiv[unfolded aw] this[unfolded aw]]
  have z2: "f'' 0 = (λ_. 0)" .
  have "((λa. f' a (r *R x)) has_derivative f'' (r *R x)) (at a within G)"
    "((λa. f' a (r *R x)) has_derivative (λy. r *R f'' x y)) (at a within G)"
    for r x
    subgoal by (rule second_fderiv)
    subgoal
    proof -
      have "((λa. r *R f' a (x)) has_derivative (λy. r *R f'' x y)) (at a within G)"
        by (auto intro!: derivative_intros)
      then show ?thesis
        apply (rule has_derivative_transform[rotated 2])
         apply (rule in_G)
        subgoal premises prems for a'
        proof -
          interpret bounded_linear "f' a'"
            apply (rule has_derivative_bounded_linear)
            by (rule first_fderiv[OF prems])
          show ?thesis
            by (simp add: scaleR)
        qed
        done
    qed
    done
  then have "((λa. f' a (r *R x)) has_derivative f'' (r *R x)) (at a)"
    "((λa. f' a (r *R x)) has_derivative (λy. r *R f'' x y)) (at a)" for r x
    unfolding aw by auto
  then have f'z: "f'' (r *R x) = (λy. r *R f'' x y)" for r x
    by (rule has_derivative_unique[where f="(λa. f' a (r *R x))"])
  ultimately show ?thesis
    using e(1)
    by (auto simp: i'_def j'_def e'_def f''.scaleR z1 z2
      blinfun.zero_right blinfun.zero_left
      blinfun.scaleR_left blinfun.scaleR_right algebra_simps)
qed

end

no_notation
  blinfun_apply (infixl "$" 999)
notation vec_nth (infixl "$" 90)

end