refactor(frontends/lean): remove 'by+' and 'begin+' tokens
parent 79ba2638b7
commit fbe5188480
11 changed files with 17 additions and 58 deletions
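For orientation, a minimal Lean sketch of the affected syntax, assembled from the test files updated below (`P` and `HP` are the placeholder declarations those tests already use, re-declared here so the snippet is self-contained):

    -- Sketch based on the tests/lean changes in this commit, not new code.
    constant P : Prop
    constant HP : P

    -- before this commit: 'by+' tactic block
    example : P :=
    have H : P, from HP,
    by+ exact H

    -- after this commit: plain 'by'
    example : P :=
    have H : P, from HP,
    by exact H

The `begin+ ... end` form behaved analogously; the deleted test at the bottom of this diff describes it as making "the whole context visible" to the tactic block.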
@@ -111,6 +111,7 @@ section
end

definition wf : well_founded Q :=
using H₂,
well_founded.intro (λ a, accessible proof (@apply A R H₂ a) qed)
end
end subrelation

@@ -68,6 +68,7 @@ parameter [dp : decidable_pred p]
include dp

private lemma acc_of_ex (x : nat) : acc gtb x :=
using ex,
obtain (w : nat) (pw : p w), from ex,
lt.by_cases
(suppose x < w, acc_of_acc_of_lt (acc_of_px pw) this)

@@ -95,6 +95,7 @@ open set
from image_subset_of_maps_to_of_subset g_maps_to this

lemma g_ginv_eq {a : X} (aA : a ∈ A) (anU : a ∉ Union U) : g (ginv a) = a :=
using ginj,
have a ∈ g ' B, from by_contradiction
(suppose a ∉ g ' B,
have a ∈ U 0, from and.intro aA this,

@@ -124,6 +125,7 @@ open set
/- h is injective -/

lemma aux {a₁ a₂ : X} (H₁ : a₁ ∈ Union U) (a₂A : a₂ ∈ A) (heq : h a₁ = h a₂) : a₂ ∈ Union U :=
using ginj,
obtain n (a₁Un : a₁ ∈ U n), from H₁,
have ha₁eq : h a₁ = f a₁,
from dif_pos H₁,

@@ -179,11 +181,10 @@ open set
lemma h_surj : surj_on h A B :=
take b,
suppose b ∈ B,
have g b ∈ A, from g_maps_to this,
using f_maps_to,
by_cases
(suppose g b ∈ Union U,
obtain n (gbUn : g b ∈ U n), from this,
using ginj f_maps_to,
begin
cases n with n,
{have g b ∈ U 0, from gbUn,

@@ -205,6 +206,7 @@ open set
(suppose g b ∉ Union U,
have eq₁ : h (g b) = ginv (g b), from dif_neg this,
have eq₂ : ginv (g b) = b, from ginv_g_eq `b ∈ B`,
have g b ∈ A, from g_maps_to `b ∈ B`,
show b ∈ h ' A, from mem_image `g b ∈ A` (eq₁ ⬝ eq₂))
end
end schroeder_bernstein

@@ -221,6 +223,7 @@ section
parameter (g_maps_to : maps_to g B A)
parameter (ginj : inj_on g B)

include g g_maps_to ginj
theorem schroeder_bernstein : ∃ h, bij_on h A B :=
by_cases
(assume H : ∀ b, b ∉ B,

@@ -145,13 +145,6 @@ static expr parse_by(parser & p, unsigned, expr const *, pos_info const & pos) {
    return p.mk_by(t, pos);
}

static expr parse_by_plus(parser & p, unsigned, expr const *, pos_info const & pos) {
    p.next();
    expr t = p.parse_tactic();
    p.update_pos(t, pos);
    return p.mk_by_plus(t, pos);
}

static expr parse_begin_end_core(parser & p, pos_info const & start_pos,
                                 name const & end_token, bool plus, bool nested = false) {
    if (!p.has_tactic_decls())

@@ -304,8 +297,6 @@ static expr parse_begin_end_core(parser & p, pos_info const & start_pos,
    r = p.save_pos(mk_begin_end_annotation(r), end_pos);
    if (nested)
        return r;
    else if (plus)
        return p.mk_by_plus(r, end_pos);
    else
        return p.mk_by(r, end_pos);
}

@@ -314,10 +305,6 @@ static expr parse_begin_end(parser & p, unsigned, expr const *, pos_info const &
    return parse_begin_end_core(p, pos, get_end_tk(), false);
}

static expr parse_begin_end_plus(parser & p, unsigned, expr const *, pos_info const & pos) {
    return parse_begin_end_core(p, pos, get_end_tk(), true);
}

static expr parse_tactic_expr(parser & p, unsigned, expr const *, pos_info const &) {
    return p.parse_tactic();
}

@@ -325,7 +312,7 @@ static expr parse_tactic_expr(parser & p, unsigned, expr const *, pos_info const
static expr parse_proof_qed_core(parser & p, pos_info const & pos) {
    expr r = p.parse_expr();
    p.check_token_next(get_qed_tk(), "invalid proof-qed, 'qed' expected");
    r = p.mk_by_plus(p.mk_app(get_exact_tac_fn(), r, pos), pos);
    r = p.mk_by(p.mk_app(get_exact_tac_fn(), r, pos), pos);
    return r;
}

@@ -381,15 +368,9 @@ static expr parse_proof(parser & p) {
    } else if (p.curr_is_token(get_begin_tk())) {
        auto pos = p.pos();
        return parse_begin_end_core(p, pos, get_end_tk(), false);
    } else if (p.curr_is_token(get_begin_plus_tk())) {
        auto pos = p.pos();
        return parse_begin_end_core(p, pos, get_end_tk(), true);
    } else if (p.curr_is_token(get_by_tk())) {
        auto pos = p.pos();
        return parse_by(p, 0, nullptr, pos);
    } else if (p.curr_is_token(get_by_plus_tk())) {
        auto pos = p.pos();
        return parse_by_plus(p, 0, nullptr, pos);
    } else {
        throw parser_error("invalid expression, 'by', 'begin', 'proof', 'using' or 'from' expected", p.pos());
    }

@@ -595,11 +576,11 @@ static expr parse_obtain(parser & p, unsigned, expr const *, pos_info const & po
    //
    if (is_constant(from_term) || is_local(from_term)) {
        expr r = p.rec_save_pos(mk_obtain_expr(s, ps, from_term, goal_term), pos);
        return p.mk_by_plus(p.mk_app(get_exact_tac_fn(), r, pos), pos);
        return p.mk_by(p.mk_app(get_exact_tac_fn(), r, pos), pos);
    } else {
        expr H_type = p.save_pos(mk_expr_placeholder(), pos);
        expr body = p.rec_save_pos(mk_obtain_expr(s, ps, mk_var(0), goal_term), pos);
        body = p.mk_by_plus(p.mk_app(get_exact_tac_fn(), body, pos), pos);
        body = p.mk_by(p.mk_app(get_exact_tac_fn(), body, pos), pos);
        expr fn = p.rec_save_pos(mk_lambda(*H_obtain_from, H_type, body), pos);
        expr r = p.mk_app(fn, from_term, pos);
        return p.save_pos(mk_have_annotation(r), pos);

@@ -731,7 +712,6 @@ parse_table init_nud_table() {
    parse_table r;
    r = r.add({transition("_", mk_ext_action(parse_placeholder))}, x0);
    r = r.add({transition("by", mk_ext_action_core(parse_by))}, x0);
    r = r.add({transition("by+", mk_ext_action_core(parse_by_plus))}, x0);
    r = r.add({transition("have", mk_ext_action(parse_have))}, x0);
    r = r.add({transition("assert", mk_ext_action(parse_have))}, x0);
    r = r.add({transition("suppose", mk_ext_action(parse_suppose))}, x0);

@@ -756,7 +736,6 @@ parse_table init_nud_table() {
    r = r.add({transition("@@", mk_ext_action(parse_partial_explicit_expr))}, x0);
    r = r.add({transition("!", mk_ext_action(parse_consume_args_expr))}, x0);
    r = r.add({transition("begin", mk_ext_action_core(parse_begin_end))}, x0);
    r = r.add({transition("begin+", mk_ext_action_core(parse_begin_end_plus))}, x0);
    r = r.add({transition("proof", mk_ext_action(parse_proof_qed))}, x0);
    r = r.add({transition("using", mk_ext_action(parse_using))}, x0);
    r = r.add({transition("sorry", mk_ext_action(parse_sorry))}, x0);

@@ -82,12 +82,10 @@ static name const * g_using_tk = nullptr;
static name const * g_then_tk = nullptr;
static name const * g_else_tk = nullptr;
static name const * g_by_tk = nullptr;
static name const * g_by_plus_tk = nullptr;
static name const * g_rewrite_tk = nullptr;
static name const * g_proof_tk = nullptr;
static name const * g_qed_tk = nullptr;
static name const * g_begin_tk = nullptr;
static name const * g_begin_plus_tk = nullptr;
static name const * g_end_tk = nullptr;
static name const * g_private_tk = nullptr;
static name const * g_protected_tk = nullptr;

@@ -231,12 +229,10 @@ void initialize_tokens() {
    g_then_tk = new name{"then"};
    g_else_tk = new name{"else"};
    g_by_tk = new name{"by"};
    g_by_plus_tk = new name{"by+"};
    g_rewrite_tk = new name{"rewrite"};
    g_proof_tk = new name{"proof"};
    g_qed_tk = new name{"qed"};
    g_begin_tk = new name{"begin"};
    g_begin_plus_tk = new name{"begin+"};
    g_end_tk = new name{"end"};
    g_private_tk = new name{"private"};
    g_protected_tk = new name{"protected"};

@@ -381,12 +377,10 @@ void finalize_tokens() {
    delete g_then_tk;
    delete g_else_tk;
    delete g_by_tk;
    delete g_by_plus_tk;
    delete g_rewrite_tk;
    delete g_proof_tk;
    delete g_qed_tk;
    delete g_begin_tk;
    delete g_begin_plus_tk;
    delete g_end_tk;
    delete g_private_tk;
    delete g_protected_tk;

@@ -530,12 +524,10 @@ name const & get_using_tk() { return *g_using_tk; }
name const & get_then_tk() { return *g_then_tk; }
name const & get_else_tk() { return *g_else_tk; }
name const & get_by_tk() { return *g_by_tk; }
name const & get_by_plus_tk() { return *g_by_plus_tk; }
name const & get_rewrite_tk() { return *g_rewrite_tk; }
name const & get_proof_tk() { return *g_proof_tk; }
name const & get_qed_tk() { return *g_qed_tk; }
name const & get_begin_tk() { return *g_begin_tk; }
name const & get_begin_plus_tk() { return *g_begin_plus_tk; }
name const & get_end_tk() { return *g_end_tk; }
name const & get_private_tk() { return *g_private_tk; }
name const & get_protected_tk() { return *g_protected_tk; }

@@ -84,12 +84,10 @@ name const & get_using_tk();
name const & get_then_tk();
name const & get_else_tk();
name const & get_by_tk();
name const & get_by_plus_tk();
name const & get_rewrite_tk();
name const & get_proof_tk();
name const & get_qed_tk();
name const & get_begin_tk();
name const & get_begin_plus_tk();
name const & get_end_tk();
name const & get_private_tk();
name const & get_protected_tk();

@@ -77,12 +77,10 @@ using using
then then
else else
by by
by_plus by+
rewrite rewrite
proof proof
qed qed
begin begin
begin_plus begin+
end end
private private
protected protected

@@ -7,7 +7,7 @@ definition H : is_equiv f := sorry
definition loop [instance] [h : is_equiv f] : is_equiv f :=
h

notation `noinstances` t:max := by+ with_options [elaborator.ignore_instances true] (exact t)
notation `noinstances` t:max := by with_options [elaborator.ignore_instances true] (exact t)

example (a : A) : let H' : is_equiv f := H in @(is_equiv.inv f) H' (f a) = a :=
noinstances (left_inv f a)

@@ -11,15 +11,15 @@ show P, from H

example : P :=
have H : P, from HP,
by+ exact H
by exact H

example : P :=
have H : P, from HP,
show P, by+ exact H
show P, by exact H

example : P :=
have H : P, from HP,
show P, begin+ exact H end
show P, begin exact H end

example : P :=
have H : P, from HP,

@@ -5,14 +5,14 @@ constant P : nat → Prop

example (Hex : ∃ n, P n) : true :=
obtain n Hn, from Hex,
begin+
begin
note Hn2 := Hn,
exact trivial
end

example (Hex : ∃ n, P n) : true :=
obtain n Hn, from Hex,
begin+
begin
have H : n ≥ 0, from sorry,
exact trivial
end

@@ -1,13 +0,0 @@
example (a b c : Prop) : a → b → c → a ∧ c ∧ b :=
assume Ha Hb Hc,
have aux : c ∧ b, from and.intro Hc Hb,
begin+ -- the whole context is visible
split,
state,
repeat assumption
end

example (a b c : Prop) : a → b → c → a ∧ c ∧ b :=
assume Ha Hb Hc,
have aux : c ∧ b, from and.intro Hc Hb,
by+ split; repeat assumption
|
Loading…
Reference in a new issue