feat(frontends/lean/parse_rewrite_tactic): improve rewrite tactic parser
parent 787fed95aa
commit 3197e6d403

3 changed files with 25 additions and 20 deletions
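The lines below sketch the user-visible effect, taken directly from the library changes in this commit: inside a bracketed rewrite list, a rule applied to arguments no longer needs its own parentheses (the bare single-rule form keeps the stricter requirement, as the parser comment in the diff notes).

    -- before this commit: an applied rule inside ⟨...⟩ had to be parenthesized
    by rewrite ⟨-(mul_one a⁻¹), -H, inv_mul_cancel_left⟩
    -- after this commit: the bracketed list accepts the full rule expression directly
    by rewrite ⟨-mul_one a⁻¹, -H, inv_mul_cancel_left⟩
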
@@ -157,7 +157,7 @@ section group
   by rewrite ⟨mul.assoc, mul.left_inv, mul_one⟩

   theorem inv_eq_of_mul_eq_one {a b : A} (H : a * b = 1) : a⁻¹ = b :=
-  by rewrite ⟨-(mul_one a⁻¹), -H, inv_mul_cancel_left⟩
+  by rewrite ⟨-mul_one a⁻¹, -H, inv_mul_cancel_left⟩

   theorem inv_one : 1⁻¹ = 1 := inv_eq_of_mul_eq_one (one_mul 1)

@@ -240,10 +240,10 @@ section group
   iff.intro eq_mul_inv_of_mul_eq mul_eq_of_eq_mul_inv

   theorem mul_left_cancel {a b c : A} (H : a * b = a * c) : b = c :=
-  by rewrite ⟨-(inv_mul_cancel_left a b), H, inv_mul_cancel_left⟩
+  by rewrite ⟨-inv_mul_cancel_left a b, H, inv_mul_cancel_left⟩

   theorem mul_right_cancel {a b c : A} (H : a * b = c * b) : a = c :=
-  by rewrite ⟨-(mul_inv_cancel_right a b), H, mul_inv_cancel_right⟩
+  by rewrite ⟨-mul_inv_cancel_right a b, H, mul_inv_cancel_right⟩

   definition group.to_left_cancel_semigroup [instance] [coercion] [reducible] : left_cancel_semigroup A :=
   ⦃ left_cancel_semigroup, s,
@@ -411,8 +411,8 @@ section

   theorem abs_neg (a : A) : |-a| = |a| :=
   or.elim (le.total 0 a)
-    (assume H1 : 0 ≤ a, by rewrite [(abs_of_nonpos (neg_nonpos_of_nonneg H1)), neg_neg, (abs_of_nonneg H1)])
-    (assume H1 : a ≤ 0, by rewrite [(abs_of_nonneg (neg_nonneg_of_nonpos H1)), (abs_of_nonpos H1)])
+    (assume H1 : 0 ≤ a, by rewrite [abs_of_nonpos (neg_nonpos_of_nonneg H1), neg_neg, abs_of_nonneg H1])
+    (assume H1 : a ≤ 0, by rewrite [abs_of_nonneg (neg_nonneg_of_nonpos H1), abs_of_nonpos H1])

   theorem abs_nonneg (a : A) : | a | ≥ 0 :=
   or.elim (le.total 0 a)
@@ -21,7 +21,8 @@ static optional<expr> parse_pattern(parser & p) {
     }
 }

-static expr parse_rule(parser & p) {
+static expr parse_rule(parser & p, bool use_paren) {
+    if (use_paren) {
     if (p.curr_is_token(get_lparen_tk())) {
         p.next();
         expr r = p.parse_expr();
@@ -30,6 +31,9 @@ static expr parse_rule(parser & p) {
     } else {
         return p.parse_id();
     }
+    } else {
+        return p.parse_expr();
+    }
 }

 static expr parse_rewrite_unfold_core(parser & p) {
@@ -56,7 +60,8 @@ static expr parse_rewrite_unfold(parser & p) {
     return parse_rewrite_unfold_core(p);
 }

-expr parse_rewrite_element(parser & p) {
+// If use_paren is true, then lemmas must be identifiers or be wrapped with parenthesis
+static expr parse_rewrite_element(parser & p, bool use_paren) {
     if (p.curr_is_token(get_up_tk()) || p.curr_is_token(get_caret_tk()))
         return parse_rewrite_unfold(p);
     if (p.curr_is_token(get_down_tk())) {
@@ -75,25 +80,25 @@ expr parse_rewrite_element(parser & p) {
         if (p.curr_is_token(get_greater_tk())) {
             p.next();
             optional<expr> pat = parse_pattern(p);
-            expr H = parse_rule(p);
+            expr H = parse_rule(p, use_paren);
             location loc = parse_tactic_location(p);
             return mk_rewrite_at_most_n(pat, H, symm, n, loc);
         } else {
             optional<expr> pat = parse_pattern(p);
-            expr H = parse_rule(p);
+            expr H = parse_rule(p, use_paren);
             location loc = parse_tactic_location(p);
             return mk_rewrite_exactly_n(pat, H, symm, n, loc);
         }
     } else if (p.curr_is_token(get_star_tk())) {
         p.next();
         optional<expr> pat = parse_pattern(p);
-        expr H = parse_rule(p);
+        expr H = parse_rule(p, use_paren);
         location loc = parse_tactic_location(p);
         return mk_rewrite_zero_or_more(pat, H, symm, loc);
     } else if (p.curr_is_token(get_plus_tk())) {
         p.next();
         optional<expr> pat = parse_pattern(p);
-        expr H = parse_rule(p);
+        expr H = parse_rule(p, use_paren);
         location loc = parse_tactic_location(p);
         return mk_rewrite_one_or_more(pat, H, symm, loc);
     } else if (p.curr_is_token(get_triangle_tk()) || p.curr_is_token(get_greater_tk())) {
@@ -109,7 +114,7 @@ expr parse_rewrite_element(parser & p) {
         }
     } else {
         optional<expr> pat = parse_pattern(p);
-        expr H = parse_rule(p);
+        expr H = parse_rule(p, use_paren);
         location loc = parse_tactic_location(p);
         return mk_rewrite_once(pat, H, symm, loc);
     }
@@ -122,7 +127,7 @@ expr parse_rewrite_tactic(parser & p) {
         p.next();
         while (true) {
             auto pos = p.pos();
-            elems.push_back(p.save_pos(parse_rewrite_element(p), pos));
+            elems.push_back(p.save_pos(parse_rewrite_element(p, false), pos));
             if (!p.curr_is_token(get_comma_tk()))
                 break;
             p.next();
@@ -133,7 +138,7 @@ expr parse_rewrite_tactic(parser & p) {
         p.check_token_next(get_rangle_tk(), "invalid rewrite tactic, '⟩' expected");
     } else {
         auto pos = p.pos();
-        elems.push_back(p.save_pos(parse_rewrite_element(p), pos));
+        elems.push_back(p.save_pos(parse_rewrite_element(p, true), pos));
     }
     return mk_rewrite_tactic_expr(elems);
 }
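A hedged summary of the new use_paren flag, read off the parser changes above (the lemma names foo, bar, a, and b are hypothetical, for illustration only): inside a ⟨...⟩ list, parse_rewrite_element is called with use_paren = false, so parse_rule reads each rule as a full expression; the bare single-rule form passes use_paren = true, so the rule must be an identifier or a parenthesized expression.

    -- hypothetical lemma names (foo, bar, a, b) used only for illustration
    by rewrite ⟨foo a b, bar⟩    -- list form: applied rules parse as full expressions
    by rewrite foo               -- single form: a bare identifier is accepted
    by rewrite (foo a b)         -- single form: an applied rule must be parenthesized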