feat(frontends/lean): add 'begin+' and 'by+' that enter tactic mode with the whole context visible

This commit is contained in:
Leonardo de Moura 2015-05-05 18:47:25 -07:00
parent 5d515a06f7
commit 7cd444882c
6 changed files with 45 additions and 10 deletions

View file

@@ -9,7 +9,7 @@
(defconst lean-keywords
'("import" "prelude" "tactic_hint" "protected" "private" "opaque" "definition" "renaming"
"hiding" "exposing" "parameter" "parameters" "begin" "proof" "qed" "conjecture" "constant" "constants"
"hiding" "exposing" "parameter" "parameters" "begin" "begin+" "proof" "qed" "conjecture" "constant" "constants"
"hypothesis" "lemma" "corollary" "variable" "variables" "premise" "premises"
"print" "theorem" "example" "abbreviation"
"open" "as" "export" "axiom" "axioms" "inductive" "with" "structure" "record" "universe" "universes"
@@ -98,7 +98,7 @@
(defconst lean-font-lock-defaults
`((;; Keywords
(,(rx word-start (or "calc" "have" "obtains" "show" "by" "in" "at" "let" "forall" "fun"
(,(rx word-start (or "calc" "have" "obtains" "show" "by" "by+" "in" "at" "let" "forall" "fun"
"exists" "if" "dif" "then" "else" "assume" "assert" "take" "obtain" "from") word-end)
. font-lock-keyword-face)
;; String

View file

@@ -130,7 +130,13 @@ static expr parse_by(parser & p, unsigned, expr const *, pos_info const & pos) {
return p.mk_by(t, pos);
}
static expr parse_begin_end_core(parser & p, pos_info const & pos, name const & end_token, bool nested = false) {
// parse: 'by+' tactic -- same shape as parse_by above, but builds the
// proof term with mk_by_plus, which (per the commit message) enters
// tactic mode with the whole local context visible
static expr parse_by_plus(parser & p, unsigned, expr const *, pos_info const & pos) {
p.next();
expr t = p.parse_tactic();
return p.mk_by_plus(t, pos);
}
static expr parse_begin_end_core(parser & p, pos_info const & pos, name const & end_token, bool plus, bool nested = false) {
if (!p.has_tactic_decls())
throw parser_error("invalid 'begin-end' expression, tactic module has not been imported", pos);
p.next();
@@ -159,10 +165,10 @@ static expr parse_begin_end_core(parser & p, pos_info const & pos, name const &
}
if (p.curr_is_token(get_begin_tk())) {
auto pos = p.pos();
tacs.push_back(parse_begin_end_core(p, pos, get_end_tk(), true));
tacs.push_back(parse_begin_end_core(p, pos, get_end_tk(), plus, true));
} else if (p.curr_is_token(get_lcurly_tk())) {
auto pos = p.pos();
tacs.push_back(parse_begin_end_core(p, pos, get_rcurly_tk(), true));
tacs.push_back(parse_begin_end_core(p, pos, get_rcurly_tk(), plus, true));
} else if (p.curr_is_token(end_token)) {
break;
} else if (p.curr_is_token(get_assert_tk())) {
@@ -211,7 +217,7 @@ static expr parse_begin_end_core(parser & p, pos_info const & pos, name const &
add_tac(t, pos);
} else if (p.curr_is_token(get_begin_tk())) {
auto pos = p.pos();
tacs.push_back(parse_begin_end_core(p, pos, get_end_tk(), true));
tacs.push_back(parse_begin_end_core(p, pos, get_end_tk(), plus, true));
} else if (p.curr_is_token(get_by_tk())) {
// parse: 'by' tactic
auto pos = p.pos();
@@ -265,12 +271,18 @@ static expr parse_begin_end_core(parser & p, pos_info const & pos, name const &
r = p.save_pos(mk_begin_end_annotation(r), end_pos);
if (nested)
return r;
else if (plus)
return p.mk_by_plus(r, end_pos);
else
return p.mk_by(r, end_pos);
}
static expr parse_begin_end(parser & p, unsigned, expr const *, pos_info const & pos) {
return parse_begin_end_core(p, pos, get_end_tk());
return parse_begin_end_core(p, pos, get_end_tk(), false);
}
// parse: 'begin+' ... 'end' block -- delegates to parse_begin_end_core
// with plus=true, so the result is wrapped with mk_by_plus (whole
// context visible) instead of mk_by
static expr parse_begin_end_plus(parser & p, unsigned, expr const *, pos_info const & pos) {
return parse_begin_end_core(p, pos, get_end_tk(), true);
}
static expr parse_proof_qed_core(parser & p, pos_info const & pos) {
@@ -328,7 +340,10 @@ static expr parse_proof(parser & p, expr const & prop) {
return parse_proof_qed_core(p, pos);
} else if (p.curr_is_token(get_begin_tk())) {
auto pos = p.pos();
return parse_begin_end_core(p, pos, get_end_tk());
return parse_begin_end_core(p, pos, get_end_tk(), false);
} else if (p.curr_is_token(get_begin_plus_tk())) {
auto pos = p.pos();
return parse_begin_end_core(p, pos, get_end_tk(), true);
} else if (p.curr_is_token(get_by_tk())) {
// parse: 'by' tactic
auto pos = p.pos();
@@ -576,6 +591,7 @@ parse_table init_nud_table() {
parse_table r;
r = r.add({transition("_", mk_ext_action(parse_placeholder))}, x0);
r = r.add({transition("by", mk_ext_action_core(parse_by))}, x0);
r = r.add({transition("by+", mk_ext_action_core(parse_by_plus))}, x0);
r = r.add({transition("have", mk_ext_action(parse_have))}, x0);
r = r.add({transition("assert", mk_ext_action(parse_assert))}, x0);
r = r.add({transition("show", mk_ext_action(parse_show))}, x0);
@@ -593,6 +609,7 @@ parse_table init_nud_table() {
r = r.add({transition("@", mk_ext_action(parse_explicit_expr))}, x0);
r = r.add({transition("!", mk_ext_action(parse_consume_args_expr))}, x0);
r = r.add({transition("begin", mk_ext_action_core(parse_begin_end))}, x0);
r = r.add({transition("begin+", mk_ext_action_core(parse_begin_end_plus))}, x0);
r = r.add({transition("proof", mk_ext_action(parse_proof_qed))}, x0);
r = r.add({transition("using", mk_ext_action(parse_using))}, x0);
r = r.add({transition("sorry", mk_ext_action(parse_sorry))}, x0);

View file

@@ -89,7 +89,7 @@ static char const * g_decreasing_unicode = "↓";
void init_token_table(token_table & t) {
pair<char const *, unsigned> builtin[] =
{{"fun", 0}, {"Pi", 0}, {"let", 0}, {"in", 0}, {"at", 0}, {"have", 0}, {"assert", 0}, {"show", 0}, {"obtain", 0},
{"if", 0}, {"then", 0}, {"else", 0}, {"by", 0}, {"hiding", 0}, {"replacing", 0}, {"renaming", 0},
{"if", 0}, {"then", 0}, {"else", 0}, {"by", 0}, {"by+", 0}, {"hiding", 0}, {"replacing", 0}, {"renaming", 0},
{"from", 0}, {"(", g_max_prec}, {")", 0}, {"{", g_max_prec}, {"}", 0}, {"_", g_max_prec},
{"[", g_max_prec}, {"]", 0}, {"", g_max_prec}, {"", 0}, {".{", 0}, {"Type", g_max_prec},
{"{|", g_max_prec}, {"|}", 0}, {"", 0}, {"", g_max_prec}, {"", 0}, {"^", 0}, {"", 0}, {"", 0},
@@ -97,7 +97,7 @@ void init_token_table(token_table & t) {
{".", 0}, {":", 0}, {"::", 0}, {"calc", 0}, {"rewrite", 0}, {"esimp", 0}, {"fold", 0}, {"unfold", 0},
{"generalize", 0}, {"as", 0},
{":=", 0}, {"--", 0}, {"#", 0},
{"(*", 0}, {"/-", 0}, {"begin", g_max_prec}, {"proof", g_max_prec}, {"qed", 0}, {"@", g_max_prec},
{"(*", 0}, {"/-", 0}, {"begin", g_max_prec}, {"begin+", g_max_prec}, {"proof", g_max_prec}, {"qed", 0}, {"@", g_max_prec},
{"sorry", g_max_prec}, {"+", g_plus_prec}, {g_cup, g_cup_prec}, {"->", g_arrow_prec},
{"?(", g_max_prec}, {"", g_max_prec}, {"", 0}, {"match", 0},
{"<d", g_decreasing_prec}, {"renaming", 0}, {"extends", 0}, {nullptr, 0}};

View file

@@ -89,6 +89,7 @@ static name * g_rewrite = nullptr;
static name * g_proof = nullptr;
static name * g_qed = nullptr;
static name * g_begin = nullptr;
static name * g_beginp = nullptr;
static name * g_end = nullptr;
static name * g_private = nullptr;
static name * g_definition = nullptr;
@@ -223,6 +224,7 @@ void initialize_tokens() {
g_proof = new name("proof");
g_qed = new name("qed");
g_begin = new name("begin");
g_beginp = new name("begin+");
g_end = new name("end");
g_private = new name("private");
g_definition = new name("definition");
@@ -338,6 +340,7 @@ void finalize_tokens() {
delete g_proof;
delete g_qed;
delete g_begin;
delete g_beginp;
delete g_end;
delete g_raw;
delete g_true;
@@ -494,6 +497,7 @@ name const & get_rewrite_tk() { return *g_rewrite; }
name const & get_proof_tk() { return *g_proof; }
name const & get_qed_tk() { return *g_qed; }
name const & get_begin_tk() { return *g_begin; }
name const & get_begin_plus_tk() { return *g_beginp; }
name const & get_end_tk() { return *g_end; }
name const & get_private_tk() { return *g_private; }
name const & get_definition_tk() { return *g_definition; }

View file

@@ -90,6 +90,7 @@ name const & get_by_tk();
name const & get_rewrite_tk();
name const & get_proof_tk();
name const & get_begin_tk();
name const & get_begin_plus_tk();
name const & get_qed_tk();
name const & get_end_tk();
name const & get_private_tk();

View file

@@ -0,0 +1,13 @@
example (a b c : Prop) : a → b → c → a ∧ c ∧ b :=
assume Ha Hb Hc,
have aux : c ∧ b, from and.intro Hc Hb,
begin+ -- the whole context is visible
split,
state,
repeat assumption
end
example (a b c : Prop) : a → b → c → a ∧ c ∧ b :=
assume Ha Hb Hc,
have aux : c ∧ b, from and.intro Hc Hb,
by+ split; repeat assumption