/*
Copyright (c) 2013 Microsoft Corporation. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.

Author: Leonardo de Moura
*/
#include <utility>
#include <vector>
#include <algorithm>
#include <sstream>
#include "util/name_set.h"
#include "util/buffer.h"
#include "kernel/for_each_fn.h"
#include "kernel/replace_fn.h"
#include "kernel/abstract.h"
#include "library/kernel_bindings.h"
#include "library/type_inferer.h"
#include "library/tactic/goal.h"

namespace lean {
goal::goal(hypotheses const & hs, expr const & c):m_hypotheses(hs), m_conclusion(c) {}

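// Pretty print the goal as "H1 : T1, ..., Hn : Tn ⊢ C".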
format goal::pp(formatter const & fmt, options const & opts) const {
    unsigned indent = get_pp_indent(opts);
    bool unicode = get_pp_unicode(opts);
    format turnstile = unicode ? format("\u22A2") /* ⊢ */ : format("|-");
    buffer<hypothesis> tmp;
    to_buffer(m_hypotheses, tmp);
    bool first = true;
    format r;
    for (auto const & p : tmp) {
        if (first) {
            first = false;
        } else {
            r += compose(comma(), line());
        }
        r += format{format(p.first), space(), colon(), nest(indent, compose(line(), fmt(p.second, opts)))};
    }

    r = group(r);
    r += format{line(), turnstile, space(), nest(indent, fmt(m_conclusion, opts))};
    return group(r);
}

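// Return a name based on the given suggestion that is not a prefix of any
// hypothesis name already present in this goal.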
name goal::mk_unique_hypothesis_name(name const & suggestion) const {
    name n = suggestion;
    unsigned i = 0;
    // TODO(Leo): investigate if this method is a performance bottleneck
    while (true) {
        bool ok = true;
        for (auto const & p : m_hypotheses) {
            if (is_prefix_of(n, p.first)) {
                ok = false;
                break;
            }
        }
        if (ok) {
            return n;
        } else {
            i++;
            n = name(suggestion, i);
        }
    }
}

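// goal_proof_fn converts a proof of a goal's conclusion, written in terms of the
// hypothesis constants, back into a proof term for the original context by
// abstracting those constants.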
goal_proof_fn::goal_proof_fn(std::vector<expr> && consts):
    m_constants(std::move(consts)) {
}

expr goal_proof_fn::operator()(expr const & pr) const {
    return abstract(pr, m_constants.size(), m_constants.data());
}

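// Collect the names of all constants occurring in the context ctx and in the
// expression t. The resulting set is used to avoid name clashes when creating
// hypothesis names.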
static name_set collect_used_names(context const & ctx, expr const & t) {
    name_set r;
    auto f = [&r](expr const & e, unsigned) { if (is_constant(e)) r.insert(const_name(e)); return true; };
    for_each_fn<decltype(f)> visitor(f);
    for (auto const & e : ctx) {
        if (expr const & d = e.get_domain())
            visitor(d);
        if (expr const & b = e.get_body())
            visitor(b);
    }
    visitor(t);
    return r;
}

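// Return a fresh name based on the given suggestion that does not occur in s,
// and record the chosen name in s.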
static name mk_unique_name(name_set & s, name const & suggestion) {
    unsigned i = 1;
    name n = suggestion;
    while (true) {
        if (s.find(n) == s.end()) {
            s.insert(n);
            return n;
        } else {
            n = name(suggestion, i);
            i++;
        }
    }
}

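// Convert the context ctx and the proposition t into a goal.
// Each free variable is replaced either by a fresh constant, when the corresponding
// context entry becomes a hypothesis, or by the entry's normalized body, when the
// entry is a definition of a non-proposition. The returned goal_proof_fn maps a
// proof of the resulting goal back into a proof term for the original context.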
std::pair<goal, goal_proof_fn> to_goal(environment const & env, context const & ctx, expr const & t) {
    type_inferer inferer(env);
    if (!inferer.is_proposition(t, ctx))
        throw exception("to_goal failed, type is not a proposition");
    name_set used_names = collect_used_names(ctx, t);
    buffer<context_entry> entries;
    for (auto const & e : ctx)
        entries.push_back(e);
    std::reverse(entries.begin(), entries.end());
    buffer<hypothesis> hypotheses; // normalized names and types of the entries processed so far
    buffer<expr> bodies;           // normalized bodies of the entries processed so far
    std::vector<expr> consts;      // cached consts[i] == mk_constant(names[i], hypotheses[i])
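    // Map a free variable to its replacement in the goal: the constant created for
    // the corresponding hypothesis, or the normalized body if the entry was kept as
    // a definition rather than turned into a hypothesis.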
    auto replace_vars = [&](expr const & e, unsigned offset) -> expr {
        if (is_var(e)) {
            unsigned vidx = var_idx(e);
            if (vidx >= offset) {
                unsigned nvidx = vidx - offset;
                unsigned nfv = consts.size();
                if (nvidx >= nfv)
                    throw exception("to_goal failed, unknown free variable");
                unsigned lvl = nfv - nvidx - 1;
                if (bodies[lvl])
                    return bodies[lvl];
                else
                    return consts[lvl];
            }
        }
        return e;
    };
    replace_fn<decltype(replace_vars)> replacer(replace_vars);
    auto it = entries.begin();
    auto end = entries.end();
    for (; it != end; ++it) {
        auto const & e = *it;
        name n = mk_unique_name(used_names, e.get_name());
        expr d = replacer(e.get_domain());
        expr b = replacer(e.get_body());
        // A context entry may have a body but no domain (type), e.g., a let-definition
        // such as "lemma1 := (show A by auto)". In that case, infer the domain from the
        // body so the entry can still become a hypothesis of the goal; otherwise the
        // lemma would not be available when solving the goal.
        if (b && !d) {
            d = inferer(b);
        }
        replacer.clear();
        if (b && !inferer.is_proposition(d)) {
            bodies.push_back(b);
            consts.push_back(expr());
        } else {
            lean_assert(d);
            hypotheses.emplace_back(n, d);
            bodies.push_back(expr());
            consts.push_back(mk_constant(n, d));
        }
    }
    expr conclusion = replacer(t);
    return mk_pair(goal(to_list(hypotheses.begin(), hypotheses.end()), conclusion),
                   goal_proof_fn(std::move(consts)));
}

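// Lua bindings for the hypotheses and goal types.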
DECL_UDATA(hypotheses)
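// hypotheses():         create the empty hypotheses list
// hypotheses(n, e):     create a singleton list
// hypotheses(n, e, hs): add the hypothesis (n, e) in front of hs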
static int mk_hypotheses(lua_State * L) {
    int nargs = lua_gettop(L);
    if (nargs == 0) {
        return push_hypotheses(L, hypotheses());
    } else if (nargs == 2) {
        return push_hypotheses(L, hypotheses(mk_pair(to_name_ext(L, 1), to_nonnull_expr(L, 2)), hypotheses()));
    } else if (nargs == 3) {
        return push_hypotheses(L, hypotheses(mk_pair(to_name_ext(L, 1), to_nonnull_expr(L, 2)), to_hypotheses(L, 3)));
    } else {
        throw exception("hypotheses function expects 0 (empty list), 2 (name & expr for a singleton hypotheses list), or 3 (name & expr & hypotheses list) arguments");
    }
}

static int hypotheses_is_nil(lua_State * L) {
    lua_pushboolean(L, !to_hypotheses(L, 1));
    return 1;
}

static int hypotheses_head(lua_State * L) {
    hypotheses const & hs = to_hypotheses(L, 1);
    if (!hs)
        throw exception("head method expects a non-empty hypotheses list");
    push_name(L, head(hs).first);
    push_expr(L, head(hs).second);
    return 2;
}

static int hypotheses_tail(lua_State * L) {
    hypotheses const & hs = to_hypotheses(L, 1);
    if (!hs)
        throw exception("tail method expects a non-empty hypotheses list");
    push_hypotheses(L, tail(hs));
    return 1;
}

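// Lua iterator function for hypotheses lists: each call returns the next
// (name, expr) pair, or nil when the list is exhausted.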
static int hypotheses_next(lua_State * L) {
    hypotheses & hs = to_hypotheses(L, lua_upvalueindex(1));
    if (hs) {
        push_hypotheses(L, tail(hs));
        lua_replace(L, lua_upvalueindex(1));
        push_name(L, head(hs).first);
        push_expr(L, head(hs).second);
        return 2;
    } else {
        lua_pushnil(L);
        return 1;
    }
}

static int hypotheses_items(lua_State * L) {
    hypotheses & hs = to_hypotheses(L, 1);
    push_hypotheses(L, hs); // upvalue(1): hypotheses
    lua_pushcclosure(L, &safe_function<hypotheses_next>, 1); // create closure with 1 upvalue
    return 1;
}

static int hypotheses_len(lua_State * L) {
    lua_pushinteger(L, length(to_hypotheses(L, 1)));
    return 1;
}

static const struct luaL_Reg hypotheses_m[] = {
    {"__gc",   hypotheses_gc}, // never throws
    {"__len",  safe_function<hypotheses_len>},
    {"size",   safe_function<hypotheses_len>},
    {"pairs",  safe_function<hypotheses_items>},
    {"is_nil", safe_function<hypotheses_is_nil>},
    {"empty",  safe_function<hypotheses_is_nil>},
    {"head",   safe_function<hypotheses_head>},
    {"tail",   safe_function<hypotheses_tail>},
    {0, 0}
};

DECL_UDATA(goal)
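// goal(hs, e): create a goal with hypotheses hs and conclusion e.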
static int mk_goal(lua_State * L) {
    return push_goal(L, goal(to_hypotheses(L, 1), to_nonnull_expr(L, 2)));
}

static int goal_is_null(lua_State * L) {
    lua_pushboolean(L, !to_goal(L, 1));
    return 1;
}

static int goal_hypotheses(lua_State * L) {
    return push_hypotheses(L, to_goal(L, 1).get_hypotheses());
}

static int goal_conclusion(lua_State * L) {
    return push_expr(L, to_goal(L, 1).get_conclusion());
}

static int goal_unique_name(lua_State * L) {
    return push_name(L, to_goal(L, 1).mk_unique_hypothesis_name(to_name_ext(L, 2)));
}

static int goal_tostring(lua_State * L) {
    std::ostringstream out;
    goal & g = to_goal(L, 1);
    if (g) {
        formatter fmt = get_global_formatter(L);
        options opts = get_global_options(L);
        out << mk_pair(g.pp(fmt, opts), opts);
    } else {
        out << "<null-goal>";
    }
    lua_pushstring(L, out.str().c_str());
    return 1;
}

static int goal_pp(lua_State * L) {
    int nargs = lua_gettop(L);
    goal & g = to_goal(L, 1);
    if (!g) {
        return push_format(L, format());
    } else if (nargs == 1) {
        return push_format(L, g.pp(get_global_formatter(L), get_global_options(L)));
    } else if (nargs == 2) {
        if (is_formatter(L, 2))
            return push_format(L, g.pp(to_formatter(L, 2), get_global_options(L)));
        else
            return push_format(L, g.pp(get_global_formatter(L), to_options(L, 2)));
    } else {
        return push_format(L, g.pp(to_formatter(L, 2), to_options(L, 3)));
    }
}

static const struct luaL_Reg goal_m[] = {
    {"__gc",        goal_gc}, // never throws
    {"__tostring",  safe_function<goal_tostring>},
    {"is_null",     safe_function<goal_is_null>},
    {"hypotheses",  safe_function<goal_hypotheses>},
    {"hyps",        safe_function<goal_hypotheses>},
    {"conclusion",  safe_function<goal_conclusion>},
    {"unique_name", safe_function<goal_unique_name>},
    {"pp",          safe_function<goal_pp>},
    {0, 0}
};

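// Migration functions: copy hypotheses/goal userdata from one Lua state to another.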
static void hypotheses_migrate(lua_State * src, int i, lua_State * tgt) {
    push_hypotheses(tgt, to_hypotheses(src, i));
}

static void goal_migrate(lua_State * src, int i, lua_State * tgt) {
    push_goal(tgt, to_goal(src, i));
}

void open_goal(lua_State * L) {
    luaL_newmetatable(L, hypotheses_mt);
    set_migrate_fn_field(L, -1, hypotheses_migrate);
    lua_pushvalue(L, -1);
    lua_setfield(L, -2, "__index");
    setfuncs(L, hypotheses_m, 0);

    SET_GLOBAL_FUN(hypotheses_pred, "is_hypotheses");
    SET_GLOBAL_FUN(mk_hypotheses, "hypotheses");

    luaL_newmetatable(L, goal_mt);
    set_migrate_fn_field(L, -1, goal_migrate);
    lua_pushvalue(L, -1);
    lua_setfield(L, -2, "__index");
    setfuncs(L, goal_m, 0);

    SET_GLOBAL_FUN(goal_pred, "is_goal");
    SET_GLOBAL_FUN(mk_goal, "goal");
}
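
/*
Example usage from Lua (a minimal sketch, not part of this file's API; it assumes
the kernel bindings are open so that Const(name) builds a constant expression):

    local hs = hypotheses("H1", Const("A"), hypotheses("H2", Const("B")))
    local g  = goal(hs, Const("C"))
    assert(is_goal(g))
    print(g)                       -- prints something like: H1 : A, H2 : B ⊢ C
    for n, e in g:hyps():pairs() do
        print(n, e)
    end
    print(g:unique_name("H1"))     -- a fresh name that does not clash with H1
*/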
}