chore(frontends/lean): use consistent name conventions, rename token_set to token_table

Signed-off-by: Leonardo de Moura <leonardo@microsoft.com>
Leonardo de Moura 2014-06-10 09:11:45 -07:00
parent 00e0cc15ba
commit 546f9dc00b
9 changed files with 80 additions and 80 deletions

@@ -1,4 +1,4 @@
-add_library(lean_frontend register_module.cpp token_set.cpp
+add_library(lean_frontend register_module.cpp token_table.cpp
   scanner.cpp parse_table.cpp)
 target_link_libraries(lean_frontend ${LEAN_LIBS})

@@ -9,7 +9,7 @@ Author: Leonardo de Moura
 #include "util/buffer.h"
 #include "util/lua.h"
 #include "kernel/expr.h"
-#include "frontends/lean/token_set.h"
+#include "frontends/lean/token_table.h"
 namespace lean {
 class parser_context;

@@ -7,12 +7,12 @@ Author: Leonardo de Moura
 #include <sstream>
 #include "util/lua.h"
 #include "util/script_state.h"
-#include "frontends/lean/token_set.h"
+#include "frontends/lean/token_table.h"
 #include "frontends/lean/parse_table.h"
 namespace lean {
 void open_frontend_lean(lua_State * L) {
-    open_token_set(L);
+    open_token_table(L);
     open_parse_table(L);
 }
 void register_frontend_lean_module() {

@@ -211,7 +211,7 @@ auto scanner::read_key_cmd_id() -> token_kind {
     static char const * error_msg = "unexpected token";
     char c = curr();
     unsigned num_cs = 1; // number of characters read
-    token_set const * it = find(m_tokens, c);
+    token_table const * it = find(m_tokens, c);
     token_info const * info = nullptr;
     unsigned key_size = 0;
     if (it) {
@@ -318,7 +318,7 @@ auto scanner::scan() -> token_kind {
     }
 }
-scanner::scanner(token_set const & tks, std::istream & strm, char const * strm_name):
+scanner::scanner(token_table const & tks, std::istream & strm, char const * strm_name):
     m_tokens(tks), m_stream(strm), m_spos(0), m_sline(1), m_curr(0), m_pos(0), m_line(1),
     m_token_info(nullptr) {
     m_stream_name = strm_name ? strm_name : "[unknown]";

@@ -8,7 +8,7 @@ Author: Leonardo de Moura
 #include <iostream>
 #include "util/name.h"
 #include "util/numerics/mpq.h"
-#include "frontends/lean/token_set.h"
+#include "frontends/lean/token_table.h"
 namespace lean {
 /**
@@ -22,7 +22,7 @@ class scanner {
 public:
     enum class token_kind {Keyword, CommandKeyword, ScriptBlock, Identifier, Numeral, Decimal, String, Eof};
 protected:
-    token_set m_tokens;
+    token_table m_tokens;
     std::istream & m_stream;
     std::string m_stream_name;
@@ -60,7 +60,7 @@ protected:
     token_kind read_key_cmd_id();
 public:
-    scanner(token_set const & tks, std::istream & strm, char const * strm_name = nullptr);
+    scanner(token_table const & tks, std::istream & strm, char const * strm_name = nullptr);
     int get_line() const { return m_line; }
     int get_pos() const { return m_pos; }
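
For orientation only: the renamed scanner is driven the same way the updated unit test further below drives it. A minimal sketch, assuming the frontends/lean headers are on the include path; the dump_tokens helper is illustrative and not part of this commit.

#include <iostream>
#include <sstream>
#include "frontends/lean/scanner.h"
using namespace lean;

// Illustrative helper (not part of this commit): scan a string with the
// default token_table and print the numeric code of each token kind.
static void dump_tokens(char const * str) {
    token_table tbl = mk_default_token_table();
    std::istringstream in(str);
    scanner s(tbl, in, "[string]");
    while (true) {
        scanner::token_kind k = s.scan();
        if (k == scanner::token_kind::Eof)
            break;
        std::cout << static_cast<int>(k) << " ";
    }
    std::cout << "\n";
}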

@@ -5,28 +5,28 @@ Released under Apache 2.0 license as described in the file LICENSE.
 Author: Leonardo de Moura
 */
 #include <utility>
-#include "frontends/lean/token_set.h"
+#include "frontends/lean/token_table.h"
 namespace lean {
-token_set add_command_token(token_set const & s, char const * token) {
+token_table add_command_token(token_table const & s, char const * token) {
     return insert(s, token, token_info(token));
 }
-token_set add_command_token(token_set const & s, char const * token, char const * val) {
+token_table add_command_token(token_table const & s, char const * token, char const * val) {
     return insert(s, token, token_info(val));
 }
-token_set add_token(token_set const & s, char const * token, unsigned prec) {
+token_table add_token(token_table const & s, char const * token, unsigned prec) {
     return insert(s, token, token_info(token, prec));
 }
-token_set add_token(token_set const & s, char const * token, char const * val, unsigned prec) {
+token_table add_token(token_table const & s, char const * token, char const * val, unsigned prec) {
     return insert(s, token, token_info(val, prec));
 }
-token_set const * find(token_set const & s, char c) {
+token_table const * find(token_table const & s, char c) {
     return s.find(c);
 }
-token_info const * value_of(token_set const & s) {
+token_info const * value_of(token_table const & s) {
     return s.value();
 }
-void for_each(token_set const & s, std::function<void(char const *, token_info const &)> const & fn) {
+void for_each(token_table const & s, std::function<void(char const *, token_info const &)> const & fn) {
     s.for_each([&](unsigned num, char const * keys, token_info const & info) {
         buffer<char> str;
         str.append(num, keys);
@@ -41,10 +41,10 @@ static char const * g_forall_unicode = "\u2200";
 static char const * g_arrow_unicode = "\u2192";
 // Auxiliary class for creating the initial token set
-class init_token_set_fn {
+class init_token_table_fn {
 public:
-    token_set m_token_set;
-    init_token_set_fn() {
+    token_table m_token_table;
+    init_token_table_fn() {
         char const * builtin[] = {"fun", "Pi", "let", "in", "have", "show", "by", "from", "(", ")", "{", "}",
                                   ".{", "Type", "...", ",", ".", ":", "calc", ":=", "--", "(*", "(--", "->",
                                   "proof", "qed", "private", nullptr};
@@ -64,52 +64,52 @@ public:
         auto it = builtin;
         while (*it) {
-            m_token_set = add_token(m_token_set, *it);
+            m_token_table = add_token(m_token_table, *it);
             it++;
         }
         it = commands;
         while (*it) {
-            m_token_set = add_command_token(m_token_set, *it);
+            m_token_table = add_command_token(m_token_table, *it);
             ++it;
         }
         auto it2 = aliases;
         while (it2->first) {
-            m_token_set = add_token(m_token_set, it2->first, it2->second);
+            m_token_table = add_token(m_token_table, it2->first, it2->second);
             it2++;
         }
         it2 = cmd_aliases;
         while (it2->first) {
-            m_token_set = add_command_token(m_token_set, it2->first, it2->second);
+            m_token_table = add_command_token(m_token_table, it2->first, it2->second);
             ++it2;
         }
     }
 };
-static init_token_set_fn g_init;
-token_set mk_token_set() { return token_set(); }
-token_set mk_default_token_set() { return g_init.m_token_set; }
-DECL_UDATA(token_set)
-static int mk_token_set(lua_State * L) { return push_token_set(L, mk_token_set()); }
-static int mk_default_token_set(lua_State * L) { return push_token_set(L, mk_default_token_set()); }
+static init_token_table_fn g_init;
+token_table mk_token_table() { return token_table(); }
+token_table mk_default_token_table() { return g_init.m_token_table; }
+DECL_UDATA(token_table)
+static int mk_token_table(lua_State * L) { return push_token_table(L, mk_token_table()); }
+static int mk_default_token_table(lua_State * L) { return push_token_table(L, mk_default_token_table()); }
 static int add_command_token(lua_State * L) {
     int nargs = lua_gettop(L);
     if (nargs == 2)
-        return push_token_set(L, add_command_token(to_token_set(L, 1), lua_tostring(L, 2)));
+        return push_token_table(L, add_command_token(to_token_table(L, 1), lua_tostring(L, 2)));
     else
-        return push_token_set(L, add_command_token(to_token_set(L, 1), lua_tostring(L, 2), lua_tostring(L, 3)));
+        return push_token_table(L, add_command_token(to_token_table(L, 1), lua_tostring(L, 2), lua_tostring(L, 3)));
 }
 static int add_token(lua_State * L) {
     int nargs = lua_gettop(L);
     if (nargs == 3)
-        return push_token_set(L, add_token(to_token_set(L, 1), lua_tostring(L, 2), lua_tonumber(L, 3)));
+        return push_token_table(L, add_token(to_token_table(L, 1), lua_tostring(L, 2), lua_tonumber(L, 3)));
     else
-        return push_token_set(L, add_token(to_token_set(L, 1), lua_tostring(L, 2), lua_tostring(L, 3), lua_tonumber(L, 4)));
+        return push_token_table(L, add_token(to_token_table(L, 1), lua_tostring(L, 2), lua_tostring(L, 3), lua_tonumber(L, 4)));
 }
 static int merge(lua_State * L) {
-    return push_token_set(L, merge(to_token_set(L, 1), to_token_set(L, 2)));
+    return push_token_table(L, merge(to_token_table(L, 1), to_token_table(L, 2)));
 }
 static int find(lua_State * L) {
     char k;
@@ -121,14 +121,14 @@ static int find(lua_State * L) {
             throw exception("arg #2 must be a string of length 1");
         k = str[0];
     }
-    auto it = to_token_set(L, 1).find(k);
+    auto it = to_token_table(L, 1).find(k);
     if (it)
-        return push_token_set(L, *it);
+        return push_token_table(L, *it);
     else
         return push_nil(L);
 }
 static int value_of(lua_State * L) {
-    auto it = value_of(to_token_set(L, 1));
+    auto it = value_of(to_token_table(L, 1));
     if (it) {
         push_boolean(L, it->is_command());
         push_name(L, it->value());
@@ -140,7 +140,7 @@ static int value_of(lua_State * L) {
     }
 }
 static int for_each(lua_State * L) {
-    token_set const & t = to_token_set(L, 1);
+    token_table const & t = to_token_table(L, 1);
     luaL_checktype(L, 2, LUA_TFUNCTION); // user-fun
     for_each(t, [&](char const * k, token_info const & info) {
             lua_pushvalue(L, 2);
@@ -153,8 +153,8 @@ static int for_each(lua_State * L) {
     return 0;
 }
-static const struct luaL_Reg token_set_m[] = {
-    {"__gc", token_set_gc},
+static const struct luaL_Reg token_table_m[] = {
+    {"__gc", token_table_gc},
     {"add_command_token", safe_function<add_command_token>},
     {"add_token", safe_function<add_token>},
     {"merge", safe_function<merge>},
@@ -164,14 +164,14 @@ static const struct luaL_Reg token_set_m[] = {
     {0, 0}
 };
-void open_token_set(lua_State * L) {
-    luaL_newmetatable(L, token_set_mt);
+void open_token_table(lua_State * L) {
+    luaL_newmetatable(L, token_table_mt);
     lua_pushvalue(L, -1);
     lua_setfield(L, -2, "__index");
-    setfuncs(L, token_set_m, 0);
-    SET_GLOBAL_FUN(token_set_pred, "is_token_set");
-    SET_GLOBAL_FUN(mk_default_token_set, "default_token_set");
-    SET_GLOBAL_FUN(mk_token_set, "token_set");
+    setfuncs(L, token_table_m, 0);
+    SET_GLOBAL_FUN(token_table_pred, "is_token_table");
+    SET_GLOBAL_FUN(mk_default_token_table, "default_token_table");
+    SET_GLOBAL_FUN(mk_token_table, "token_table");
 }
 }

@@ -25,15 +25,15 @@ public:
     unsigned precedence() const { return m_precedence; }
 };
-typedef ctrie<token_info> token_set;
-token_set mk_token_set();
-token_set mk_default_token_set();
-token_set add_command_token(token_set const & s, char const * token);
-token_set add_command_token(token_set const & s, char const * token, char const * val);
-token_set add_token(token_set const & s, char const * token, unsigned prec = 0);
-token_set add_token(token_set const & s, char const * token, char const * val, unsigned prec = 0);
-void for_each(token_set const & s, std::function<void(char const *, token_info const&)> const & fn);
-token_set const * find(token_set const & s, char c);
-token_info const * value_of(token_set const & s);
-void open_token_set(lua_State * L);
+typedef ctrie<token_info> token_table;
+token_table mk_token_table();
+token_table mk_default_token_table();
+token_table add_command_token(token_table const & s, char const * token);
+token_table add_command_token(token_table const & s, char const * token, char const * val);
+token_table add_token(token_table const & s, char const * token, unsigned prec = 0);
+token_table add_token(token_table const & s, char const * token, char const * val, unsigned prec = 0);
+void for_each(token_table const & s, std::function<void(char const *, token_info const&)> const & fn);
+token_table const * find(token_table const & s, char c);
+token_info const * value_of(token_table const & s);
+void open_token_table(lua_State * L);
 }
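
For reference, a minimal, illustrative sketch of how the renamed interface fits together (the extra tokens and the main function are assumptions, not part of this commit), assuming frontends/lean/token_table.h is on the include path:

#include <iostream>
#include "frontends/lean/token_table.h"
using namespace lean;

int main() {
    // Start from the default table and register two extra tokens,
    // mirroring what the updated tests do (names chosen for illustration).
    token_table t = mk_default_token_table();
    t = add_token(t, "+", "plus", 65);        // keyword with precedence
    t = add_command_token(t, "check", "chk"); // command keyword
    // find() descends the trie by one character; value_of() returns the
    // token_info stored at the current node, or nullptr if there is none.
    if (token_table const * n = find(t, '+')) {
        if (token_info const * info = value_of(*n))
            std::cout << info->value() << " prec: " << info->precedence() << "\n";
    }
    // Enumerate every registered token.
    for_each(t, [](char const * tk, token_info const & info) {
        std::cout << tk << (info.is_command() ? " [command]" : "") << "\n";
    });
    return 0;
}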

@@ -13,7 +13,7 @@ using namespace lean;
 #define tk scanner::token_kind
-static void scan(char const * str, token_set set = mk_default_token_set()) {
+static void scan(char const * str, token_table set = mk_default_token_table()) {
     std::istringstream in(str);
     scanner s(set, in, "[string]");
     while (true) {
@@ -35,7 +35,7 @@ static void scan(char const * str, token_set set = mk_default_token_set()) {
     std::cout << "\n";
 }
-static void scan_success(char const * str, token_set set = mk_default_token_set()) {
+static void scan_success(char const * str, token_table set = mk_default_token_table()) {
     try {
         scan(str, set);
     } catch (exception & ex) {
@@ -54,7 +54,7 @@ static void scan_error(char const * str) {
 }
 static void check(char const * str, std::initializer_list<tk> const & l,
-                  token_set set = mk_default_token_set()) {
+                  token_table set = mk_default_token_table()) {
     try {
         auto it = l.begin();
         std::istringstream in(str);
@@ -75,7 +75,7 @@ static void check(char const * str, std::initializer_list<tk> const & l,
     }
 }
-static void check_name(char const * str, name const & expected, token_set set = mk_default_token_set()) {
+static void check_name(char const * str, name const & expected, token_table set = mk_default_token_table()) {
     std::istringstream in(str);
     scanner s(set, in, "[string]");
     tk k = s.scan();
@@ -84,7 +84,7 @@ static void check_name(char const * str, name const & expected, token_set set =
     lean_assert(s.scan() == tk::Eof);
 }
-static void check_keyword(char const * str, name const & expected, token_set set = mk_default_token_set()) {
+static void check_keyword(char const * str, name const & expected, token_table set = mk_default_token_table()) {
     std::istringstream in(str);
     scanner s(set, in, "[string]");
     tk k = s.scan();
@@ -94,7 +94,7 @@ static void check_keyword(char const * str, name const & expected, token_set set
 }
 static void tst1() {
-    token_set s = mk_default_token_set();
+    token_table s = mk_default_token_table();
     s = add_token(s, "+", "plus");
     s = add_token(s, "=", "eq");
     scan_success("a..a");
@@ -133,7 +133,7 @@ static void tst2() {
     check_name("x.bla", name({"x", "bla"}));
     scan_error("+++");
-    token_set s = mk_default_token_set();
+    token_table s = mk_default_token_table();
     s = add_token(s, "+++", "tplus");
     check_keyword("+++", "tplus", s);
     s = add_token(s, "+", "plus");

@@ -1,4 +1,4 @@
-function display_token_set(s)
+function display_token_table(s)
    s:for_each(function(k, cmd, val, prec)
       io.write(k)
       if cmd then
@@ -8,15 +8,15 @@ function display_token_set(s)
    end)
 end
-function token_set_size(s)
+function token_table_size(s)
    local r = 0
    s:for_each(function() r = r + 1 end)
    return r
 end
-local s = token_set()
-assert(is_token_set(s))
-assert(token_set_size(s) == 0)
+local s = token_table()
+assert(is_token_table(s))
+assert(token_table_size(s) == 0)
 s = s:add_command_token("test", "tst1")
 s = s:add_command_token("tast", "tst2")
 s = s:add_command_token("tests", "tst3")
@@ -24,22 +24,22 @@ s = s:add_command_token("fests", "tst4")
 s = s:add_command_token("tes", "tst5")
 s = s:add_token("++", "++", 65)
 s = s:add_token("++-", "plusminus")
-assert(token_set_size(s) == 7)
-display_token_set(s)
+assert(token_table_size(s) == 7)
+display_token_table(s)
 print("========")
-local s2 = default_token_set()
-display_token_set(s2)
-assert(token_set_size(s2) > 0)
-local sz1 = token_set_size(s)
-local sz2 = token_set_size(s2)
+local s2 = default_token_table()
+display_token_table(s2)
+assert(token_table_size(s2) > 0)
+local sz1 = token_table_size(s)
+local sz2 = token_table_size(s2)
 s2 = s2:merge(s)
-assert(token_set_size(s2) == sz1 + sz2)
+assert(token_table_size(s2) == sz1 + sz2)
 s2 = s2:find("t"):find("e")
 print("========")
-display_token_set(s2)
-assert(token_set_size(s2) == 3)
+display_token_table(s2)
+assert(token_table_size(s2) == 3)
 s2 = s2:find("s")
 local cmd, val, prec = s2:value_of()
 assert(val == name("tst5"))