Skip to content

Commit

Permalink
Improve handling of keywords (#402)
Browse files Browse the repository at this point in the history
* Reserve more keywords for future use
* Add use and provide keywords and set them up in the string interner
* Use include_strs to avoid lazy_static for the KEYWORDS list
* Prefix StringInterner keywords properties to avoid clash with rust keywords
  • Loading branch information
Cypher1 authored Apr 27, 2024
1 parent a8bcc97 commit c3bd783
Show file tree
Hide file tree
Showing 5 changed files with 64 additions and 27 deletions.
16 changes: 16 additions & 0 deletions better-std/src/more_pretty_assertions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,22 @@ impl std::fmt::Debug for MultiPretty<String> {
}
}

/// Implementation detail of `include_strs!`: turns a stream of
/// whitespace-separated bare-word tokens into a `&[&'static str]`
/// literal, one stringified element per token.
///
/// NOTE: the repetition uses `tt` rather than `expr`. Under the
/// `macro_rules!` follow-set rules, an `expr` fragment may only be
/// followed by `=>`, `,`, or `;`, so the unseparated repetition
/// `$($line:expr)*` is rejected at compile time. A `tt` fragment has
/// no follow restriction and matches the same keyword-per-line input.
#[macro_export]
macro_rules! include_strs_impl {
    ($($line:tt)*) => {
        &[
            $(stringify!($line),)*
        ]
    };
}

/// Builds a `&[&str]` from the contents of `$file` at compile time by
/// forwarding to `include_strs_impl!`, avoiding any runtime
/// (`lazy_static`) initialization for keyword tables.
///
/// NOTE(review): `macro_rules!` arguments are not eagerly expanded —
/// `include_strs_impl!` receives the literal tokens `include!($file)`,
/// not the included file's contents (builtin macros such as `include!`
/// are only expanded eagerly inside other builtin macros). Confirm this
/// expands to the intended per-line string list rather than a
/// stringified `include!(...)` expression.
#[macro_export]
macro_rules! include_strs {
($file:expr) => {
$crate::include_strs_impl!(include!($file))
};
}

#[macro_export]
macro_rules! assert_str_eq {
($left:expr, $right:expr) => {
Expand Down
2 changes: 2 additions & 0 deletions examples/instances.tk
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
#!/usr/bin/env tako

Bounded = Enum
.open()
.based_on({
Expand Down
33 changes: 21 additions & 12 deletions takolib/src/ast/string_interner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,13 @@ pub struct StringInterner {
// BUT: We can also merge the hashes without losing any information.
pub loc2string: Arc<BTreeMap<IndexIntoFile, StrId>>,
pub strings: Arc<BTreeMap<StrId, String>>,
pub lambda: StrId,
pub pi: StrId,
pub forall: StrId,
pub exists: StrId,
pub kw_lambda: StrId,
pub kw_pi: StrId,
pub kw_forall: StrId,
pub kw_exists: StrId,
pub kw_use: StrId,
pub kw_provide: StrId,
pub kw_public: StrId,
}

impl Default for StringInterner {
Expand All @@ -33,15 +36,21 @@ impl Default for StringInterner {
loc2string: Arc::new(BTreeMap::new()),
strings: Arc::new(BTreeMap::new()),
// These are, temporarily, invalid.
lambda: TypedIndex::max_value(),
pi: TypedIndex::max_value(),
forall: TypedIndex::max_value(),
exists: TypedIndex::max_value(),
kw_lambda: TypedIndex::max_value(),
kw_pi: TypedIndex::max_value(),
kw_forall: TypedIndex::max_value(),
kw_exists: TypedIndex::max_value(),
kw_use: TypedIndex::max_value(),
kw_provide: TypedIndex::max_value(),
kw_public: TypedIndex::max_value(),
};
n.lambda = n.register_str("lambda");
n.pi = n.register_str("pi");
n.forall = n.register_str("forall");
n.exists = n.register_str("exists");
n.kw_lambda = n.register_str("lambda");
n.kw_pi = n.register_str("pi");
n.kw_forall = n.register_str("forall");
n.kw_exists = n.register_str("exists");
n.kw_use = n.register_str("use");
n.kw_provide = n.register_str("provide");
n.kw_public = n.register_str("public");
for key in KEYWORDS.iter() {
n.register_str(key);
}
Expand Down
22 changes: 19 additions & 3 deletions takolib/src/parser/keywords.txt
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
False
TRUE
True
any
check
construct
constructor
exists
export
exports
expose
false
forall
from
Expand All @@ -14,16 +16,30 @@ imports
in
include
includes
inject
lambda
module
new
nil
null
pi
preserve
private
provide
public
publish
read
require
requires
sigma
such
suchthat
that
true
undefined
unit
use
void
where
with
write
18 changes: 6 additions & 12 deletions takolib/src/parser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ use crate::ast::location::Location;
use crate::ast::string_interner::Identifier;
use crate::ast::{Ast, Atom, Call, Contains, Definition, NodeData, NodeId, Op};
use crate::error::TError;
use better_std::include_strs;
use log::trace;
use semantics::BindingMode;
use semantics::Literal;
Expand All @@ -12,14 +13,7 @@ use std::path::Path;
use thiserror::Error;
use tokens::{assign_op, binding_mode_operation, is_assign, OpBinding, Symbol, Token, TokenType};

use lazy_static::lazy_static;

lazy_static! {
pub static ref KEYWORDS: Vec<String> = include_str!("keywords.txt")
.split('\n')
.map(|s| s.to_string())
.collect();
}
pub const KEYWORDS: &[&str] = include_strs!("keywords.txt");

#[derive(Debug, Error, PartialEq, Eq, Ord, PartialOrd, Clone, Hash)]
pub enum ParseError {
Expand Down Expand Up @@ -620,13 +614,13 @@ pub fn parse(filepath: &Path, contents: &str, tokens: &[Token]) -> Result<Ast, T

fn normalize_keywords_as_ops(ast: &Ast, name: Identifier) -> TokenType {
let interner = &ast.string_interner;
let op = if name == interner.lambda {
let op = if name == interner.kw_lambda {
Symbol::Lambda
} else if name == interner.pi {
} else if name == interner.kw_pi {
Symbol::Pi
} else if name == interner.forall {
} else if name == interner.kw_forall {
Symbol::Forall
} else if name == interner.exists {
} else if name == interner.kw_exists {
Symbol::Exists
} else {
return TokenType::Ident;
Expand Down

0 comments on commit c3bd783

Please sign in to comment.