use std::{
    collections::VecDeque,
    error::Error,
    io::{self, Write},
};

mod lexer;
/// A single lexical token of the textual ATerm format, as produced by
/// `lexer::lex`. String literals are split into open / content / close
/// tokens so the lexer can stay a simple state machine.
#[derive(Debug, Clone, PartialEq)]
enum Token {
    /// A constructor name, e.g. the `Appl` in `Appl(...)`.
    Symbol(String),
    /// A numeric literal, kept as its source text (not yet interpreted).
    Number(String),
    /// `[` — opens a list term.
    LeftBracket,
    /// `]` — closes a list term.
    RightBracket,
    /// `(` — opens an argument list or a tuple ("cons") term.
    LeftParenthesis,
    /// `)` — closes an argument list or tuple.
    RightParenthesis,
    /// `,` — separates sibling terms inside a list/argument list.
    Comma,
    /// The opening `"` of a string literal.
    StringOpen,
    /// A run of (already unescaped) characters inside a string literal.
    StringLiteralContent(String),
    /// The closing `"` of a string literal.
    StringClose,
}
/// A parsed ATerm tree.
#[derive(Debug, Clone)]
enum ATerm {
    /// Constructor application: `Name(arg1, arg2, ...)`.
    Appl(String, Vec<ATerm>),
    /// List term: `[t1, t2, ...]`.
    List(Vec<ATerm>),
    /// Tuple ("cons") term: `(t1, t2, ...)` with no leading symbol.
    Cons(Vec<ATerm>),
    /// String literal with escapes already resolved.
    String(String),
}
31
fn main() -> Result<(), Box<dyn Error>> {
32
let path = std::env::args_os().skip(1).next().unwrap();
33
let content = std::fs::read_to_string(&path)?;
34
35
let (mut tokens, maybe_errix) = lexer::lex(&content);
36
37
if let Some(errix) = maybe_errix {
38
for token in &tokens {
39
println!("{token:?}");
40
}
41
let context = &content[errix..][..50];
42
println!("error at byte {errix} of {path:?}: {context:?}");
43
return Ok(());
44
}
45
46
let term = parse(&mut tokens)?;
47
pp(&mut std::io::stdout().lock(), &term, &mut String::new())?;
48
49
Ok(())
50
}
51
52
fn pp<W: Write>(write: &mut W, term: &ATerm, indent: &mut String) -> io::Result<()> {
53
match term {
54
ATerm::Appl(sym, terms) => {
55
write!(write, "{}(", &sym)?;
56
pp_list(write, terms, ")", indent)?;
57
}
58
ATerm::List(terms) => {
59
write!(write, "[")?;
60
pp_list(write, terms, "]", indent)?;
61
}
62
ATerm::Cons(terms) => {
63
write!(write, "(")?;
64
pp_list(write, terms, ")", indent)?;
65
}
66
ATerm::String(s) => {
67
write!(write, "{:?}", &s)?;
68
}
69
}
70
Ok(())
71
}
72
73
fn pp_list<W: Write>(
74
write: &mut W,
75
terms: &[ATerm],
76
list_end: &str,
77
indent: &mut String,
78
) -> io::Result<()> {
79
match terms.len() {
80
0 => write!(write, "{}", list_end)?,
81
1 => {
82
pp(write, &terms[0], indent)?;
83
write!(write, "{}", list_end)?;
84
}
85
_ => {
86
writeln!(write)?;
87
indent.push_str(" ");
88
for term in terms {
89
write!(write, "{}", indent)?;
90
pp(write, term, indent)?;
91
writeln!(write)?;
92
}
93
indent.drain(indent.len() - 2..);
94
write!(write, "{}{}", &indent, list_end)?;
95
}
96
}
97
Ok(())
98
}
99
100
fn parse(tokens: &mut VecDeque<Token>) -> Result<ATerm, Box<dyn Error>> {
101
let Some(start) = tokens.pop_front() else {
102
panic!("unexpected end of token stream");
103
};
104
match start {
105
Token::Symbol(sym) => {
106
let Some(Token::LeftParenthesis) = tokens.pop_front() else {
107
panic!("expected LeftParenthesis after Symbol");
108
};
109
return Ok(ATerm::Appl(
110
sym,
111
parse_list(tokens, Token::RightParenthesis)?,
112
));
113
}
114
Token::Number(_) => todo!(),
115
Token::LeftBracket => {
116
return Ok(ATerm::List(parse_list(tokens, Token::RightBracket)?));
117
}
118
Token::LeftParenthesis => {
119
return Ok(ATerm::Cons(parse_list(tokens, Token::RightParenthesis)?));
120
}
121
Token::StringOpen => {
122
let mut s = String::new();
123
loop {
124
let Some(tok) = tokens.pop_front() else {
125
panic!("expected token in string");
126
};
127
match tok {
128
Token::StringLiteralContent(content) => s.push_str(&content),
129
Token::StringClose => return Ok(ATerm::String(s)),
130
_ => panic!("unexpected token {tok:?} in String"),
131
}
132
}
133
}
134
_ => panic!("unexpected token {start:?}"),
135
}
136
}
137
138
fn parse_list(
139
tokens: &mut VecDeque<Token>,
140
end_token: Token,
141
) -> Result<Vec<ATerm>, Box<dyn Error>> {
142
let Some(tok) = tokens.pop_front() else {
143
panic!("expected token in Appl");
144
};
145
match tok {
146
Token::RightParenthesis => {
147
return Ok(vec![]);
148
}
149
_ => {
150
tokens.push_front(tok);
151
let mut terms = vec![parse(tokens)?];
152
loop {
153
let Some(tok) = tokens.pop_front() else {
154
panic!("expected another token in Appl");
155
};
156
if tok == end_token {
157
return Ok(terms);
158
}
159
match tok {
160
Token::Comma => terms.push(parse(tokens)?),
161
_ => panic!("expected Comma or {end_token:?} in Appl"),
162
}
163
}
164
}
165
}
166
}
167