refactor: straighten token
parent 5da156485c
commit f84d6fdc85

4 changed files with 68 additions and 28 deletions
@@ -1,6 +1,6 @@
 import gleam/bool
 import gleam/int
-import gleam/iterator.{type Iterator}
+import gleam/iterator.{type Iterator, type Step, Done, Next}
 import gleam/result
 import gleam/string
@@ -9,6 +9,10 @@ pub type Token {
   Number(Int)
 }

+pub type Error {
+  UnexpectedEndOfTokens
+}
+
 pub fn from_string(token: String) -> Token {
   case int.parse(token) {
     Ok(n) -> Number(n)
@@ -23,14 +27,14 @@ fn is_whitespace(string: String) -> Bool {
   }
 }

-fn yield_token(acc: String, string: String) -> iterator.Step(Token, String) {
-  use <- bool.guard(when: acc == "" && string == "", return: iterator.Done)
+fn yield_token(acc: String, string: String) -> Step(Token, String) {
+  use <- bool.guard(when: acc == "" && string == "", return: Done)

   let #(char, rest) = string.pop_grapheme(string) |> result.unwrap(#("", ""))

   case is_whitespace(char), acc == "" {
     True, True -> yield_token("", rest)
-    True, _ -> iterator.Next(from_string(acc), rest)
+    True, _ -> Next(from_string(acc), rest)
     False, _ -> yield_token(acc <> char, rest)
   }
 }
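(Review note: the changes in this hunk are purely cosmetic. With `Step`, `Done`, and `Next` imported unqualified in the first hunk, `yield_token` can drop the `iterator.` prefixes without any change in behaviour.)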
@@ -38,3 +42,18 @@ fn yield_token(acc: String, string: String) -> iterator.Step(Token, String) {
 pub fn tokenize(string: String) -> Iterator(Token) {
   iterator.unfold(from: string, with: yield_token("", _))
 }
+
+pub fn skip_over(
+  in tokens: Iterator(Token),
+  this token: Token,
+) -> Result(Iterator(Token), Error) {
+  let tokens =
+    tokens
+    |> iterator.drop_while(fn(t) { t != token })
+    |> iterator.drop(1)
+
+  case iterator.first(tokens) {
+    Ok(_) -> Ok(tokens)
+    Error(_) -> Error(UnexpectedEndOfTokens)
+  }
+}
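(Review note: a minimal sketch of how the new `skip_over` composes with `tokenize`. The input string and the `example` function are invented for illustration; everything else follows the signatures in this diff.)

import fortheck/token
import gleam/iterator

pub fn example() {
  // Drop everything up to and including the first MUL token.
  // skip_over returns Error(UnexpectedEndOfTokens) when no token follows it.
  let assert Ok(rest) =
    token.tokenize("1 2 MUL 3")
    |> token.skip_over(token.Word("MUL"))

  iterator.to_list(rest)
  // -> [token.Number(3)]
}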
@@ -1,4 +1,4 @@
-import fortheck/stack.{type Error, StackOverflow, StackUnderflow}
+import fortheck/stack.{StackOverflow, StackUnderflow}
 import gleam/iterator
 import gleam/pair
 import gleam/result
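(Review note: presumably `type Error` is dropped from this unqualified import because `fortheck/token` now exports its own `Error` type; call sites can disambiguate with the qualified forms `stack.Error` and `token.Error` instead.)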
test/fortheck/token_test.gleam (new file, +44)
@@ -0,0 +1,44 @@
+import fortheck/token.{UnexpectedEndOfTokens}
+import gleam/iterator
+import gleeunit
+import gleeunit/should
+
+pub fn main() {
+  gleeunit.main()
+}
+
+pub fn tokenize_test() {
+  let string = "3 4\nMUL\t5 \n\n \n dIv"
+
+  string
+  |> token.tokenize
+  |> iterator.to_list
+  |> should.equal([
+    token.Number(3),
+    token.Number(4),
+    token.Word("MUL"),
+    token.Number(5),
+    token.Word("DIV"),
+  ])
+}
+
+pub fn skip_over_test() {
+  let string = "3 4\nMUL\t5 \n\n \n mul dIv"
+
+  string
+  |> token.tokenize
+  |> token.skip_over(token.Word("MUL"))
+  |> should.be_ok
+  |> iterator.to_list
+  |> should.equal([token.Number(5), token.Word("MUL"), token.Word("DIV")])
+}
+
+pub fn skip_over_eof_test() {
+  let string = "3 4\nMUL\t5 \n\n \n mul dIv"
+
+  string
+  |> token.tokenize
+  |> token.skip_over(token.Word("DIV"))
+  |> should.be_error
+  |> should.equal(UnexpectedEndOfTokens)
+}
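(Review note: `skip_over_eof_test` exercises the end-of-input branch: `DIV` is the final token, so after `drop_while` and `drop(1)` the iterator is empty, `iterator.first` fails, and `skip_over` returns `UnexpectedEndOfTokens`. Assuming the standard project layout, `gleam test` picks these tests up through gleeunit's `*_test` function discovery.)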
@@ -1,23 +0,0 @@
-import fortheck/token
-import gleam/iterator
-import gleeunit
-import gleeunit/should
-
-pub fn main() {
-  gleeunit.main()
-}
-
-pub fn tokenize_test() {
-  let string = "3 4\nMUL\t5 \n\n \n dIv"
-
-  string
-  |> token.tokenize
-  |> iterator.to_list
-  |> should.equal([
-    token.Number(3),
-    token.Number(4),
-    token.Word("MUL"),
-    token.Number(5),
-    token.Word("DIV"),
-  ])
-}
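(Review note: this deleted module is the old test file; its `tokenize_test` moved verbatim into `test/fortheck/token_test.gleam` above.)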