Remove old lexer+parser implementation
This commit is contained in:
parent d5632e9217
commit a4ec29f084
15 changed files with 0 additions and 706 deletions
@@ -1,62 +0,0 @@
package lexer

import lexer.errors.LexingError
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertThrows
import kotlin.test.assertEquals

/**
 * Tests for the Prolog lexer.
 *
 * These tests are based on the Prolog syntax.
 */
class ScanPrologParserTests {
    @Test
    fun scan_simple_atom() {
        val tokens = Lexer("atom.").scan()

        assertEquals(3, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.DOT, tokens[1].type, "Expected DOT token, got ${tokens[1].type}")
        assertEquals(TokenType.EOF, tokens[2].type, "Expected EOF token, got ${tokens[2].type}")
    }

    @Test
    fun scan_variable() {
        val tokens = Lexer("X.").scan()

        assertEquals(3, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.DOT, tokens[1].type, "Expected DOT token, got ${tokens[1].type}")
        assertEquals(TokenType.EOF, tokens[2].type, "Expected EOF token, got ${tokens[2].type}")
    }

    @Test
    fun scan_variable_with_number() {
        val tokens = Lexer("X1.").scan()

        assertEquals(3, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.DOT, tokens[1].type, "Expected DOT token, got ${tokens[1].type}")
        assertEquals(TokenType.EOF, tokens[2].type, "Expected EOF token, got ${tokens[2].type}")
    }

    @Test
    fun scan_variable_with_underscore() {
        val tokens = Lexer("X_1.").scan()

        assertEquals(3, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.DOT, tokens[1].type, "Expected DOT token, got ${tokens[1].type}")
        assertEquals(TokenType.EOF, tokens[2].type, "Expected EOF token, got ${tokens[2].type}")
    }

    @Test
    fun scan_variable_that_starts_with_a_number() {
        assertThrows<LexingError> { Lexer("1X.").scan() }
    }
}
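For orientation, the tests above pin down the surface of the removed lexer: Lexer(source).scan() returns a token list terminated by an EOF token, atoms and variables alike come back as ALPHANUMERIC, and an input such as "1X." raises LexingError. Below is a minimal sketch of that surface reconstructed from the assertions alone; the removed implementation is not visible in this diff, so every internal detail here is an assumption.

package lexer

// Sketch reconstructed from the deleted tests. Only the members the tests
// touch (type, value, position, scan()) are grounded in the diff; the
// scanning logic itself is an assumption, not the removed implementation.
enum class TokenType { ALPHANUMERIC, DOT, PARENTHESIS_LEFT, PARENTHESIS_RIGHT, EOF }

data class TokenPosition(val line: Int, val column: Int, val length: Int)

data class Token(val type: TokenType, val value: String, val position: TokenPosition)

// The deleted code kept this under lexer.errors; defined locally for the sketch.
class LexingError(message: String) : Exception(message)

class Lexer(private val source: String) {
    private var index = 0
    private var line = 0
    private var column = 0

    // Walks the source once and returns all tokens plus a trailing EOF token.
    fun scan(): List<Token> {
        val tokens = mutableListOf<Token>()
        while (index < source.length) {
            val c = source[index]
            when {
                c == '\n' -> { index++; line++; column = 0 }
                c.isWhitespace() -> advance(1)
                // '%' starts a line comment, discarded up to the newline.
                c == '%' -> while (index < source.length && source[index] != '\n') index++
                c == '.' -> tokens += symbol(TokenType.DOT, ".")
                c == '(' -> tokens += symbol(TokenType.PARENTHESIS_LEFT, "(")
                c == ')' -> tokens += symbol(TokenType.PARENTHESIS_RIGHT, ")")
                c == '"' -> tokens += quotedString()
                c.isLetter() || c == '_' -> tokens += word()
                else -> throw LexingError("Unknown symbol '$c' at $line:$column")
            }
        }
        tokens += Token(TokenType.EOF, "", TokenPosition(line, column, 0))
        return tokens
    }

    private fun advance(n: Int) { index += n; column += n }

    private fun symbol(type: TokenType, value: String): Token {
        val token = Token(type, value, TokenPosition(line, column, value.length))
        advance(value.length)
        return token
    }

    // Letters, digits, and underscores form one ALPHANUMERIC token, which is
    // why atoms and variables are indistinguishable at this level.
    private fun word(): Token {
        val startColumn = column
        val start = index
        while (index < source.length && (source[index].isLetterOrDigit() || source[index] == '_')) advance(1)
        val value = source.substring(start, index)
        return Token(TokenType.ALPHANUMERIC, value, TokenPosition(line, startColumn, value.length))
    }

    // Quoted strings are unwrapped and emitted as ALPHANUMERIC, matching
    // the scan_simple_quoted_string_returns_string test below.
    private fun quotedString(): Token {
        val startColumn = column
        val start = index
        advance(1) // opening quote
        while (index < source.length && source[index] != '"') advance(1)
        if (index == source.length) throw LexingError("Unterminated string at $line:$startColumn")
        advance(1) // closing quote
        val value = source.substring(start + 1, index - 1)
        return Token(TokenType.ALPHANUMERIC, value, TokenPosition(line, startColumn, index - start))
    }
}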
@@ -1,191 +0,0 @@
package lexer

import lexer.errors.LexingError
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertThrows
import org.junit.jupiter.api.Assertions.*

class ScanTests {
    @Test
    fun scan_emptyString_returns_EOF() {
        val tokens = Lexer("").scan()
        assertEquals(1, tokens.size, "Expected 1 token, got ${tokens.size}")
        assertEquals(TokenType.EOF, tokens[0].type, "Expected EOF token, got ${tokens[0].type}")
    }

    @Test
    fun scan_unknownSymbol_returns_Error() {
        assertThrows<LexingError> { Lexer("€").scan() }
    }

    @Test
    fun scan_dot_returns_Dot() {
        val tokens = Lexer(".").scan()
        assertEquals(2, tokens.size)
        assertEquals(TokenType.DOT, tokens[0].type, "Expected DOT token, got ${tokens[0].type}")
        assertEquals(TokenType.EOF, tokens[1].type, "Expected EOF token, got ${tokens[1].type}")
    }

    @Test
    fun scan_two_dots_returns_two_dots() {
        val tokens = Lexer("..").scan()
        assertEquals(3, tokens.size)
        assertEquals(TokenType.DOT, tokens[0].type, "Expected DOT token, got ${tokens[0].type}")
        assertEquals(TokenType.DOT, tokens[1].type, "Expected DOT token, got ${tokens[1].type}")
        assertEquals(TokenType.EOF, tokens[2].type, "Expected EOF token, got ${tokens[2].type}")
    }

    @Test
    fun scan_letter_returns_letter() {
        val tokens = Lexer("a").scan()

        assertEquals(2, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.EOF, tokens[1].type, "Expected EOF token, got ${tokens[1].type}")

        assertEquals(0, tokens[0].position.line, "Expected line 0, got ${tokens[0].position.line}")
        assertEquals(0, tokens[0].position.column, "Expected column 0, got ${tokens[0].position.column}")
        assertEquals(1, tokens[0].position.length, "Expected length 1, got ${tokens[0].position.length}")
    }

    @Test
    fun scan_word_returns_alphanumerics() {
        val lexer = Lexer("word")
        val tokens = lexer.scan()

        assertEquals(2, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.EOF, tokens[1].type, "Expected EOF token, got ${tokens[1].type}")

        assertEquals(4, tokens[0].position.length, "Expected length 4, got ${tokens[0].position.length}")

        assertEquals("word", tokens[0].value, "Expected 'word', got ${tokens[0].value}")
    }

    @Test
    fun scan_space_returns_nothing() {
        val lexer = Lexer(" ")
        val tokens = lexer.scan()

        assertEquals(1, tokens.size)

        assertEquals(TokenType.EOF, tokens[0].type, "Expected EOF token, got ${tokens[0].type}")
    }

    @Test
    fun scan_whitespace_various_returns_nothing() {
        val lexer = Lexer(" \t\n\r")
        val tokens = lexer.scan()

        assertEquals(1, tokens.size)

        assertEquals(TokenType.EOF, tokens[0].type, "Expected EOF token, got ${tokens[0].type}")
    }

    @Test
    fun scan_separated_words() {
        val tokens = Lexer("word1 word2").scan()

        assertEquals(3, tokens.size, "Expected 3 tokens, got ${tokens.size}")

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals("word1", tokens[0].value, "Expected 'word1', got ${tokens[0].value}")
        assertEquals(5, tokens[0].position.length, "Expected length 5, got ${tokens[0].position.length}")

        assertEquals(TokenType.ALPHANUMERIC, tokens[1].type, "Expected ALPHANUMERIC token, got ${tokens[1].type}")
        assertEquals("word2", tokens[1].value, "Expected 'word2', got ${tokens[1].value}")
        assertEquals(5, tokens[1].position.length, "Expected length 5, got ${tokens[1].position.length}")
    }

    @Test
    fun scan_multiline() {
        val tokens = Lexer(
            """
            word1
            word2
            """.trimIndent()
        ).scan()

        assertEquals(3, tokens.size, "Expected 3 tokens, got ${tokens.size}")

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals("word1", tokens[0].value, "Expected 'word1', got ${tokens[0].value}")
        assertEquals(5, tokens[0].position.length, "Expected length 5, got ${tokens[0].position.length}")

        assertEquals(TokenType.ALPHANUMERIC, tokens[1].type, "Expected ALPHANUMERIC token, got ${tokens[1].type}")
        assertEquals("word2", tokens[1].value, "Expected 'word2', got ${tokens[1].value}")
        assertEquals(5, tokens[1].position.length, "Expected length 5, got ${tokens[1].position.length}")
    }

    @Test
    fun scan_parenthesis_returns_parenthesis() {
        val lexer = Lexer("()")
        val tokens = lexer.scan()

        assertEquals(3, tokens.size)

        assertEquals(
            TokenType.PARENTHESIS_LEFT,
            tokens[0].type,
            "Expected LEFT_PARENTHESES token, got ${tokens[0].type}"
        )
        assertEquals(
            TokenType.PARENTHESIS_RIGHT,
            tokens[1].type,
            "Expected RIGHT_PARENTHESES token, got ${tokens[1].type}"
        )
        assertEquals(TokenType.EOF, tokens[2].type, "Expected EOF token, got ${tokens[2].type}")
    }

    @Test
    fun scan_simple_quoted_string_returns_string() {
        val lexer = Lexer("\"string\"")
        val tokens = lexer.scan()

        assertEquals(2, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.EOF, tokens[1].type, "Expected EOF token, got ${tokens[1].type}")

        assertEquals("string", tokens[0].value, "Expected 'string', got ${tokens[0].value}")
    }

    @Test
    fun scan_quoted_string_with_space_returns_string() {
        val lexer = Lexer("\"string with space\"")
        val tokens = lexer.scan()

        assertEquals(2, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.EOF, tokens[1].type, "Expected EOF token, got ${tokens[1].type}")

        assertEquals("string with space", tokens[0].value, "Expected 'string with space', got ${tokens[0].value}")
    }

    @Test
    fun scan_comments_returns_nothing() {
        val lexer = Lexer("% comment")
        val tokens = lexer.scan()

        assertEquals(1, tokens.size)

        assertEquals(TokenType.EOF, tokens[0].type, "Expected EOF token, got ${tokens[0].type}")
    }

    @Test
    fun scan_comment_and_sentence_returns_sentence() {
        val tokens = Lexer("""
            % comment
            sentence
        """.trimIndent()).scan()

        assertEquals(2, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals("sentence", tokens[0].value, "Expected 'sentence', got ${tokens[0].value}")
    }
}
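As a hypothetical usage example, driving the sketch given after the first hunk with an input in the style of ScanTests produces the kind of token stream these assertions describe (the printed positions hold for the sketch, not necessarily for the removed lexer):

// Hypothetical driver for the sketch above; not part of the deleted code.
fun main() {
    val tokens = Lexer("word1 word2.").scan()
    tokens.forEach { println("${it.type} '${it.value}' @ ${it.position}") }
    // ALPHANUMERIC 'word1' @ TokenPosition(line=0, column=0, length=5)
    // ALPHANUMERIC 'word2' @ TokenPosition(line=0, column=6, length=5)
    // DOT '.' @ TokenPosition(line=0, column=11, length=1)
    // EOF '' @ TokenPosition(line=0, column=12, length=0)
}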
@@ -1,4 +0,0 @@
package parser

class ParseFromTextTests {
}
@@ -1,91 +0,0 @@
package parser

import lexer.Token
import lexer.state.TokenPosition
import lexer.TokenType
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Assertions.assertTrue
import org.junit.jupiter.api.Test
import prolog.ast.terms.Atom
import prolog.ast.terms.CompoundTerm

class ParseTests {
    @Test
    fun `parse atom a`() {
        val input = Token(TokenType.ALPHANUMERIC, "a", TokenPosition(0, 0, 1))

        val result = Parser(listOf(input)).parse()

        assertEquals(1, result.size, "Expected 1 term")
        assertEquals(Atom("a"), result[0], "Expected atom 'a'")
    }

    @Test
    fun `parse atom foo`() {
        val input = Token(TokenType.ALPHANUMERIC, "foo", TokenPosition(0, 0, 3))

        val result = Parser(listOf(input)).parse()

        assertEquals(1, result.size, "Expected 1 term")
        assertEquals(Atom("foo"), result[0], "Expected atom 'foo'")
    }

    @Test
    fun `parse atom foo1`() {
        val input = Token(TokenType.ALPHANUMERIC, "foo1", TokenPosition(0, 0, 4))

        val result = Parser(listOf(input)).parse()

        assertEquals(1, result.size, "Expected 1 term")
        assertEquals(Atom("foo1"), result[0], "Expected atom 'foo1'")
    }

    @Test
    fun `parse atom fooBar`() {
        val name = "fooBar"
        val input = Token(TokenType.ALPHANUMERIC, name, TokenPosition(0, 0, 6))

        val result = Parser(listOf(input)).parse()

        assertEquals(1, result.size, "Expected 1 term")
        assertEquals(Atom(name), result[0], "Expected atom 'fooBar'")
    }

    @Test
    fun `parse atom foo_bar`() {
        val name = "foo_bar"
        val input = Token(TokenType.ALPHANUMERIC, name, TokenPosition(0, 0, 7))

        val result = Parser(listOf(input)).parse()

        assertEquals(1, result.size, "Expected 1 term")
        assertEquals(Atom(name), result[0], "Expected atom 'foo_bar'")
    }

    @Test
    fun `parse atom my_FooBar1`() {
        val name = "my_FooBar1"
        val input = Token(TokenType.ALPHANUMERIC, name, TokenPosition(0, 0, 11))

        val result = Parser(listOf(input)).parse()

        assertEquals(1, result.size, "Expected 1 term")
        assertEquals(Atom(name), result[0], "Expected atom 'my_FooBar1'")
    }

    @Test
    fun `parse compound term f()`() {
        val input = listOf(
            Token(TokenType.ALPHANUMERIC, "f", TokenPosition(0, 0, 1)),
            Token(TokenType.PARENTHESIS_LEFT, "(", TokenPosition(0, 1, 2)),
            Token(TokenType.PARENTHESIS_RIGHT, ")", TokenPosition(0, 3, 4))
        )

        val result = Parser(input).parse()

        assertEquals(1, result.size, "Expected 1 term")
        assertTrue(result[0] is CompoundTerm)
        assertEquals("f", (result[0] as CompoundTerm).name)
        assertEquals(0, (result[0] as CompoundTerm).arguments.size)
    }
}
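ParseTests likewise fixes the removed parser's surface: Parser(tokens).parse() yields a list of terms, a bare ALPHANUMERIC token becomes an Atom, and a name followed by ( ) becomes a zero-arity CompoundTerm. A minimal sketch covering exactly those cases follows; the real Atom and CompoundTerm lived in prolog.ast.terms, and the bodies below are assumptions reconstructed from the assertions.

package parser

import lexer.Token
import lexer.TokenType

// Sketch of the removed term types; data-class equality makes
// assertEquals(Atom("a"), result[0]) work as in the tests above.
sealed class Term
data class Atom(val name: String) : Term()
data class CompoundTerm(val name: String, val arguments: List<Term>) : Term()

class Parser(private val tokens: List<Token>) {
    private var index = 0

    // Parses only what the tests exercise: bare atoms and zero-arity
    // compound terms like f(). Anything richer was in the removed code.
    fun parse(): List<Term> {
        val terms = mutableListOf<Term>()
        while (index < tokens.size && tokens[index].type != TokenType.EOF) {
            terms += term()
        }
        return terms
    }

    private fun term(): Term {
        val head = tokens[index++] // the tests only feed ALPHANUMERIC heads
        if (index < tokens.size && tokens[index].type == TokenType.PARENTHESIS_LEFT) {
            index += 2 // consume '(' and ')'; argument parsing is out of scope here
            return CompoundTerm(head.value, emptyList())
        }
        return Atom(head.value)
    }
}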