package be.ugent.logprog.lexer

import lexer.Error
import lexer.Lexer
import lexer.TokenType
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertThrows
import kotlin.test.assertEquals

/**
 * Unit tests for [Lexer.scan].
 *
 * Covers: EOF termination on empty input, single- and multi-token inputs,
 * whitespace skipping, token positions (line/column/length), token values,
 * and the [Error] raised for symbols the lexer does not recognise.
 */
class LexerScanTest {

    @Test
    fun scan_emptyString_returns_EOF() {
        val lexer = Lexer("")

        val tokens = lexer.scan()

        // Even empty input produces the terminating EOF token.
        assertEquals(1, tokens.size, "Expected 1 token, got ${tokens.size}")
        assertEquals(TokenType.EOF, tokens[0].type, "Expected EOF token, got ${tokens[0].type}")
    }

    @Test
    fun scan_unknownSymbol_returns_Error() {
        val lexer = Lexer("€")

        // '€' is not in the lexer's alphabet, so scan() must throw.
        // Idiomatic trailing-lambda form: the original wrapped the lambda in
        // parentheses and bound the result to an unused local.
        assertThrows<Error> { lexer.scan() }
    }

    @Test
    fun scan_dot_returns_Dot() {
        val lexer = Lexer(".")

        val tokens = lexer.scan()

        assertEquals(2, tokens.size)
        assertEquals(TokenType.DOT, tokens[0].type, "Expected DOT token, got ${tokens[0].type}")
        assertEquals(TokenType.EOF, tokens[1].type, "Expected EOF token, got ${tokens[1].type}")
    }

    @Test
    fun scan_two_dots_returns_two_dots() {
        val lexer = Lexer("..")

        val tokens = lexer.scan()

        assertEquals(3, tokens.size)
        assertEquals(TokenType.DOT, tokens[0].type, "Expected DOT token, got ${tokens[0].type}")
        assertEquals(TokenType.DOT, tokens[1].type, "Expected DOT token, got ${tokens[1].type}")
        assertEquals(TokenType.EOF, tokens[2].type, "Expected EOF token, got ${tokens[2].type}")
    }

    @Test
    fun scan_letter_returns_letter() {
        val lexer = Lexer("a")

        val tokens = lexer.scan()

        assertEquals(2, tokens.size)

        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.EOF, tokens[1].type, "Expected EOF token, got ${tokens[1].type}")

        // Positions are zero-based; a single character spans length 1.
        assertEquals(0, tokens[0].position.line, "Expected line 0, got ${tokens[0].position.line}")
        assertEquals(0, tokens[0].position.column, "Expected column 0, got ${tokens[0].position.column}")
        assertEquals(1, tokens[0].position.length, "Expected length 1, got ${tokens[0].position.length}")
    }

    @Test
    fun scan_word_returns_alphanumerics() {
        val lexer = Lexer("word")

        val tokens = lexer.scan()

        assertEquals(2, tokens.size)

        // A run of letters is lexed as a single ALPHANUMERIC token.
        assertEquals(TokenType.ALPHANUMERIC, tokens[0].type, "Expected ALPHANUMERIC token, got ${tokens[0].type}")
        assertEquals(TokenType.EOF, tokens[1].type, "Expected EOF token, got ${tokens[1].type}")

        assertEquals(4, tokens[0].position.length, "Expected length 4, got ${tokens[0].position.length}")

        assertEquals("word", tokens[0].value, "Expected 'word', got ${tokens[0].value}")
    }

    @Test
    fun scan_whitespace_returns_nothing() {
        val lexer = Lexer(" ")

        val tokens = lexer.scan()

        // Whitespace is skipped entirely; only EOF remains.
        assertEquals(1, tokens.size)

        assertEquals(TokenType.EOF, tokens[0].type, "Expected EOF token, got ${tokens[0].type}")
    }
}