gradle: 8.4 and parser: lazy tokenization

This commit is contained in:
Alex Zenla 2023-10-13 08:56:04 -07:00
parent 5078f38f61
commit e96bcd8754
Signed by: alex
GPG Key ID: C0780728420EBFE5
8 changed files with 59 additions and 11 deletions

View File

@ -3,5 +3,5 @@ plugins {
}
tasks.withType<Wrapper> {
gradleVersion = "8.3"
gradleVersion = "8.4"
}

Binary file not shown.

View File

@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME

3
gradlew vendored
View File

@ -83,7 +83,8 @@ done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum

View File

@ -22,14 +22,14 @@ abstract class Tool {
val rootImportLocator: ImportLocator
get() = ImportLocator("local", rootFilePath())
fun tokenize(): TokenStream =
Tokenizer(createCharSource()).stream()
fun tokenize(): LazyTokenSource =
LazyTokenSource(Tokenizer(createCharSource()))
fun parse(attribution: NodeAttribution = DiscardNodeAttribution): CompilationUnit =
Parser(TokenStreamSource(tokenize()), attribution).parseCompilationUnit()
Parser(TokenStreamSource(tokenize().streamAllRemainingTokens()), attribution).parseCompilationUnit()
fun highlight(scheme: HighlightScheme): List<Highlight> =
Highlighter(scheme).highlight(tokenize())
Highlighter(scheme).highlight(tokenize().streamAllRemainingTokens())
fun reprint(): String = buildString { visit(Printer(this)) }

View File

@ -0,0 +1,38 @@
package gay.pizza.pork.parser
/**
 * A [TokenSource] that pulls tokens from a [Tokenizer] on demand instead of
 * tokenizing the whole input up front. Tokens produced for lookahead ([peek],
 * [peekTypeAhead]) are buffered and replayed by [next].
 */
class LazyTokenSource(val tokenizer: Tokenizer) : TokenSource {
  // Lookahead buffer: tokens already produced by the tokenizer but not yet
  // consumed via next(). ArrayDeque gives O(1) removeFirst() and O(1) indexed
  // access; a plain MutableList would make removeFirst() O(n).
  private val queue = ArrayDeque<Token>()
  private var index = 0

  override val currentIndex: Int
    get() = index

  override fun next(): Token {
    index++
    // Replay buffered lookahead tokens before asking the tokenizer for more.
    if (queue.isNotEmpty()) {
      return queue.removeFirst()
    }
    return tokenizer.next()
  }

  override fun peek(): Token {
    if (queue.isNotEmpty()) {
      return queue.first()
    }
    val token = tokenizer.next()
    queue.add(token)
    return token
  }

  override fun peekTypeAhead(ahead: Int): TokenType {
    // Need ahead + 1 buffered tokens so that queue[ahead] is valid.
    wantAtLeast(ahead + 1)
    return queue[ahead].type
  }

  private fun wantAtLeast(count: Int) {
    // Fill only the deficit. The previous implementation appended `count`
    // additional tokens whenever the buffer was short, over-reading the
    // tokenizer (and potentially pulling it past EndOfFile).
    while (queue.size < count) {
      queue.add(tokenizer.next())
    }
  }
}

View File

@ -14,6 +14,9 @@ interface TokenSource : PeekableSource<Token> {
return tokens
}
fun streamAllRemainingTokens(): TokenStream =
TokenStream(consumeAllRemainingTokens().filter { !TokenType.ParserIgnoredTypes.contains(it.type) })
fun ignoringParserIgnoredTypes(): TokenSource =
TokenStreamSource(TokenStream(consumeAllRemainingTokens().filter { !TokenType.ParserIgnoredTypes.contains(it.type) }))
TokenStreamSource(streamAllRemainingTokens())
}

View File

@ -4,15 +4,21 @@ import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import gay.pizza.dough.fs.PlatformFsProvider
import gay.pizza.pork.minimal.FileTool
import gay.pizza.pork.parser.TokenType
class TokenizeCommand : CliktCommand(help = "Tokenize Compilation Unit", name = "tokenize") {
val path by argument("file")
override fun run() {
val tool = FileTool(PlatformFsProvider.resolve(path))
val tokenStream = tool.tokenize()
for (token in tokenStream.tokens) {
println("${token.sourceIndex.index} ${token.type.name} '${sanitize(token.text)}'")
val tokenSource = tool.tokenize()
while (true) {
val token = tokenSource.next()
println("${token.sourceIndex} ${token.type.name} '${sanitize(token.text)}'")
tokenSource.peekTypeAhead(5)
if (token.type == TokenType.EndOfFile) {
break
}
}
}