mirror of https://github.com/GayPizzaSpecifications/pork.git (synced 2025-08-02 12:50:55 +00:00)

gradle: 8.4 and parser: lazy tokenization

commit e96bcd8754
parent 5078f38f61
@@ -3,5 +3,5 @@ plugins {
 }

 tasks.withType<Wrapper> {
-  gradleVersion = "8.3"
+  gradleVersion = "8.4"
 }
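Note on the wrapper bump: the Wrapper task above declares which Gradle distribution ./gradlew should use, and running Gradle's built-in wrapper task regenerates gradle-wrapper.properties, the wrapper jar, and the gradlew scripts, which is why those files change together in this commit. A minimal Kotlin DSL sketch of the task configuration; only gradleVersion comes from this commit, the distributionType line is an illustrative assumption:

  // Gradle Kotlin DSL sketch of the wrapper configuration.
  tasks.withType<Wrapper> {
    gradleVersion = "8.4"
    distributionType = Wrapper.DistributionType.BIN  // assumption: BIN matches the -bin.zip URL below
  }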
BIN  gradle/wrapper/gradle-wrapper.jar (vendored)
Binary file not shown.
gradle/wrapper/gradle-wrapper.properties (vendored, 2 changes)
@@ -1,6 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
 networkTimeout=10000
 validateDistributionUrl=true
 zipStoreBase=GRADLE_USER_HOME
gradlew (vendored, 3 changes)
@@ -83,7 +83,8 @@ done
 # This is normally unused
 # shellcheck disable=SC2034
 APP_BASE_NAME=${0##*/}
-APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
+APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit

 # Use the maximum available, or set MAX_FD != -1 to use that value.
 MAX_FD=maximum
@@ -22,14 +22,14 @@ abstract class Tool {
   val rootImportLocator: ImportLocator
     get() = ImportLocator("local", rootFilePath())

-  fun tokenize(): TokenStream =
-    Tokenizer(createCharSource()).stream()
+  fun tokenize(): LazyTokenSource =
+    LazyTokenSource(Tokenizer(createCharSource()))

   fun parse(attribution: NodeAttribution = DiscardNodeAttribution): CompilationUnit =
-    Parser(TokenStreamSource(tokenize()), attribution).parseCompilationUnit()
+    Parser(TokenStreamSource(tokenize().streamAllRemainingTokens()), attribution).parseCompilationUnit()

   fun highlight(scheme: HighlightScheme): List<Highlight> =
-    Highlighter(scheme).highlight(tokenize())
+    Highlighter(scheme).highlight(tokenize().streamAllRemainingTokens())

   fun reprint(): String = buildString { visit(Printer(this)) }
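What this changes for callers: tokenize() now returns a pull-based LazyTokenSource instead of an eagerly built TokenStream, so consumers that still need the whole stream (the parser and highlighter above) drain it through streamAllRemainingTokens(). A usage sketch under the same API; the file path is purely illustrative:

  // Sketch only; the calls mirror the diff above.
  val tool = FileTool(PlatformFsProvider.resolve("example.pork"))  // "example.pork" is a made-up path
  val source = tool.tokenize()                    // no tokens are read yet
  val firstType = source.peekTypeAhead(0)         // pulls a single token from the Tokenizer on demand
  val stream = source.streamAllRemainingTokens()  // drains everything not yet consumed into a TokenStream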
@@ -0,0 +1,38 @@
+package gay.pizza.pork.parser
+
+class LazyTokenSource(val tokenizer: Tokenizer) : TokenSource {
+  private val queue = mutableListOf<Token>()
+  private var index = 0
+  override val currentIndex: Int
+    get() = index
+
+  override fun next(): Token {
+    index++
+    if (queue.isNotEmpty()) {
+      return queue.removeFirst()
+    }
+    return tokenizer.next()
+  }
+
+  override fun peek(): Token {
+    if (queue.isNotEmpty()) {
+      return queue.first()
+    }
+    val token = tokenizer.next()
+    queue.add(token)
+    return token
+  }
+
+  override fun peekTypeAhead(ahead: Int): TokenType {
+    wantAtLeast(ahead + 1)
+    return queue[ahead].type
+  }
+
+  private fun wantAtLeast(ahead: Int) {
+    if (queue.size < ahead) {
+      for (i in 1..ahead) {
+        queue.add(tokenizer.next())
+      }
+    }
+  }
+}
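Behavior of the new class, read directly from the code above: peek() and peekTypeAhead() pull tokens from the Tokenizer only on demand and park them in the queue, next() always serves the queue before asking the Tokenizer for more, and index counts how many tokens have been handed out. One detail worth noting: wantAtLeast(n) tops the queue up by n extra tokens whenever it is short, rather than by the exact shortfall, so it can buffer slightly further ahead than required, but queue[ahead] is still guaranteed to exist. A short trace sketch, where charSource stands in for whatever character source the Tokenizer is built over:

  // Trace of the lazy buffering; the comments describe what the implementation above does.
  val source = LazyTokenSource(Tokenizer(charSource))
  source.peekTypeAhead(2)     // queue is empty, so wantAtLeast(3) buffers three tokens
  val first = source.next()   // served from the queue; index becomes 1, the Tokenizer is not touched
  val second = source.peek()  // still answered from the queue
  // Only once the queue drains does next() fall through to tokenizer.next() again.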
@@ -14,6 +14,9 @@ interface TokenSource : PeekableSource<Token> {
     return tokens
   }

+  fun streamAllRemainingTokens(): TokenStream =
+    TokenStream(consumeAllRemainingTokens().filter { !TokenType.ParserIgnoredTypes.contains(it.type) })
+
   fun ignoringParserIgnoredTypes(): TokenSource =
-    TokenStreamSource(TokenStream(consumeAllRemainingTokens().filter { !TokenType.ParserIgnoredTypes.contains(it.type) }))
+    TokenStreamSource(streamAllRemainingTokens())
 }
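The new streamAllRemainingTokens() factors out the filtering that ignoringParserIgnoredTypes() used to inline: it drains the source and drops every token whose type is listed in TokenType.ParserIgnoredTypes, and ignoringParserIgnoredTypes() becomes a thin wrapper around it. A sketch of the relationship, assuming source is an unconsumed TokenSource:

  // Equivalent to what ignoringParserIgnoredTypes() now does internally.
  val filtered: TokenStream = source.streamAllRemainingTokens()
  val filteredSource: TokenSource = TokenStreamSource(filtered)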
@@ -4,15 +4,21 @@ import com.github.ajalt.clikt.core.CliktCommand
 import com.github.ajalt.clikt.parameters.arguments.argument
 import gay.pizza.dough.fs.PlatformFsProvider
 import gay.pizza.pork.minimal.FileTool
+import gay.pizza.pork.parser.TokenType

 class TokenizeCommand : CliktCommand(help = "Tokenize Compilation Unit", name = "tokenize") {
   val path by argument("file")

   override fun run() {
     val tool = FileTool(PlatformFsProvider.resolve(path))
-    val tokenStream = tool.tokenize()
-    for (token in tokenStream.tokens) {
-      println("${token.sourceIndex.index} ${token.type.name} '${sanitize(token.text)}'")
+    val tokenSource = tool.tokenize()
+    while (true) {
+      val token = tokenSource.next()
+      println("${token.sourceIndex} ${token.type.name} '${sanitize(token.text)}'")
+      tokenSource.peekTypeAhead(5)
+      if (token.type == TokenType.EndOfFile) {
+        break
+      }
     }
   }
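The tokenize command now walks the lazy source token by token and stops at EndOfFile instead of materializing the whole TokenStream up front; the peekTypeAhead(5) call after each print also exercises the lookahead buffering path on real input. A sketch of the same loop factored into a helper (forEachToken is an invented name, not part of the commit):

  // Hypothetical helper: drain any TokenSource until EndOfFile is seen.
  fun TokenSource.forEachToken(block: (Token) -> Unit) {
    while (true) {
      val token = next()
      block(token)
      if (token.type == TokenType.EndOfFile) break
    }
  }

  // Usage mirroring TokenizeCommand.run():
  // tool.tokenize().forEachToken { token -> println("${token.sourceIndex} ${token.type.name}") }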