Implement token node attribution and a Dart compiler.

Alex Zenla 2023-08-30 03:54:14 -07:00
parent 743bc00bab
commit 900e3f1a1c
Signed by: alex
GPG Key ID: C0780728420EBFE5
9 changed files with 213 additions and 26 deletions
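The two features land as separate pieces: a NodeAttribution hook in the parser that records which tokens produced which AST nodes, and a DartCompiler visitor wired into a new generate-dart CLI command. A rough usage sketch, not part of this commit (the input path and the standalone main wrapper are placeholders; the types and calls come from the diffs below):

// Sketch only: "example.pork" is a placeholder input path.
import gay.pizza.pork.compiler.DartCompiler
import gay.pizza.pork.frontend.FileFrontend
import gay.pizza.pork.parse.TokenNodeAttribution
import java.nio.file.Path

fun main() {
  val frontend = FileFrontend(Path.of("example.pork"))

  // Token node attribution: parse while recording which tokens built each node.
  val attribution = TokenNodeAttribution()
  val program = frontend.parse(attribution)
  println("parsed ${attribution.assembleTokens(program).size} attributed tokens")

  // Dart compiler: walk the AST and print the generated Dart source.
  println(frontend.visit(DartCompiler()))
}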


@@ -1,7 +1,11 @@
 package gay.pizza.pork.ast.nodes
 
 import gay.pizza.pork.ast.NodeType
+import kotlinx.serialization.SerialName
+import kotlinx.serialization.Serializable
 
+@Serializable
+@SerialName("stringLiteral")
 class StringLiteral(val text: String) : Expression() {
   override val type: NodeType = NodeType.StringLiteral


@@ -0,0 +1,16 @@
package gay.pizza.pork.cli

import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.types.path
import gay.pizza.pork.compiler.DartCompiler
import gay.pizza.pork.frontend.FileFrontend

class GenerateDartCommand : CliktCommand(help = "Generate Dart Code", name = "generate-dart") {
  val path by argument("file").path(mustExist = true, canBeDir = false)

  override fun run() {
    val frontend = FileFrontend(path)
    println(frontend.visit(DartCompiler()))
  }
}


@@ -14,7 +14,8 @@ class RootCommand : CliktCommand(
     TokenizeCommand(),
     ReprintCommand(),
     AstCommand(),
-    GenerateKotlinCommand()
+    GenerateKotlinCommand(),
+    GenerateDartCommand()
   )
 }


@@ -0,0 +1,90 @@
package gay.pizza.pork.compiler

import gay.pizza.pork.ast.NodeVisitor
import gay.pizza.pork.ast.nodes.*
import gay.pizza.pork.util.StringEscape

class DartCompiler : NodeVisitor<String> {
  override fun visitDefine(node: Define): String =
    "final ${visit(node.symbol)} = ${visit(node.value)};"

  override fun visitFunctionCall(node: FunctionCall): String =
    "${visit(node.symbol)}(${node.arguments.joinToString(", ") { visit(it) }})"

  override fun visitReference(node: SymbolReference): String =
    visit(node.symbol)

  override fun visitIf(node: If): String = buildString {
    append("if (")
    append(visit(node.condition))
    append(") {")
    append(visit(node.thenExpression))
    append("}")
    if (node.elseExpression != null) {
      append(" else {")
      append(visit(node.elseExpression))
      append("}")
    }
  }

  override fun visitSymbol(node: Symbol): String =
    node.id

  override fun visitLambda(node: Lambda): String = buildString {
    append("(${node.arguments.joinToString(", ") { visit(it) }}) {")
    appendLine()
    for ((index, expression) in node.expressions.withIndex()) {
      val code = visit(expression)
      if (index == node.expressions.size - 1) {
        append("return ")
      }
      append(code)
      append(";")
    }
    appendLine()
    append("}")
  }

  override fun visitIntLiteral(node: IntLiteral): String =
    node.value.toString()

  override fun visitBooleanLiteral(node: BooleanLiteral): String =
    node.value.toString()

  override fun visitListLiteral(node: ListLiteral): String = buildString {
    append("[")
    for ((index, item) in node.items.withIndex()) {
      appendLine()
      append(visit(item))
      if (index + 1 != node.items.size) {
        append(",")
      }
    }
    append("]")
  }

  override fun visitStringLiteral(node: StringLiteral): String =
    "\"" + StringEscape.escape(node.text) + "\""

  override fun visitParentheses(node: Parentheses): String =
    "(${visit(node.expression)})"

  override fun visitPrefixOperation(node: PrefixOperation): String =
    "${node.op.token}${visit(node.expression)}"

  override fun visitInfixOperation(node: InfixOperation): String =
    "${visit(node.left)} ${node.op.token} ${visit(node.right)}"

  override fun visitProgram(node: Program): String = buildString {
    appendLine("void main() {")
    for (item in node.expressions) {
      append(visit(item))
      // Terminate the statement, but avoid doubling up when the generated
      // code already ends with a semicolon (e.g. visitDefine).
      if (!endsWith(";")) {
        append(";")
      }
      appendLine()
    }
    appendLine("}")
  }
}
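To make the output shape concrete, here is a hedged sketch of what visitProgram emits for a single call expression. The node constructors below are assumptions inferred from the properties the visitor reads (expressions, symbol, arguments, text), not taken from this commit:

// Illustrative only: constructor shapes for Program, FunctionCall, Symbol, and
// StringLiteral are assumed.
fun dartCompilerExample() {
  val program = Program(listOf(FunctionCall(Symbol("print"), listOf(StringLiteral("hello")))))
  println(DartCompiler().visitProgram(program))
  // Expected output (the compiler emits no indentation):
  // void main() {
  // print("hello");
  // }
}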


@@ -13,8 +13,8 @@ abstract class Frontend {
   fun tokenize(): TokenStream =
     Tokenizer(createCharSource()).tokenize()
 
-  fun parse(): Program =
-    Parser(TokenStreamSource(tokenize())).readProgram()
+  fun parse(attribution: NodeAttribution = DiscardNodeAttribution): Program =
+    Parser(TokenStreamSource(tokenize()), attribution).readProgram()
 
   fun highlight(scheme: HighlightScheme): List<Highlight> =
     Highlighter(scheme).highlight(tokenize())


@@ -0,0 +1,9 @@
package gay.pizza.pork.parse

import gay.pizza.pork.ast.nodes.Node

object DiscardNodeAttribution : NodeAttribution {
  override fun enter() {}
  override fun push(token: Token) {}
  override fun <T : Node> exit(node: T): T = node
}


@@ -0,0 +1,9 @@
package gay.pizza.pork.parse

import gay.pizza.pork.ast.nodes.Node

interface NodeAttribution {
  fun enter()
  fun push(token: Token)
  fun <T: Node> exit(node: T): T
}
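The contract is small: the parser calls enter() when it starts reading a node, push(token) for every token it pulls from the source, and exit(node) when the node is complete. As a hypothetical illustration (not part of this commit), an attribution that only counts the tokens pushed while each node was open:

// Hypothetical example of implementing NodeAttribution; it counts tokens pushed
// for the innermost node currently being read.
package gay.pizza.pork.parse

import gay.pizza.pork.ast.nodes.Node

class CountingNodeAttribution : NodeAttribution {
  private val counts = mutableListOf<Int>()

  override fun enter() {
    // A node started: begin a fresh counter for it.
    counts.add(0)
  }

  override fun push(token: Token) {
    // Attribute the token to the innermost open node, if any.
    if (counts.isNotEmpty()) {
      counts[counts.size - 1]++
    }
  }

  override fun <T : Node> exit(node: T): T {
    println("${node::class.simpleName}: ${counts.removeLast()} tokens")
    return node
  }
}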


@@ -3,38 +3,41 @@ package gay.pizza.pork.parse
 import gay.pizza.pork.ast.nodes.*
 import gay.pizza.pork.util.StringEscape
 
-class Parser(source: PeekableSource<Token>) {
+class Parser(source: PeekableSource<Token>, val attribution: NodeAttribution) {
   private val unsanitizedSource = source
 
-  private fun readIntLiteral(): IntLiteral =
+  private fun readIntLiteral(): IntLiteral = within {
     expect(TokenType.IntLiteral) { IntLiteral(it.text.toInt()) }
+  }
 
-  private fun readStringLiteral(): StringLiteral =
+  private fun readStringLiteral(): StringLiteral = within {
     expect(TokenType.StringLiteral) {
       val content = StringEscape.unescape(StringEscape.unquote(it.text))
       StringLiteral(content)
     }
+  }
 
-  private fun readBooleanLiteral(): BooleanLiteral =
+  private fun readBooleanLiteral(): BooleanLiteral = within {
     expect(TokenType.True, TokenType.False) {
       BooleanLiteral(it.type == TokenType.True)
     }
+  }
 
-  private fun readListLiteral(): ListLiteral {
+  private fun readListLiteral(): ListLiteral = within {
     expect(TokenType.LeftBracket)
     val items = collect(TokenType.RightBracket, TokenType.Comma) {
       readExpression()
     }
     expect(TokenType.RightBracket)
-    return ListLiteral(items)
+    ListLiteral(items)
   }
 
-  private fun readSymbol(): Symbol =
+  private fun readSymbolRaw(): Symbol =
     expect(TokenType.Symbol) { Symbol(it.text) }
 
-  private fun readSymbolCases(): Expression {
-    val symbol = readSymbol()
-    return if (next(TokenType.LeftParentheses)) {
+  private fun readSymbolCases(): Expression = within {
+    val symbol = readSymbolRaw()
+    if (next(TokenType.LeftParentheses)) {
       val arguments = collect(TokenType.RightParentheses, TokenType.Comma) {
         readExpression()
       }
@@ -47,11 +50,11 @@ class Parser(source: PeekableSource<Token>) {
     }
   }
 
-  private fun readLambda(): Lambda {
+  private fun readLambda(): Lambda = within {
     expect(TokenType.LeftCurly)
     val arguments = mutableListOf<Symbol>()
     while (!peek(TokenType.In)) {
-      val symbol = readSymbol()
+      val symbol = readSymbolRaw()
       arguments.add(symbol)
       if (next(TokenType.Comma)) {
         continue
@@ -64,22 +67,23 @@ class Parser(source: PeekableSource<Token>) {
       readExpression()
     }
     expect(TokenType.RightCurly)
-    return Lambda(arguments, items)
+    Lambda(arguments, items)
   }
 
-  private fun readParentheses(): Parentheses {
+  private fun readParentheses(): Parentheses = within {
     expect(TokenType.LeftParentheses)
     val expression = readExpression()
     expect(TokenType.RightParentheses)
-    return Parentheses(expression)
+    Parentheses(expression)
   }
 
-  private fun readNegation(): PrefixOperation =
+  private fun readNegation(): PrefixOperation = within {
     expect(TokenType.Negation) {
       PrefixOperation(PrefixOperator.Negate, readExpression())
     }
+  }
 
-  private fun readIf(): If {
+  private fun readIf(): If = within {
     expect(TokenType.If)
     val condition = readExpression()
     expect(TokenType.Then)
@@ -88,7 +92,7 @@ class Parser(source: PeekableSource<Token>) {
     if (next(TokenType.Else)) {
       elseExpression = readExpression()
     }
-    return If(condition, thenExpression, elseExpression)
+    If(condition, thenExpression, elseExpression)
   }
 
   fun readExpression(): Expression {
@@ -133,7 +137,8 @@ class Parser(source: PeekableSource<Token>) {
       else -> {
         throw RuntimeException(
           "Failed to parse token: ${token.type} '${token.text}' as" +
-            " expression (index ${unsanitizedSource.currentIndex})")
+            " expression (index ${unsanitizedSource.currentIndex})"
+        )
       }
     }
@@ -143,7 +148,9 @@ class Parser(source: PeekableSource<Token>) {
       TokenType.Multiply,
       TokenType.Divide,
       TokenType.Equality,
-      TokenType.Inequality)) {
+      TokenType.Inequality
+      )
+    ) {
       val infixToken = next()
       val infixOperator = convertInfixOperator(infixToken)
       return InfixOperation(expression, infixOperator, readExpression())
@@ -200,18 +207,21 @@ class Parser(source: PeekableSource<Token>) {
   private fun expect(vararg types: TokenType): Token {
     val token = next()
     if (!types.contains(token.type)) {
-      throw RuntimeException("Expected one of ${types.joinToString(", ")} " +
-        " but got type ${token.type} '${token.text}'")
+      throw RuntimeException(
+        "Expected one of ${types.joinToString(", ")} " +
+          " but got type ${token.type} '${token.text}'"
+      )
     }
     return token
   }
 
-  private fun <T> expect(vararg types: TokenType, consume: (Token) -> T): T =
+  private fun <T: Node> expect(vararg types: TokenType, consume: (Token) -> T): T =
     consume(expect(*types))
 
   private fun next(): Token {
     while (true) {
       val token = unsanitizedSource.next()
+      attribution.push(token)
       if (ignoredByParser(token.type)) {
         continue
       }
@@ -230,6 +240,11 @@ class Parser(source: PeekableSource<Token>) {
     }
   }
 
+  private fun <T: Node> within(block: () -> T): T {
+    attribution.enter()
+    return attribution.exit(block())
+  }
+
   private fun ignoredByParser(type: TokenType): Boolean = when (type) {
     TokenType.BlockComment -> true
     TokenType.LineComment -> true


@@ -0,0 +1,43 @@
package gay.pizza.pork.parse

import gay.pizza.pork.ast.NodeCoalescer
import gay.pizza.pork.ast.nodes.Node
import java.util.IdentityHashMap

class TokenNodeAttribution : NodeAttribution {
  private val map: MutableMap<Node, List<Token>> = IdentityHashMap()
  private val stack = mutableListOf<MutableList<Token>>()
  private var current: MutableList<Token>? = null

  override fun enter() {
    val store = mutableListOf<Token>()
    current = store
    stack.add(store)
  }

  override fun push(token: Token) {
    val store = current ?: throw RuntimeException("enter() not called!")
    store.add(token)
  }

  override fun <T: Node> exit(node: T): T {
    val store = stack.removeLast()
    map[node] = store
    // Pop back to the enclosing node's token list so that tokens pushed after
    // this point are not attributed to the node that just finished.
    current = stack.lastOrNull()
    return node
  }

  fun tokensOf(node: Node): List<Token>? = map[node]

  fun assembleTokens(node: Node): List<Token> {
    val allTokens = mutableListOf<Token>()
    val coalescer = NodeCoalescer { item ->
      val tokens = tokensOf(item)
      if (tokens != null) {
        allTokens.addAll(tokens)
      }
    }
    coalescer.visit(node)
    return allTokens
  }
}