Compare commits
66 Commits
13adbe4c0e
...
7feb3cd7b0
| Author | SHA1 | Date | |
|---|---|---|---|
| 7feb3cd7b0 | |||
| 1fec471da9 | |||
| 09d2420508 | |||
| 028ccf2bf9 | |||
| 1458da58cc | |||
| 4a27a8b474 | |||
| f13be7817c | |||
| 7fe6e3b5ad | |||
| c4368f24fc | |||
| dcf94296fa | |||
| 12370361c4 | |||
| 0c6ce16bcd | |||
| c244435ae2 | |||
| b400f48676 | |||
| 793565cafa | |||
| feae5d314e | |||
| 10e1986fe2 | |||
| 9eaa71fe2d | |||
| f58ff1785a | |||
| 970ceeb8b0 | |||
| e2f5024a4c | |||
| 8008f37f16 | |||
| c9140bd018 | |||
| ba5ce0a88c | |||
| 398cd57b1d | |||
| f8718ac05b | |||
| d4596c3afa | |||
| 69bbe17992 | |||
| 2d4c79b30f | |||
| 238af9affc | |||
| a6c283759d | |||
| 63ee57e7f0 | |||
| 503ca41155 | |||
| a156d24a91 | |||
| 019f7d84b1 | |||
| 4c794944ef | |||
| 99a5aa5312 | |||
| 7bbf43a725 | |||
| 4c15526d1b | |||
| c741cfee51 | |||
| 012b8c8cf1 | |||
| 4c3f7a8bfc | |||
| fe6f54b402 | |||
| 49f3f3e09f | |||
| 0d1dce4868 | |||
| d18ab2507c | |||
| 7e69356f79 | |||
| 9863f46f38 | |||
| 45f31d0678 | |||
| 49a6320fef | |||
| 51f67ac908 | |||
| 7da437212d | |||
| 740379d7b2 | |||
| 19c4fb5033 | |||
| f57452ece2 | |||
| 4590d66105 | |||
| 3aa40ae2c2 | |||
| da0af799d8 | |||
| 9f45252522 | |||
| bae0da31c2 | |||
| 4258503c0e | |||
| d4a772e88b | |||
| 68ec6f9f3e | |||
|
|
59cf459d74 | ||
|
|
890eb811b9 | ||
|
|
fd3c5da59b |
7
bin/repl
7
bin/repl
|
|
@ -7,6 +7,9 @@ import * as readline from 'readline'
|
|||
import { readFileSync, writeFileSync } from 'fs'
|
||||
import { basename } from 'path'
|
||||
|
||||
globals.$.script.name = '(repl)'
|
||||
globals.$.script.path = '(repl)'
|
||||
|
||||
async function repl() {
|
||||
const commands = ['/clear', '/reset', '/vars', '/funcs', '/history', '/bytecode', '/exit', '/save', '/quit']
|
||||
|
||||
|
|
@ -145,7 +148,7 @@ async function repl() {
|
|||
}
|
||||
|
||||
try {
|
||||
const compiler = new Compiler(trimmed, Object.keys(globals))
|
||||
const compiler = new Compiler(trimmed, [...Object.keys(globals), ...vm.vars()])
|
||||
|
||||
// Save VM state before appending bytecode, in case execution fails
|
||||
const savedInstructions = [...vm.instructions]
|
||||
|
|
@ -235,7 +238,7 @@ async function loadFile(filePath: string): Promise<{ vm: VM; codeHistory: string
|
|||
if (!trimmed) continue
|
||||
|
||||
try {
|
||||
const compiler = new Compiler(trimmed)
|
||||
const compiler = new Compiler(trimmed, [...Object.keys(globals), ...vm.vars()])
|
||||
vm.appendBytecode(compiler.bytecode)
|
||||
await vm.continue()
|
||||
codeHistory.push(trimmed)
|
||||
|
|
|
|||
125
bin/shrimp
125
bin/shrimp
|
|
@ -1,71 +1,83 @@
|
|||
#!/usr/bin/env bun
|
||||
|
||||
import { Compiler } from '../src/compiler/compiler'
|
||||
import { colors, globals } from '../src/prelude'
|
||||
import { parser } from '../src/parser/shrimp'
|
||||
import { colors, globals as prelude } from '../src/prelude'
|
||||
import { treeToString } from '../src/utils/tree'
|
||||
import { VM, fromValue, bytecodeToString } from 'reefvm'
|
||||
import { readFileSync, writeFileSync, mkdirSync } from 'fs'
|
||||
import { randomUUID } from 'crypto'
|
||||
import { runCode, runFile, compileFile, parseCode } from '../src'
|
||||
import { resolve } from 'path'
|
||||
import { bytecodeToString } from 'reefvm'
|
||||
import { readFileSync } from 'fs'
|
||||
import { spawn } from 'child_process'
|
||||
import { join } from 'path'
|
||||
|
||||
async function runFile(filePath: string) {
|
||||
try {
|
||||
const code = readFileSync(filePath, 'utf-8')
|
||||
const compiler = new Compiler(code, Object.keys(globals))
|
||||
const vm = new VM(compiler.bytecode, globals)
|
||||
await vm.run()
|
||||
return vm.stack.length ? fromValue(vm.stack[vm.stack.length - 1]) : null
|
||||
} catch (error: any) {
|
||||
console.error(`${colors.red}Error:${colors.reset} ${error.message}`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
async function compileFile(filePath: string) {
|
||||
try {
|
||||
const code = readFileSync(filePath, 'utf-8')
|
||||
const compiler = new Compiler(code)
|
||||
return bytecodeToString(compiler.bytecode)
|
||||
} catch (error: any) {
|
||||
console.error(`${colors.red}Error:${colors.reset} ${error.message}`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
async function parseFile(filePath: string) {
|
||||
try {
|
||||
const code = readFileSync(filePath, 'utf-8')
|
||||
const tree = parser.parse(code)
|
||||
return treeToString(tree, code)
|
||||
} catch (error: any) {
|
||||
console.error(`${colors.red}Error:${colors.reset} ${error.message}`)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
function showHelp() {
|
||||
console.log(`${colors.bright}${colors.magenta}🦐 Shrimp${colors.reset} is a scripting language in a shell.
|
||||
|
||||
${colors.bright}Usage:${colors.reset} shrimp <command> [...args]
|
||||
${colors.bright}Usage:${colors.reset} shrimp <command> [options] [...args]
|
||||
|
||||
${colors.bright}Commands:${colors.reset}
|
||||
${colors.cyan}run ${colors.yellow}./my-file.sh${colors.reset} Execute a file with Shrimp
|
||||
${colors.cyan}parse ${colors.yellow}./my-file.sh${colors.reset} Print parse tree for Shrimp file
|
||||
${colors.cyan}bytecode ${colors.yellow}./my-file.sh${colors.reset} Print bytecode for Shrimp file
|
||||
${colors.cyan}eval ${colors.yellow}'some code'${colors.reset} Evaluate a line of Shrimp code
|
||||
${colors.cyan}eval ${colors.yellow}'some code'${colors.reset} Evaluate a line of Shrimp code
|
||||
${colors.cyan}print ${colors.yellow}'some code'${colors.reset} Evaluate a line of Shrimp code and print the result
|
||||
${colors.cyan}repl${colors.reset} Start REPL
|
||||
${colors.cyan}help${colors.reset} Print this help message
|
||||
${colors.cyan}version${colors.reset} Print version`)
|
||||
${colors.cyan}version${colors.reset} Print version
|
||||
|
||||
${colors.bright}Options:${colors.reset}
|
||||
${colors.cyan}eval -I${colors.reset} ${colors.yellow}<module>${colors.reset} Import module (can be repeated)
|
||||
Example: shrimp -I math -e 'random | echo'
|
||||
Example: shrimp -Imath -Istr -e 'random | echo'`)
|
||||
}
|
||||
|
||||
function showVersion() {
|
||||
console.log('🦐 v0.0.1')
|
||||
}
|
||||
|
||||
async function evalCode(code: string, imports: string[]) {
|
||||
const idx = Bun.argv.indexOf('--')
|
||||
prelude.$.args = idx >= 0 ? Bun.argv.slice(idx + 1) : []
|
||||
|
||||
const importStatement = imports.length > 0 ? `import ${imports.join(' ')}` : ''
|
||||
if (importStatement) code = `${importStatement}; ${code}`
|
||||
return await runCode(code)
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const args = process.argv.slice(2)
|
||||
let args = process.argv.slice(2)
|
||||
|
||||
if (args.length === 0) {
|
||||
showHelp()
|
||||
return
|
||||
}
|
||||
|
||||
// Parse -I flags for imports (supports both "-I math" and "-Imath")
|
||||
const imports: string[] = []
|
||||
|
||||
while (args.length > 0) {
|
||||
const arg = args[0]
|
||||
|
||||
if (arg === '-I') {
|
||||
// "-I math" format
|
||||
if (args.length < 2) {
|
||||
console.log(`${colors.bright}error: -I requires a module name${colors.reset}`)
|
||||
process.exit(1)
|
||||
}
|
||||
imports.push(args[1])
|
||||
args = args.slice(2)
|
||||
} else if (arg.startsWith('-I')) {
|
||||
// "-Imath" format
|
||||
const moduleName = arg.slice(2)
|
||||
if (!moduleName) {
|
||||
console.log(`${colors.bright}error: -I requires a module name${colors.reset}`)
|
||||
process.exit(1)
|
||||
}
|
||||
imports.push(moduleName)
|
||||
args = args.slice(1)
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (args.length === 0) {
|
||||
showHelp()
|
||||
|
|
@ -99,10 +111,18 @@ async function main() {
|
|||
process.exit(1)
|
||||
}
|
||||
|
||||
try { mkdirSync('/tmp/shrimp') } catch { }
|
||||
const path = `/tmp/shrimp/${randomUUID()}.sh`
|
||||
writeFileSync(path, code)
|
||||
console.log(await runFile(path))
|
||||
await evalCode(code, imports)
|
||||
return
|
||||
}
|
||||
|
||||
if (['print', '-print', '--print', '-E'].includes(command)) {
|
||||
const code = args[1]
|
||||
if (!code) {
|
||||
console.log(`${colors.bright}usage: shrimp print <code>${colors.reset}`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
console.log(await evalCode(code, imports))
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -112,7 +132,7 @@ async function main() {
|
|||
console.log(`${colors.bright}usage: shrimp bytecode <file>${colors.reset}`)
|
||||
process.exit(1)
|
||||
}
|
||||
console.log(await compileFile(file))
|
||||
console.log(bytecodeToString(compileFile(file)))
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -122,7 +142,8 @@ async function main() {
|
|||
console.log(`${colors.bright}usage: shrimp parse <file>${colors.reset}`)
|
||||
process.exit(1)
|
||||
}
|
||||
console.log(await parseFile(file))
|
||||
const input = readFileSync(file, 'utf-8')
|
||||
console.log(treeToString(parseCode(input), input))
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -132,10 +153,12 @@ async function main() {
|
|||
console.log(`${colors.bright}usage: shrimp run <file>${colors.reset}`)
|
||||
process.exit(1)
|
||||
}
|
||||
prelude.$.script.path = resolve(file)
|
||||
await runFile(file)
|
||||
return
|
||||
}
|
||||
|
||||
prelude.$.script.path = resolve(command)
|
||||
await runFile(command)
|
||||
}
|
||||
|
||||
|
|
|
|||
8
bun.lock
8
bun.lock
|
|
@ -44,7 +44,7 @@
|
|||
|
||||
"@marijn/find-cluster-break": ["@marijn/find-cluster-break@1.0.2", "", {}, "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g=="],
|
||||
|
||||
"@types/bun": ["@types/bun@1.3.1", "", { "dependencies": { "bun-types": "1.3.1" } }, "sha512-4jNMk2/K9YJtfqwoAa28c8wK+T7nvJFOjxI4h/7sORWcypRNxBpr+TPNaCfVWq70tLCJsqoFwcf0oI0JU/fvMQ=="],
|
||||
"@types/bun": ["@types/bun@1.3.2", "", { "dependencies": { "bun-types": "1.3.2" } }, "sha512-t15P7k5UIgHKkxwnMNkJbWlh/617rkDGEdSsDbu+qNHTaz9SKf7aC8fiIlUdD5RPpH6GEkP0cK7WlvmrEBRtWg=="],
|
||||
|
||||
"@types/node": ["@types/node@24.10.0", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-qzQZRBqkFsYyaSWXuEHc2WR9c0a0CXwiE5FWUvn7ZM+vdy1uZLfCunD38UzhuB7YN/J11ndbDBcTmOdxJo9Q7A=="],
|
||||
|
||||
|
|
@ -52,7 +52,7 @@
|
|||
|
||||
"bun-plugin-tailwind": ["bun-plugin-tailwind@0.0.15", "", { "peerDependencies": { "typescript": "^5.0.0" } }, "sha512-qtAXMNGG4R0UGGI8zWrqm2B7BdXqx48vunJXBPzfDOHPA5WkRUZdTSbE7TFwO4jLhYqSE23YMWsM9NhE6ovobw=="],
|
||||
|
||||
"bun-types": ["bun-types@1.3.1", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-NMrcy7smratanWJ2mMXdpatalovtxVggkj11bScuWuiOoXTiKIu2eVS1/7qbyI/4yHedtsn175n4Sm4JcdHLXw=="],
|
||||
"bun-types": ["bun-types@1.3.2", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-i/Gln4tbzKNuxP70OWhJRZz1MRfvqExowP7U6JKoI8cntFrtxg7RJK3jvz7wQW54UuvNC8tbKHHri5fy74FVqg=="],
|
||||
|
||||
"codemirror": ["codemirror@6.0.2", "", { "dependencies": { "@codemirror/autocomplete": "^6.0.0", "@codemirror/commands": "^6.0.0", "@codemirror/language": "^6.0.0", "@codemirror/lint": "^6.0.0", "@codemirror/search": "^6.0.0", "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.0.0" } }, "sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw=="],
|
||||
|
||||
|
|
@ -62,11 +62,11 @@
|
|||
|
||||
"hono": ["hono@4.10.4", "", {}, "sha512-YG/fo7zlU3KwrBL5vDpWKisLYiM+nVstBQqfr7gCPbSYURnNEP9BDxEMz8KfsDR9JX0lJWDRNc6nXX31v7ZEyg=="],
|
||||
|
||||
"reefvm": ["reefvm@git+https://git.nose.space/defunkt/reefvm#bffb83a5280a4d74e424c4e0f4fbd46f790227a3", { "peerDependencies": { "typescript": "^5" } }, "bffb83a5280a4d74e424c4e0f4fbd46f790227a3"],
|
||||
"reefvm": ["reefvm@git+https://git.nose.space/defunkt/reefvm#3e2e68b31f504347225a4d705c7568a0957d629e", { "peerDependencies": { "typescript": "^5" } }, "3e2e68b31f504347225a4d705c7568a0957d629e"],
|
||||
|
||||
"style-mod": ["style-mod@4.1.3", "", {}, "sha512-i/n8VsZydrugj3Iuzll8+x/00GH2vnYsk1eomD8QiRrSAeW6ItbCQDtfXCeJHd0iwiNagqjQkvpvREEPtW3IoQ=="],
|
||||
|
||||
"tailwindcss": ["tailwindcss@4.1.16", "", {}, "sha512-pONL5awpaQX4LN5eiv7moSiSPd/DLDzKVRJz8Q9PgzmAdd1R4307GQS2ZpfiN7ZmekdQrfhZZiSE5jkLR4WNaA=="],
|
||||
"tailwindcss": ["tailwindcss@4.1.17", "", {}, "sha512-j9Ee2YjuQqYT9bbRTfTZht9W/ytp5H+jJpZKiYdP/bpnXARAuELt9ofP0lPnmHjbga7SNQIxdTAXCmtKVYjN+Q=="],
|
||||
|
||||
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||
|
||||
|
|
|
|||
1
examples/find.shrimp
Normal file
1
examples/find.shrimp
Normal file
|
|
@ -0,0 +1 @@
|
|||
echo
|
||||
|
|
@ -8,7 +8,9 @@
|
|||
"dev": "bun generate-parser && bun --hot src/server/server.tsx",
|
||||
"generate-parser": "lezer-generator src/parser/shrimp.grammar --typeScript -o src/parser/shrimp.ts",
|
||||
"repl": "bun generate-parser && bun bin/repl",
|
||||
"update-reef": "rm -rf ~/.bun/install/cache/ && rm bun.lock && bun update reefvm"
|
||||
"update-reef": "rm -rf ~/.bun/install/cache/ && rm bun.lock && bun update reefvm",
|
||||
"cli:install": "ln -s \"$(pwd)/bin/shrimp\" ~/.bun/bin/shrimp",
|
||||
"cli:remove": "rm ~/.bun/bin/shrimp"
|
||||
},
|
||||
"dependencies": {
|
||||
"@codemirror/view": "^6.38.3",
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import { CompilerError } from '#compiler/compilerError.ts'
|
|||
import { parser } from '#parser/shrimp.ts'
|
||||
import * as terms from '#parser/shrimp.terms'
|
||||
import { setGlobals } from '#parser/tokenizer'
|
||||
import { tokenizeCurlyString } from '#parser/curlyTokenizer'
|
||||
import type { SyntaxNode, Tree } from '@lezer/common'
|
||||
import { assert, errorMessage } from '#utils/utils'
|
||||
import { toBytecode, type Bytecode, type ProgramItem, bytecodeToString } from 'reefvm'
|
||||
|
|
@ -51,6 +52,7 @@ function processEscapeSeq(escapeSeq: string): string {
|
|||
|
||||
export class Compiler {
|
||||
instructions: ProgramItem[] = []
|
||||
labelCount = 0
|
||||
fnLabelCount = 0
|
||||
ifLabelCount = 0
|
||||
tryLabelCount = 0
|
||||
|
|
@ -58,9 +60,9 @@ export class Compiler {
|
|||
bytecode: Bytecode
|
||||
pipeCounter = 0
|
||||
|
||||
constructor(public input: string, globals?: string[]) {
|
||||
constructor(public input: string, globals?: string[] | Record<string, any>) {
|
||||
try {
|
||||
if (globals) setGlobals(globals)
|
||||
if (globals) setGlobals(Array.isArray(globals) ? globals : Object.keys(globals))
|
||||
const cst = parser.parse(input)
|
||||
const errors = checkTreeForErrors(cst)
|
||||
|
||||
|
|
@ -105,13 +107,26 @@ export class Compiler {
|
|||
|
||||
switch (node.type.id) {
|
||||
case terms.Number:
|
||||
const number = Number(value)
|
||||
if (Number.isNaN(number))
|
||||
// Handle sign prefix for hex, binary, and octal literals
|
||||
// Number() doesn't parse '-0xFF', '+0xFF', '-0o77', etc. correctly
|
||||
let numberValue: number
|
||||
if (value.startsWith('-') && (value.includes('0x') || value.includes('0b') || value.includes('0o'))) {
|
||||
numberValue = -Number(value.slice(1))
|
||||
} else if (value.startsWith('+') && (value.includes('0x') || value.includes('0b') || value.includes('0o'))) {
|
||||
numberValue = Number(value.slice(1))
|
||||
} else {
|
||||
numberValue = Number(value)
|
||||
}
|
||||
|
||||
if (Number.isNaN(numberValue))
|
||||
throw new CompilerError(`Invalid number literal: ${value}`, node.from, node.to)
|
||||
|
||||
return [[`PUSH`, number]]
|
||||
return [[`PUSH`, numberValue]]
|
||||
|
||||
case terms.String: {
|
||||
if (node.firstChild?.type.id === terms.CurlyString)
|
||||
return this.#compileCurlyString(value, input)
|
||||
|
||||
const { parts, hasInterpolation } = getStringParts(node, input)
|
||||
|
||||
// Simple string without interpolation or escapes - extract text directly
|
||||
|
|
@ -196,16 +211,37 @@ export class Compiler {
|
|||
}
|
||||
|
||||
case terms.DotGet: {
|
||||
// DotGet is parsed into a nested tree because it's hard to parse it into a flat one.
|
||||
// However, we want a flat tree - so we're going to pretend like we are getting one from the parser.
|
||||
//
|
||||
// This: DotGet(config, DotGet(script, name))
|
||||
// Becomes: DotGet(config, script, name)
|
||||
const { objectName, property } = getDotGetParts(node, input)
|
||||
const instructions: ProgramItem[] = []
|
||||
|
||||
instructions.push(['TRY_LOAD', objectName])
|
||||
if (property.type.id === terms.ParenExpr) {
|
||||
instructions.push(...this.#compileNode(property, input))
|
||||
} else {
|
||||
const propertyValue = input.slice(property.from, property.to)
|
||||
instructions.push(['PUSH', propertyValue])
|
||||
|
||||
const flattenProperty = (prop: SyntaxNode): void => {
|
||||
if (prop.type.id === terms.DotGet) {
|
||||
const nestedParts = getDotGetParts(prop, input)
|
||||
|
||||
const nestedObjectValue = input.slice(nestedParts.object.from, nestedParts.object.to)
|
||||
instructions.push(['PUSH', nestedObjectValue])
|
||||
instructions.push(['DOT_GET'])
|
||||
|
||||
flattenProperty(nestedParts.property)
|
||||
} else {
|
||||
if (prop.type.id === terms.ParenExpr) {
|
||||
instructions.push(...this.#compileNode(prop, input))
|
||||
} else {
|
||||
const propertyValue = input.slice(prop.from, prop.to)
|
||||
instructions.push(['PUSH', propertyValue])
|
||||
}
|
||||
instructions.push(['DOT_GET'])
|
||||
}
|
||||
}
|
||||
instructions.push(['DOT_GET'])
|
||||
|
||||
flattenProperty(property)
|
||||
return instructions
|
||||
}
|
||||
|
||||
|
|
@ -232,6 +268,24 @@ export class Compiler {
|
|||
case '%':
|
||||
instructions.push(['MOD'])
|
||||
break
|
||||
case 'band':
|
||||
instructions.push(['BIT_AND'])
|
||||
break
|
||||
case 'bor':
|
||||
instructions.push(['BIT_OR'])
|
||||
break
|
||||
case 'bxor':
|
||||
instructions.push(['BIT_XOR'])
|
||||
break
|
||||
case '<<':
|
||||
instructions.push(['BIT_SHL'])
|
||||
break
|
||||
case '>>':
|
||||
instructions.push(['BIT_SHR'])
|
||||
break
|
||||
case '>>>':
|
||||
instructions.push(['BIT_USHR'])
|
||||
break
|
||||
default:
|
||||
throw new CompilerError(`Unsupported binary operator: ${opValue}`, op.from, op.to)
|
||||
}
|
||||
|
|
@ -274,13 +328,33 @@ export class Compiler {
|
|||
const { identifier, operator, right } = getCompoundAssignmentParts(node)
|
||||
const identifierName = input.slice(identifier.from, identifier.to)
|
||||
const instructions: ProgramItem[] = []
|
||||
const opValue = input.slice(operator.from, operator.to)
|
||||
|
||||
// will throw if undefined
|
||||
instructions.push(['LOAD', identifierName])
|
||||
// Special handling for ??= since it needs conditional evaluation
|
||||
if (opValue === '??=') {
|
||||
instructions.push(['LOAD', identifierName])
|
||||
|
||||
const skipLabel: Label = `.skip_${this.labelCount++}`
|
||||
const rightInstructions = this.#compileNode(right, input)
|
||||
|
||||
instructions.push(['DUP'])
|
||||
instructions.push(['PUSH', null])
|
||||
instructions.push(['NEQ'])
|
||||
instructions.push(['JUMP_IF_TRUE', skipLabel])
|
||||
instructions.push(['POP'])
|
||||
instructions.push(...rightInstructions)
|
||||
|
||||
instructions.push([`${skipLabel}:`])
|
||||
instructions.push(['DUP'])
|
||||
instructions.push(['STORE', identifierName])
|
||||
|
||||
return instructions
|
||||
}
|
||||
|
||||
// Standard compound assignments: evaluate both sides, then operate
|
||||
instructions.push(['LOAD', identifierName]) // will throw if undefined
|
||||
instructions.push(...this.#compileNode(right, input))
|
||||
|
||||
const opValue = input.slice(operator.from, operator.to)
|
||||
switch (opValue) {
|
||||
case '+=':
|
||||
instructions.push(['ADD'])
|
||||
|
|
@ -367,7 +441,29 @@ export class Compiler {
|
|||
|
||||
case terms.FunctionCallOrIdentifier: {
|
||||
if (node.firstChild?.type.id === terms.DotGet) {
|
||||
return this.#compileNode(node.firstChild, input)
|
||||
const instructions: ProgramItem[] = []
|
||||
const callLabel: Label = `.call_dotget_${++this.labelCount}`
|
||||
const afterLabel: Label = `.after_dotget_${++this.labelCount}`
|
||||
|
||||
instructions.push(...this.#compileNode(node.firstChild, input))
|
||||
instructions.push(['DUP'])
|
||||
instructions.push(['TYPE'])
|
||||
instructions.push(['PUSH', 'function'])
|
||||
instructions.push(['EQ'])
|
||||
instructions.push(['JUMP_IF_TRUE', callLabel])
|
||||
instructions.push(['DUP'])
|
||||
instructions.push(['TYPE'])
|
||||
instructions.push(['PUSH', 'native'])
|
||||
instructions.push(['EQ'])
|
||||
instructions.push(['JUMP_IF_TRUE', callLabel])
|
||||
instructions.push(['JUMP', afterLabel])
|
||||
instructions.push([`${callLabel}:`])
|
||||
instructions.push(['PUSH', 0])
|
||||
instructions.push(['PUSH', 0])
|
||||
instructions.push(['CALL'])
|
||||
instructions.push([`${afterLabel}:`])
|
||||
|
||||
return instructions
|
||||
}
|
||||
|
||||
return [['TRY_CALL', value]]
|
||||
|
|
@ -386,6 +482,7 @@ export class Compiler {
|
|||
PUSH 1 ; Named count
|
||||
CALL
|
||||
*/
|
||||
case terms.FunctionCallWithNewlines:
|
||||
case terms.FunctionCall: {
|
||||
const { identifierNode, namedArgs, positionalArgs } = getFunctionCallParts(node, input)
|
||||
const instructions: ProgramItem[] = []
|
||||
|
|
@ -509,19 +606,24 @@ export class Compiler {
|
|||
instructions.push(...this.#compileNode(conditionNode, input))
|
||||
this.ifLabelCount++
|
||||
const endLabel: Label = `.end_${this.ifLabelCount}`
|
||||
const elseLabel: Label = `.else_${this.ifLabelCount}`
|
||||
|
||||
const thenBlockInstructions = this.#compileNode(thenBlock, input)
|
||||
instructions.push(['JUMP_IF_FALSE', thenBlockInstructions.length + 1])
|
||||
instructions.push(['JUMP_IF_FALSE', elseLabel])
|
||||
instructions.push(...thenBlockInstructions)
|
||||
instructions.push(['JUMP', endLabel])
|
||||
|
||||
instructions.push([`${elseLabel}:`])
|
||||
|
||||
// Else if
|
||||
elseIfBlocks.forEach(({ conditional, thenBlock }) => {
|
||||
elseIfBlocks.forEach(({ conditional, thenBlock }, index) => {
|
||||
instructions.push(...this.#compileNode(conditional, input))
|
||||
const nextLabel: Label = `.elsif_${this.ifLabelCount}_${index}`
|
||||
const elseIfInstructions = this.#compileNode(thenBlock, input)
|
||||
instructions.push(['JUMP_IF_FALSE', elseIfInstructions.length + 1])
|
||||
instructions.push(['JUMP_IF_FALSE', nextLabel])
|
||||
instructions.push(...elseIfInstructions)
|
||||
instructions.push(['JUMP', endLabel])
|
||||
instructions.push([`${nextLabel}:`])
|
||||
})
|
||||
|
||||
// Else
|
||||
|
|
@ -570,22 +672,41 @@ export class Compiler {
|
|||
instructions.push(...leftInstructions, ...rightInstructions, ['GTE'])
|
||||
break
|
||||
|
||||
case 'and':
|
||||
case 'and': {
|
||||
const skipLabel: Label = `.skip_${this.labelCount++}`
|
||||
instructions.push(...leftInstructions)
|
||||
instructions.push(['DUP'])
|
||||
instructions.push(['JUMP_IF_FALSE', rightInstructions.length + 1])
|
||||
instructions.push(['JUMP_IF_FALSE', skipLabel])
|
||||
instructions.push(['POP'])
|
||||
instructions.push(...rightInstructions)
|
||||
instructions.push([`${skipLabel}:`])
|
||||
break
|
||||
}
|
||||
|
||||
case 'or':
|
||||
case 'or': {
|
||||
const skipLabel: Label = `.skip_${this.labelCount++}`
|
||||
instructions.push(...leftInstructions)
|
||||
instructions.push(['DUP'])
|
||||
instructions.push(['JUMP_IF_TRUE', rightInstructions.length + 1])
|
||||
instructions.push(['JUMP_IF_TRUE', skipLabel])
|
||||
instructions.push(['POP'])
|
||||
instructions.push(...rightInstructions)
|
||||
|
||||
instructions.push([`${skipLabel}:`])
|
||||
break
|
||||
}
|
||||
|
||||
case '??': {
|
||||
// Nullish coalescing: return left if not null, else right
|
||||
const skipLabel: Label = `.skip_${this.labelCount++}`
|
||||
instructions.push(...leftInstructions)
|
||||
instructions.push(['DUP'])
|
||||
instructions.push(['PUSH', null])
|
||||
instructions.push(['NEQ'])
|
||||
instructions.push(['JUMP_IF_TRUE', skipLabel])
|
||||
instructions.push(['POP'])
|
||||
instructions.push(...rightInstructions)
|
||||
instructions.push([`${skipLabel}:`])
|
||||
break
|
||||
}
|
||||
|
||||
default:
|
||||
throw new CompilerError(`Unsupported conditional operator: ${opValue}`, op.from, op.to)
|
||||
|
|
@ -712,13 +833,38 @@ export class Compiler {
|
|||
return instructions
|
||||
}
|
||||
|
||||
case terms.Import: {
|
||||
const instructions: ProgramItem[] = []
|
||||
const [_import, ...nodes] = getAllChildren(node)
|
||||
const args = nodes.filter(node => node.type.id === terms.Identifier)
|
||||
const namedArgs = nodes.filter(node => node.type.id === terms.NamedArg)
|
||||
|
||||
instructions.push(['LOAD', 'import'])
|
||||
|
||||
args.forEach((dict) =>
|
||||
instructions.push(['PUSH', input.slice(dict.from, dict.to)])
|
||||
)
|
||||
|
||||
namedArgs.forEach((arg) => {
|
||||
const { name, valueNode } = getNamedArgParts(arg, input)
|
||||
instructions.push(['PUSH', name])
|
||||
instructions.push(...this.#compileNode(valueNode, input))
|
||||
})
|
||||
|
||||
instructions.push(['PUSH', args.length])
|
||||
instructions.push(['PUSH', namedArgs.length])
|
||||
instructions.push(['CALL'])
|
||||
|
||||
return instructions
|
||||
}
|
||||
|
||||
case terms.Comment: {
|
||||
return [] // ignore comments
|
||||
}
|
||||
|
||||
default:
|
||||
throw new CompilerError(
|
||||
`Compiler doesn't know how to handle a "${node.type.name}" node.`,
|
||||
`Compiler doesn't know how to handle a "${node.type.name}" (${node.type.id}) node.`,
|
||||
node.from,
|
||||
node.to
|
||||
)
|
||||
|
|
@ -772,4 +918,26 @@ export class Compiler {
|
|||
|
||||
return instructions
|
||||
}
|
||||
|
||||
#compileCurlyString(value: string, input: string): ProgramItem[] {
|
||||
const instructions: ProgramItem[] = []
|
||||
const nodes = tokenizeCurlyString(value)
|
||||
|
||||
nodes.forEach((node) => {
|
||||
if (typeof node === 'string') {
|
||||
instructions.push(['PUSH', node])
|
||||
} else {
|
||||
const [input, topNode] = node
|
||||
let child = topNode.firstChild
|
||||
while (child) {
|
||||
instructions.push(...this.#compileNode(child, input))
|
||||
child = child.nextSibling
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
instructions.push(['STR_CONCAT', nodes.length])
|
||||
|
||||
return instructions
|
||||
}
|
||||
}
|
||||
|
|
|
|||
178
src/compiler/tests/bitwise.test.ts
Normal file
178
src/compiler/tests/bitwise.test.ts
Normal file
|
|
@ -0,0 +1,178 @@
|
|||
import { expect, describe, test } from 'bun:test'
|
||||
|
||||
describe('bitwise operators', () => {
|
||||
describe('band (bitwise AND)', () => {
|
||||
test('basic AND operation', () => {
|
||||
expect('5 band 3').toEvaluateTo(1)
|
||||
// 5 = 0101, 3 = 0011, result = 0001 = 1
|
||||
})
|
||||
|
||||
test('AND with zero', () => {
|
||||
expect('5 band 0').toEvaluateTo(0)
|
||||
})
|
||||
|
||||
test('AND with all bits set', () => {
|
||||
expect('15 band 7').toEvaluateTo(7)
|
||||
// 15 = 1111, 7 = 0111, result = 0111 = 7
|
||||
})
|
||||
|
||||
test('AND in assignment', () => {
|
||||
expect('x = 12 band 10').toEvaluateTo(8)
|
||||
// 12 = 1100, 10 = 1010, result = 1000 = 8
|
||||
})
|
||||
})
|
||||
|
||||
describe('bor (bitwise OR)', () => {
|
||||
test('basic OR operation', () => {
|
||||
expect('5 bor 3').toEvaluateTo(7)
|
||||
// 5 = 0101, 3 = 0011, result = 0111 = 7
|
||||
})
|
||||
|
||||
test('OR with zero', () => {
|
||||
expect('5 bor 0').toEvaluateTo(5)
|
||||
})
|
||||
|
||||
test('OR with all bits set', () => {
|
||||
expect('8 bor 4').toEvaluateTo(12)
|
||||
// 8 = 1000, 4 = 0100, result = 1100 = 12
|
||||
})
|
||||
})
|
||||
|
||||
describe('bxor (bitwise XOR)', () => {
|
||||
test('basic XOR operation', () => {
|
||||
expect('5 bxor 3').toEvaluateTo(6)
|
||||
// 5 = 0101, 3 = 0011, result = 0110 = 6
|
||||
})
|
||||
|
||||
test('XOR with itself returns zero', () => {
|
||||
expect('5 bxor 5').toEvaluateTo(0)
|
||||
})
|
||||
|
||||
test('XOR with zero returns same value', () => {
|
||||
expect('7 bxor 0').toEvaluateTo(7)
|
||||
})
|
||||
|
||||
test('XOR in assignment', () => {
|
||||
expect('result = 8 bxor 12').toEvaluateTo(4)
|
||||
// 8 = 1000, 12 = 1100, result = 0100 = 4
|
||||
})
|
||||
})
|
||||
|
||||
describe('bnot (bitwise NOT)', () => {
|
||||
test('NOT of positive number', () => {
|
||||
expect('bnot 5').toEvaluateTo(-6)
|
||||
// ~5 = -6 (two\'s complement)
|
||||
})
|
||||
|
||||
test('NOT of zero', () => {
|
||||
expect('bnot 0').toEvaluateTo(-1)
|
||||
})
|
||||
|
||||
test('NOT of negative number', () => {
|
||||
expect('bnot -1').toEvaluateTo(0)
|
||||
})
|
||||
|
||||
test('double NOT returns original', () => {
|
||||
expect('bnot (bnot 5)').toEvaluateTo(5)
|
||||
})
|
||||
})
|
||||
|
||||
describe('<< (left shift)', () => {
|
||||
test('basic left shift', () => {
|
||||
expect('5 << 2').toEvaluateTo(20)
|
||||
// 5 << 2 = 20
|
||||
})
|
||||
|
||||
test('shift by zero', () => {
|
||||
expect('5 << 0').toEvaluateTo(5)
|
||||
})
|
||||
|
||||
test('shift by one', () => {
|
||||
expect('3 << 1').toEvaluateTo(6)
|
||||
})
|
||||
|
||||
test('large shift', () => {
|
||||
expect('1 << 10').toEvaluateTo(1024)
|
||||
})
|
||||
})
|
||||
|
||||
describe('>> (signed right shift)', () => {
|
||||
test('basic right shift', () => {
|
||||
expect('20 >> 2').toEvaluateTo(5)
|
||||
// 20 >> 2 = 5
|
||||
})
|
||||
|
||||
test('shift by zero', () => {
|
||||
expect('20 >> 0').toEvaluateTo(20)
|
||||
})
|
||||
|
||||
test('preserves sign for negative numbers', () => {
|
||||
expect('-20 >> 2').toEvaluateTo(-5)
|
||||
// Sign is preserved
|
||||
})
|
||||
|
||||
test('negative number right shift', () => {
|
||||
expect('-8 >> 1').toEvaluateTo(-4)
|
||||
})
|
||||
})
|
||||
|
||||
describe('>>> (unsigned right shift)', () => {
|
||||
test('basic unsigned right shift', () => {
|
||||
expect('20 >>> 2').toEvaluateTo(5)
|
||||
})
|
||||
|
||||
test('unsigned shift of -1', () => {
|
||||
expect('-1 >>> 1').toEvaluateTo(2147483647)
|
||||
// -1 >>> 1 = 2147483647 (unsigned, no sign extension)
|
||||
})
|
||||
|
||||
test('unsigned shift of negative number', () => {
|
||||
expect('-8 >>> 1').toEvaluateTo(2147483644)
|
||||
})
|
||||
})
|
||||
|
||||
describe('compound expressions', () => {
|
||||
test('multiple bitwise operations', () => {
|
||||
expect('(5 band 3) bor (8 bxor 12)').toEvaluateTo(5)
|
||||
// (5 & 3) | (8 ^ 12) = 1 | 4 = 5
|
||||
})
|
||||
|
||||
test('bitwise with variables', () => {
|
||||
expect(`
|
||||
a = 5
|
||||
b = 3
|
||||
a bor b
|
||||
`).toEvaluateTo(7)
|
||||
})
|
||||
|
||||
test('shift operations with variables', () => {
|
||||
expect(`
|
||||
x = 16
|
||||
y = 2
|
||||
x >> y
|
||||
`).toEvaluateTo(4)
|
||||
})
|
||||
|
||||
test('mixing shifts and bitwise', () => {
|
||||
expect('(8 << 1) band 15').toEvaluateTo(0)
|
||||
// (8 << 1) & 15 = 16 & 15 = 0
|
||||
})
|
||||
|
||||
test('mixing shifts and bitwise 2', () => {
|
||||
expect('(7 << 1) band 15').toEvaluateTo(14)
|
||||
// (7 << 1) & 15 = 14 & 15 = 14
|
||||
})
|
||||
})
|
||||
|
||||
describe('precedence', () => {
|
||||
test('bitwise has correct precedence with arithmetic', () => {
|
||||
expect('1 + 2 band 3').toEvaluateTo(3)
|
||||
// (1 + 2) & 3 = 3 & 3 = 3
|
||||
})
|
||||
|
||||
test('shift has correct precedence', () => {
|
||||
expect('4 + 8 << 1').toEvaluateTo(24)
|
||||
// (4 + 8) << 1 = 12 << 1 = 24
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
@ -110,7 +110,10 @@ describe('compiler', () => {
|
|||
})
|
||||
|
||||
test('function call with no args', () => {
|
||||
expect(`bloop = do: 'bloop' end; bloop`).toEvaluateTo('bloop')
|
||||
expect(`bloop = do: 'bleep' end; bloop`).toEvaluateTo('bleep')
|
||||
expect(`bloop = [ go=do: 'bleep' end ]; bloop.go`).toEvaluateTo('bleep')
|
||||
expect(`bloop = [ go=do: 'bleep' end ]; abc = do x: x end; abc (bloop.go)`).toEvaluateTo('bleep')
|
||||
expect(`num = ((math.random) * 10 + 1) | math.floor; num >= 1 and num <= 10 `).toEvaluateTo(true)
|
||||
})
|
||||
|
||||
test('function call with if statement and multiple expressions', () => {
|
||||
|
|
@ -185,6 +188,16 @@ describe('compiler', () => {
|
|||
test('single line if', () => {
|
||||
expect(`if 3 < 9: shire end`).toEvaluateTo('shire')
|
||||
})
|
||||
|
||||
test('if statement with function definition (bytecode labels)', () => {
|
||||
expect(`
|
||||
if false:
|
||||
abc = do x: x end
|
||||
else:
|
||||
nope
|
||||
end
|
||||
`).toEvaluateTo('nope')
|
||||
})
|
||||
})
|
||||
|
||||
describe('errors', () => {
|
||||
|
|
@ -281,6 +294,43 @@ describe('dot get', () => {
|
|||
test('use parens expr with dot-get', () => {
|
||||
expect(`a = 1; arr = array 'a' 'b' 'c'; arr.(1 + a)`).toEvaluateTo('c', { array })
|
||||
})
|
||||
|
||||
test('chained dot get: two levels', () => {
|
||||
expect(`obj = [inner=[value=42]]; obj.inner.value`).toEvaluateTo(42)
|
||||
})
|
||||
|
||||
test('chained dot get: three levels', () => {
|
||||
expect(`obj = [a=[b=[c=123]]]; obj.a.b.c`).toEvaluateTo(123)
|
||||
})
|
||||
|
||||
test('chained dot get: four levels', () => {
|
||||
expect(`obj = [w=[x=[y=[z='deep']]]]; obj.w.x.y.z`).toEvaluateTo('deep')
|
||||
})
|
||||
|
||||
test('chained dot get with numeric index', () => {
|
||||
expect(`obj = [items=[1 2 3]]; obj.items.0`).toEvaluateTo(1)
|
||||
})
|
||||
|
||||
test('chained dot get in expression', () => {
|
||||
expect(`config = [server=[port=3000]]; config.server.port + 1`).toEvaluateTo(3001)
|
||||
})
|
||||
|
||||
test('chained dot get as function argument', () => {
|
||||
const double = (x: number) => x * 2
|
||||
expect(`obj = [val=[num=21]]; double obj.val.num`).toEvaluateTo(42, { double })
|
||||
})
|
||||
|
||||
test('chained dot get in binary operation', () => {
|
||||
expect(`a = [x=[y=10]]; b = [x=[y=20]]; a.x.y + b.x.y`).toEvaluateTo(30)
|
||||
})
|
||||
|
||||
test('chained dot get with parens at end', () => {
|
||||
expect(`idx = 1; obj = [items=[10 20 30]]; obj.items.(idx)`).toEvaluateTo(20)
|
||||
})
|
||||
|
||||
test('mixed chained and simple dot get', () => {
|
||||
expect(`obj = [a=1 b=[c=2]]; obj.a + obj.b.c`).toEvaluateTo(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe('default params', () => {
|
||||
|
|
@ -298,6 +348,23 @@ describe('default params', () => {
|
|||
expect('multiply = do x y=5: x * y end; multiply 5 2').toEvaluateTo(10)
|
||||
})
|
||||
|
||||
test('null triggers default value', () => {
|
||||
expect('test = do n=true: n end; test').toEvaluateTo(true)
|
||||
expect('test = do n=true: n end; test false').toEvaluateTo(false)
|
||||
expect('test = do n=true: n end; test null').toEvaluateTo(true)
|
||||
})
|
||||
|
||||
test('null triggers default for named parameters', () => {
|
||||
expect("greet = do name='World': name end; greet name=null").toEvaluateTo('World')
|
||||
expect("greet = do name='World': name end; greet name='Bob'").toEvaluateTo('Bob')
|
||||
})
|
||||
|
||||
test('null triggers default with multiple parameters', () => {
|
||||
expect('calc = do x=10 y=20: x + y end; calc null 5').toEvaluateTo(15)
|
||||
expect('calc = do x=10 y=20: x + y end; calc 3 null').toEvaluateTo(23)
|
||||
expect('calc = do x=10 y=20: x + y end; calc null null').toEvaluateTo(30)
|
||||
})
|
||||
|
||||
test.skip('array default', () => {
|
||||
expect('abc = do alpha=[a b c]: alpha end; abc').toEvaluateTo(['a', 'b', 'c'])
|
||||
expect('abc = do alpha=[a b c]: alpha end; abc [x y z]').toEvaluateTo(['x', 'y', 'z'])
|
||||
|
|
@ -313,3 +380,146 @@ describe('default params', () => {
|
|||
).toEvaluateTo({ name: 'Jon', age: 21 })
|
||||
})
|
||||
})
|
||||
|
||||
describe('Nullish coalescing operator (??)', () => {
|
||||
test('returns left side when not null', () => {
|
||||
expect('5 ?? 10').toEvaluateTo(5)
|
||||
})
|
||||
|
||||
test('returns right side when left is null', () => {
|
||||
expect('null ?? 10').toEvaluateTo(10)
|
||||
})
|
||||
|
||||
test('returns left side when left is false', () => {
|
||||
expect('false ?? 10').toEvaluateTo(false)
|
||||
})
|
||||
|
||||
test('returns left side when left is 0', () => {
|
||||
expect('0 ?? 10').toEvaluateTo(0)
|
||||
})
|
||||
|
||||
test('returns left side when left is empty string', () => {
|
||||
expect(`'' ?? 'default'`).toEvaluateTo('')
|
||||
})
|
||||
|
||||
test('chains left to right', () => {
|
||||
expect('null ?? null ?? 42').toEvaluateTo(42)
|
||||
expect('null ?? 10 ?? 20').toEvaluateTo(10)
|
||||
})
|
||||
|
||||
test('short-circuits evaluation', () => {
|
||||
const throwError = () => { throw new Error('Should not evaluate') }
|
||||
expect('5 ?? throw-error').toEvaluateTo(5, { 'throw-error': throwError })
|
||||
})
|
||||
|
||||
test('works with variables', () => {
|
||||
expect('x = null; x ?? 5').toEvaluateTo(5)
|
||||
expect('y = 3; y ?? 5').toEvaluateTo(3)
|
||||
})
|
||||
|
||||
test('works with function calls', () => {
|
||||
const getValue = () => null
|
||||
const getDefault = () => 42
|
||||
// Note: identifiers without parentheses refer to the function, not call it
|
||||
// Use explicit call syntax to invoke the function
|
||||
expect('(get-value) ?? (get-default)').toEvaluateTo(42, {
|
||||
'get-value': getValue,
|
||||
'get-default': getDefault
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Nullish coalescing assignment (??=)', () => {
|
||||
test('assigns when variable is null', () => {
|
||||
expect('x = null; x ??= 5; x').toEvaluateTo(5)
|
||||
})
|
||||
|
||||
test('does not assign when variable is not null', () => {
|
||||
expect('x = 3; x ??= 10; x').toEvaluateTo(3)
|
||||
})
|
||||
|
||||
test('does not assign when variable is false', () => {
|
||||
expect('x = false; x ??= true; x').toEvaluateTo(false)
|
||||
})
|
||||
|
||||
test('does not assign when variable is 0', () => {
|
||||
expect('x = 0; x ??= 100; x').toEvaluateTo(0)
|
||||
})
|
||||
|
||||
test('does not assign when variable is empty string', () => {
|
||||
expect(`x = ''; x ??= 'default'; x`).toEvaluateTo('')
|
||||
})
|
||||
|
||||
test('returns the final value', () => {
|
||||
expect('x = null; x ??= 5').toEvaluateTo(5)
|
||||
expect('y = 3; y ??= 10').toEvaluateTo(3)
|
||||
})
|
||||
|
||||
test('short-circuits evaluation when not null', () => {
|
||||
const throwError = () => { throw new Error('Should not evaluate') }
|
||||
expect('x = 5; x ??= throw-error; x').toEvaluateTo(5, { 'throw-error': throwError })
|
||||
})
|
||||
|
||||
test('works with expressions', () => {
|
||||
expect('x = null; x ??= 2 + 3; x').toEvaluateTo(5)
|
||||
})
|
||||
|
||||
test('works with function calls', () => {
|
||||
const getDefault = () => 42
|
||||
expect('x = null; x ??= (get-default); x').toEvaluateTo(42, { 'get-default': getDefault })
|
||||
})
|
||||
|
||||
test('throws when variable is undefined', () => {
|
||||
expect(() => expect('undefined-var ??= 5').toEvaluateTo(null)).toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Compound assignment operators', () => {
|
||||
test('+=', () => {
|
||||
expect('x = 5; x += 3; x').toEvaluateTo(8)
|
||||
})
|
||||
|
||||
test('-=', () => {
|
||||
expect('x = 10; x -= 4; x').toEvaluateTo(6)
|
||||
})
|
||||
|
||||
test('*=', () => {
|
||||
expect('x = 3; x *= 4; x').toEvaluateTo(12)
|
||||
})
|
||||
|
||||
test('/=', () => {
|
||||
expect('x = 20; x /= 5; x').toEvaluateTo(4)
|
||||
})
|
||||
|
||||
test('%=', () => {
|
||||
expect('x = 10; x %= 3; x').toEvaluateTo(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('import', () => {
|
||||
test('imports single dict', () => {
|
||||
expect(`import str; starts-with? abc a`).toEvaluateTo(true)
|
||||
})
|
||||
|
||||
test('imports multiple dicts', () => {
|
||||
expect(`import str math list; map [1 2 3] do x: x * 2 end`).toEvaluateTo([2, 4, 6])
|
||||
})
|
||||
|
||||
test('imports non-prelude dicts', () => {
|
||||
expect(`
|
||||
abc = [a=true b=yes c=si]
|
||||
import abc
|
||||
abc.b
|
||||
`).toEvaluateTo('yes')
|
||||
})
|
||||
|
||||
test('can specify imports', () => {
|
||||
expect(`import str only=ends-with?; ref ends-with? | function?`).toEvaluateTo(true)
|
||||
expect(`import str only=ends-with?; ref starts-with? | function?`).toEvaluateTo(false)
|
||||
expect(`
|
||||
abc = [a=true b=yes c=si]
|
||||
import abc only=[a c]
|
||||
[a c]
|
||||
`).toEvaluateTo([true, 'si'])
|
||||
})
|
||||
})
|
||||
|
|
@ -1,6 +1,55 @@
|
|||
import { describe } from 'bun:test'
|
||||
import { expect, test } from 'bun:test'
|
||||
|
||||
describe('number literals', () => {
|
||||
test('binary literals', () => {
|
||||
expect('0b110').toEvaluateTo(6)
|
||||
expect('0b1010').toEvaluateTo(10)
|
||||
expect('0b11111111').toEvaluateTo(255)
|
||||
expect('0b0').toEvaluateTo(0)
|
||||
expect('0b1').toEvaluateTo(1)
|
||||
})
|
||||
|
||||
test('hex literals', () => {
|
||||
expect('0xdeadbeef').toEvaluateTo(0xdeadbeef)
|
||||
expect('0xdeadbeef').toEvaluateTo(3735928559)
|
||||
expect('0xFF').toEvaluateTo(255)
|
||||
expect('0xff').toEvaluateTo(255)
|
||||
expect('0x10').toEvaluateTo(16)
|
||||
expect('0x0').toEvaluateTo(0)
|
||||
expect('0xABCDEF').toEvaluateTo(0xabcdef)
|
||||
})
|
||||
|
||||
test('octal literals', () => {
|
||||
expect('0o644').toEvaluateTo(420)
|
||||
expect('0o755').toEvaluateTo(493)
|
||||
expect('0o777').toEvaluateTo(511)
|
||||
expect('0o10').toEvaluateTo(8)
|
||||
expect('0o0').toEvaluateTo(0)
|
||||
expect('0o123').toEvaluateTo(83)
|
||||
})
|
||||
|
||||
test('decimal literals still work', () => {
|
||||
expect('42').toEvaluateTo(42)
|
||||
expect('3.14').toEvaluateTo(3.14)
|
||||
expect('0').toEvaluateTo(0)
|
||||
expect('999999').toEvaluateTo(999999)
|
||||
})
|
||||
|
||||
test('negative hex, binary, and octal', () => {
|
||||
expect('-0xFF').toEvaluateTo(-255)
|
||||
expect('-0b1010').toEvaluateTo(-10)
|
||||
expect('-0o755').toEvaluateTo(-493)
|
||||
})
|
||||
|
||||
test('positive prefix', () => {
|
||||
expect('+0xFF').toEvaluateTo(255)
|
||||
expect('+0b110').toEvaluateTo(6)
|
||||
expect('+0o644').toEvaluateTo(420)
|
||||
expect('+42').toEvaluateTo(42)
|
||||
})
|
||||
})
|
||||
|
||||
describe('array literals', () => {
|
||||
test('work with numbers', () => {
|
||||
expect('[1 2 3]').toEvaluateTo([1, 2, 3])
|
||||
|
|
@ -155,3 +204,69 @@ describe('dict literals', () => {
|
|||
c=3]`).toEvaluateTo({ a: 1, b: 2, c: 3 })
|
||||
})
|
||||
})
|
||||
|
||||
describe('curly strings', () => {
|
||||
test('work on one line', () => {
|
||||
expect('{ one two three }').toEvaluateTo(" one two three ")
|
||||
})
|
||||
|
||||
test('work on multiple lines', () => {
|
||||
expect(`{
|
||||
one
|
||||
two
|
||||
three
|
||||
}`).toEvaluateTo("\n one\n two\n three\n ")
|
||||
})
|
||||
|
||||
test('can contain other curlies', () => {
|
||||
expect(`{
|
||||
{ one }
|
||||
two
|
||||
{ three }
|
||||
}`).toEvaluateTo("\n { one }\n two\n { three }\n ")
|
||||
})
|
||||
|
||||
test('interpolates variables', () => {
|
||||
expect(`name = Bob; { Hello $name! }`).toEvaluateTo(` Hello Bob! `)
|
||||
})
|
||||
|
||||
test("doesn't interpolate escaped variables ", () => {
|
||||
expect(`name = Bob; { Hello \\$name }`).toEvaluateTo(` Hello $name `)
|
||||
expect(`a = 1; b = 2; { sum is \\$(a + b)! }`).toEvaluateTo(` sum is $(a + b)! `)
|
||||
})
|
||||
|
||||
test('interpolates expressions', () => {
|
||||
expect(`a = 1; b = 2; { sum is $(a + b)! }`).toEvaluateTo(` sum is 3! `)
|
||||
expect(`a = 1; b = 2; { sum is { $(a + b) }! }`).toEvaluateTo(` sum is { 3 }! `)
|
||||
expect(`a = 1; b = 2; { sum is $(a + (b * b))! }`).toEvaluateTo(` sum is 5! `)
|
||||
expect(`{ This is $({twisted}). }`).toEvaluateTo(` This is twisted. `)
|
||||
expect(`{ This is $({{twisted}}). }`).toEvaluateTo(` This is {twisted}. `)
|
||||
})
|
||||
|
||||
test('interpolation edge cases', () => {
|
||||
expect(`{[a=1 b=2 c={wild}]}`).toEvaluateTo(`[a=1 b=2 c={wild}]`)
|
||||
expect(`a = 1;b = 2;c = 3;{$a $b $c}`).toEvaluateTo(`1 2 3`)
|
||||
expect(`a = 1;b = 2;c = 3;{$a$b$c}`).toEvaluateTo(`123`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('double quoted strings', () => {
|
||||
test("work", () => {
|
||||
expect(`"hello world"`).toEvaluateTo('hello world')
|
||||
})
|
||||
|
||||
test("don't interpolate", () => {
|
||||
expect(`"hello $world"`).toEvaluateTo('hello $world')
|
||||
expect(`"hello $(1 + 2)"`).toEvaluateTo('hello $(1 + 2)')
|
||||
})
|
||||
|
||||
test("equal regular strings", () => {
|
||||
expect(`"hello world" == 'hello world'`).toEvaluateTo(true)
|
||||
})
|
||||
|
||||
test("can contain newlines", () => {
|
||||
expect(`
|
||||
"hello
|
||||
world"`).toEvaluateTo('hello\n world')
|
||||
})
|
||||
})
|
||||
|
|
@ -92,4 +92,29 @@ describe('pipe expressions', () => {
|
|||
get-msg | length
|
||||
`).toEvaluateTo(5)
|
||||
})
|
||||
|
||||
test('string literals can be piped', () => {
|
||||
expect(`'hey there' | str.to-upper`).toEvaluateTo('HEY THERE')
|
||||
})
|
||||
|
||||
test('number literals can be piped', () => {
|
||||
expect(`42 | str.trim`).toEvaluateTo('42')
|
||||
expect(`4.22 | str.trim`).toEvaluateTo('4.22')
|
||||
})
|
||||
|
||||
test('null literals can be piped', () => {
|
||||
expect(`null | type`).toEvaluateTo('null')
|
||||
})
|
||||
|
||||
test('boolean literals can be piped', () => {
|
||||
expect(`true | str.to-upper`).toEvaluateTo('TRUE')
|
||||
})
|
||||
|
||||
test('array literals can be piped', () => {
|
||||
expect(`[1 2 3] | str.join '-'`).toEvaluateTo('1-2-3')
|
||||
})
|
||||
|
||||
test('dict literals can be piped', () => {
|
||||
expect(`[a=1 b=2 c=3] | dict.values | list.sort | str.join '-'`).toEvaluateTo('1-2-3')
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -251,7 +251,9 @@ export const getStringParts = (node: SyntaxNode, input: string) => {
|
|||
return (
|
||||
child.type.id === terms.StringFragment ||
|
||||
child.type.id === terms.Interpolation ||
|
||||
child.type.id === terms.EscapeSeq
|
||||
child.type.id === terms.EscapeSeq ||
|
||||
child.type.id === terms.CurlyString
|
||||
|
||||
)
|
||||
})
|
||||
|
||||
|
|
@ -260,7 +262,8 @@ export const getStringParts = (node: SyntaxNode, input: string) => {
|
|||
if (
|
||||
part.type.id !== terms.StringFragment &&
|
||||
part.type.id !== terms.Interpolation &&
|
||||
part.type.id !== terms.EscapeSeq
|
||||
part.type.id !== terms.EscapeSeq &&
|
||||
part.type.id !== terms.CurlyString
|
||||
) {
|
||||
throw new CompilerError(
|
||||
`String child must be StringFragment, Interpolation, or EscapeSeq, got ${part.type.name}`,
|
||||
|
|
@ -290,7 +293,7 @@ export const getDotGetParts = (node: SyntaxNode, input: string) => {
|
|||
)
|
||||
}
|
||||
|
||||
if (object.type.id !== terms.IdentifierBeforeDot) {
|
||||
if (object.type.id !== terms.IdentifierBeforeDot && object.type.id !== terms.Dollar) {
|
||||
throw new CompilerError(
|
||||
`DotGet object must be an IdentifierBeforeDot, got ${object.type.name}`,
|
||||
object.from,
|
||||
|
|
@ -298,9 +301,9 @@ export const getDotGetParts = (node: SyntaxNode, input: string) => {
|
|||
)
|
||||
}
|
||||
|
||||
if (![terms.Identifier, terms.Number, terms.ParenExpr].includes(property.type.id)) {
|
||||
if (![terms.Identifier, terms.Number, terms.ParenExpr, terms.DotGet].includes(property.type.id)) {
|
||||
throw new CompilerError(
|
||||
`DotGet property must be an Identifier or Number, got ${property.type.name}`,
|
||||
`DotGet property must be an Identifier, Number, ParenExpr, or DotGet, got ${property.type.name}`,
|
||||
property.from,
|
||||
property.to
|
||||
)
|
||||
|
|
@ -308,7 +311,7 @@ export const getDotGetParts = (node: SyntaxNode, input: string) => {
|
|||
|
||||
const objectName = input.slice(object.from, object.to)
|
||||
|
||||
return { objectName, property }
|
||||
return { object, objectName, property }
|
||||
}
|
||||
|
||||
export const getTryExprParts = (node: SyntaxNode, input: string) => {
|
||||
|
|
|
|||
77
src/index.ts
77
src/index.ts
|
|
@ -1,11 +1,17 @@
|
|||
import { readFileSync } from 'fs'
|
||||
import { VM, fromValue, type Bytecode } from 'reefvm'
|
||||
import { VM, fromValue, toValue, isValue, type Bytecode } from 'reefvm'
|
||||
import { type Tree } from '@lezer/common'
|
||||
import { Compiler } from '#compiler/compiler'
|
||||
import { globals as shrimpGlobals, colors } from '#prelude'
|
||||
import { parser } from '#parser/shrimp'
|
||||
import { globals as parserGlobals, setGlobals as setParserGlobals } from '#parser/tokenizer'
|
||||
import { globals as prelude } from '#prelude'
|
||||
|
||||
export { Compiler } from '#compiler/compiler'
|
||||
export { parser } from '#parser/shrimp'
|
||||
export { globals } from '#prelude'
|
||||
export { globals as prelude } from '#prelude'
|
||||
export type { Tree } from '@lezer/common'
|
||||
export { type Value, type Bytecode } from 'reefvm'
|
||||
export { toValue, fromValue, isValue, Scope, VM, bytecodeToString } from 'reefvm'
|
||||
|
||||
export class Shrimp {
|
||||
vm: VM
|
||||
|
|
@ -13,15 +19,41 @@ export class Shrimp {
|
|||
|
||||
constructor(globals?: Record<string, any>) {
|
||||
const emptyBytecode = { instructions: [], constants: [], labels: new Map() }
|
||||
this.vm = new VM(emptyBytecode, Object.assign({}, shrimpGlobals, globals ?? {}))
|
||||
this.vm = new VM(emptyBytecode, Object.assign({}, prelude, globals ?? {}))
|
||||
this.globals = globals
|
||||
}
|
||||
|
||||
get(name: string): any {
|
||||
const value = this.vm.scope.get(name)
|
||||
return value ? fromValue(value, this.vm) : null
|
||||
}
|
||||
|
||||
set(name: string, value: any) {
|
||||
this.vm.scope.set(name, toValue(value, this.vm))
|
||||
}
|
||||
|
||||
has(name: string): boolean {
|
||||
return this.vm.scope.has(name)
|
||||
}
|
||||
|
||||
async call(name: string, ...args: any[]): Promise<any> {
|
||||
const result = await this.vm.call(name, ...args)
|
||||
return isValue(result) ? fromValue(result, this.vm) : result
|
||||
}
|
||||
|
||||
parse(code: string): Tree {
|
||||
return parseCode(code, this.globals)
|
||||
}
|
||||
|
||||
compile(code: string): Bytecode {
|
||||
return compileCode(code, this.globals)
|
||||
}
|
||||
|
||||
async run(code: string | Bytecode, locals?: Record<string, any>): Promise<any> {
|
||||
let bytecode
|
||||
|
||||
if (typeof code === 'string') {
|
||||
const compiler = new Compiler(code, Object.keys(Object.assign({}, shrimpGlobals, this.globals ?? {}, locals ?? {})))
|
||||
const compiler = new Compiler(code, Object.keys(Object.assign({}, prelude, this.globals ?? {}, locals ?? {})))
|
||||
bytecode = compiler.bytecode
|
||||
} else {
|
||||
bytecode = code
|
||||
|
|
@ -32,13 +64,9 @@ export class Shrimp {
|
|||
await this.vm.continue()
|
||||
if (locals) this.vm.popScope()
|
||||
|
||||
return this.vm.stack.length ? fromValue(this.vm.stack.at(-1)!) : null
|
||||
return this.vm.stack.length ? fromValue(this.vm.stack.at(-1)!, this.vm) : null
|
||||
}
|
||||
|
||||
get(name: string): any {
|
||||
const value = this.vm.scope.get(name)
|
||||
return value ? fromValue(value) : null
|
||||
}
|
||||
}
|
||||
|
||||
export async function runFile(path: string, globals?: Record<string, any>): Promise<any> {
|
||||
|
|
@ -51,14 +79,9 @@ export async function runCode(code: string, globals?: Record<string, any>): Prom
|
|||
}
|
||||
|
||||
export async function runBytecode(bytecode: Bytecode, globals?: Record<string, any>): Promise<any> {
|
||||
try {
|
||||
const vm = new VM(bytecode, Object.assign({}, shrimpGlobals, globals))
|
||||
await vm.run()
|
||||
return vm.stack.length ? fromValue(vm.stack[vm.stack.length - 1]!) : null
|
||||
} catch (error: any) {
|
||||
console.error(`${colors.red}Error:${colors.reset} ${error.message}`)
|
||||
process.exit(1)
|
||||
}
|
||||
const vm = new VM(bytecode, Object.assign({}, prelude, globals))
|
||||
await vm.run()
|
||||
return vm.stack.length ? fromValue(vm.stack[vm.stack.length - 1]!, vm) : null
|
||||
}
|
||||
|
||||
export function compileFile(path: string, globals?: Record<string, any>): Bytecode {
|
||||
|
|
@ -67,7 +90,23 @@ export function compileFile(path: string, globals?: Record<string, any>): Byteco
|
|||
}
|
||||
|
||||
export function compileCode(code: string, globals?: Record<string, any>): Bytecode {
|
||||
const globalNames = [...Object.keys(shrimpGlobals), ...(globals ? Object.keys(globals) : [])]
|
||||
const globalNames = [...Object.keys(prelude), ...(globals ? Object.keys(globals) : [])]
|
||||
const compiler = new Compiler(code, globalNames)
|
||||
return compiler.bytecode
|
||||
}
|
||||
|
||||
export function parseFile(path: string, globals?: Record<string, any>): Tree {
|
||||
const code = readFileSync(path, 'utf-8')
|
||||
return parseCode(code, globals)
|
||||
}
|
||||
|
||||
export function parseCode(code: string, globals?: Record<string, any>): Tree {
|
||||
const oldGlobals = [...parserGlobals]
|
||||
const globalNames = [...Object.keys(prelude), ...(globals ? Object.keys(globals) : [])]
|
||||
|
||||
setParserGlobals(globalNames)
|
||||
const result = parser.parse(code)
|
||||
setParserGlobals(oldGlobals)
|
||||
|
||||
return result
|
||||
}
|
||||
62
src/parser/curlyTokenizer.ts
Normal file
62
src/parser/curlyTokenizer.ts
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
import { parser } from '#parser/shrimp.ts'
|
||||
import type { SyntaxNode } from '@lezer/common'
|
||||
import { isIdentStart, isIdentChar } from './tokenizer'
|
||||
|
||||
// Turns a { curly string } into strings and nodes for interpolation
|
||||
export const tokenizeCurlyString = (value: string): (string | [string, SyntaxNode])[] => {
|
||||
let pos = 1
|
||||
let start = 1
|
||||
let char = value[pos]
|
||||
const tokens: (string | [string, SyntaxNode])[] = []
|
||||
|
||||
while (pos < value.length) {
|
||||
if (char === '$') {
|
||||
// escaped \$
|
||||
if (value[pos - 1] === '\\' && value[pos - 2] !== '\\') {
|
||||
tokens.push(value.slice(start, pos - 1))
|
||||
start = pos
|
||||
char = value[++pos]
|
||||
continue
|
||||
}
|
||||
|
||||
tokens.push(value.slice(start, pos))
|
||||
start = pos
|
||||
|
||||
if (value[pos + 1] === '(') {
|
||||
pos++ // slip opening '('
|
||||
|
||||
char = value[++pos]
|
||||
if (!char) break
|
||||
|
||||
let depth = 0
|
||||
while (char) {
|
||||
if (char === '(') depth++
|
||||
if (char === ')') depth--
|
||||
if (depth < 0) break
|
||||
char = value[++pos]
|
||||
}
|
||||
|
||||
const input = value.slice(start + 2, pos) // skip '$('
|
||||
tokens.push([input, parser.parse(input).topNode])
|
||||
start = ++pos // skip ')'
|
||||
} else {
|
||||
char = value[++pos]
|
||||
if (!char) break
|
||||
if (!isIdentStart(char.charCodeAt(0))) break
|
||||
|
||||
while (char && isIdentChar(char.charCodeAt(0)))
|
||||
char = value[++pos]
|
||||
|
||||
const input = value.slice(start + 1, pos) // skip '$'
|
||||
tokens.push([input, parser.parse(input).topNode])
|
||||
start = pos-- // backtrack and start over
|
||||
}
|
||||
}
|
||||
|
||||
char = value[++pos]
|
||||
}
|
||||
|
||||
tokens.push(value.slice(start, pos - 1))
|
||||
|
||||
return tokens
|
||||
}
|
||||
|
|
@ -5,18 +5,28 @@ type Operator = { str: string; tokenName: keyof typeof terms }
|
|||
const operators: Array<Operator> = [
|
||||
{ str: 'and', tokenName: 'And' },
|
||||
{ str: 'or', tokenName: 'Or' },
|
||||
{ str: 'band', tokenName: 'Band' },
|
||||
{ str: 'bor', tokenName: 'Bor' },
|
||||
{ str: 'bxor', tokenName: 'Bxor' },
|
||||
{ str: '>>>', tokenName: 'Ushr' }, // Must come before >>
|
||||
{ str: '>>', tokenName: 'Shr' },
|
||||
{ str: '<<', tokenName: 'Shl' },
|
||||
{ str: '>=', tokenName: 'Gte' },
|
||||
{ str: '<=', tokenName: 'Lte' },
|
||||
{ str: '!=', tokenName: 'Neq' },
|
||||
{ str: '==', tokenName: 'EqEq' },
|
||||
|
||||
// Compound assignment operators (must come before single-char operators)
|
||||
{ str: '??=', tokenName: 'NullishEq' },
|
||||
{ str: '+=', tokenName: 'PlusEq' },
|
||||
{ str: '-=', tokenName: 'MinusEq' },
|
||||
{ str: '*=', tokenName: 'StarEq' },
|
||||
{ str: '/=', tokenName: 'SlashEq' },
|
||||
{ str: '%=', tokenName: 'ModuloEq' },
|
||||
|
||||
// Nullish coalescing (must come before it could be mistaken for other tokens)
|
||||
{ str: '??', tokenName: 'NullishCoalesce' },
|
||||
|
||||
// Single-char operators
|
||||
{ str: '*', tokenName: 'Star' },
|
||||
{ str: '=', tokenName: 'Eq' },
|
||||
|
|
|
|||
|
|
@ -6,27 +6,36 @@
|
|||
|
||||
@top Program { item* }
|
||||
|
||||
@external tokens operatorTokenizer from "./operatorTokenizer" { Star, Slash, Plus, Minus, And, Or, Eq, EqEq, Neq, Lt, Lte, Gt, Gte, Modulo, PlusEq, MinusEq, StarEq, SlashEq, ModuloEq }
|
||||
@external tokens operatorTokenizer from "./operatorTokenizer" { Star, Slash, Plus, Minus, And, Or, Eq, EqEq, Neq, Lt, Lte, Gt, Gte, Modulo, PlusEq, MinusEq, StarEq, SlashEq, ModuloEq, Band, Bor, Bxor, Shl, Shr, Ushr, NullishCoalesce, NullishEq }
|
||||
|
||||
@tokens {
|
||||
@precedence { Number Regex }
|
||||
|
||||
StringFragment { !['\\$]+ }
|
||||
NamedArgPrefix { $[a-z-]+ "=" }
|
||||
Number { ("-" | "+")? $[0-9]+ ('.' $[0-9]+)? }
|
||||
DoubleQuote { '"' !["]* '"' }
|
||||
NamedArgPrefix { $[a-z] $[a-z0-9-]* "=" }
|
||||
Number {
|
||||
("-" | "+")? "0x" $[0-9a-fA-F]+ |
|
||||
("-" | "+")? "0b" $[01]+ |
|
||||
("-" | "+")? "0o" $[0-7]+ |
|
||||
("-" | "+")? $[0-9]+ ("_"? $[0-9]+)* ('.' $[0-9]+ ("_"? $[0-9]+)*)?
|
||||
}
|
||||
Boolean { "true" | "false" }
|
||||
newlineOrSemicolon { "\n" | ";" }
|
||||
semicolon { ";" }
|
||||
eof { @eof }
|
||||
space { " " | "\t" }
|
||||
Comment { "#" " " ![\n]* }
|
||||
Comment { "#" ![\n]* }
|
||||
leftParen { "(" }
|
||||
rightParen { ")" }
|
||||
colon[closedBy="end", @name="colon"] { ":" }
|
||||
Underscore { "_" }
|
||||
Dollar { "$" }
|
||||
Regex { "//" (![/\\\n[] | "\\" ![\n] | "[" (![\n\\\]] | "\\" ![\n])* "]")+ ("//" $[gimsuy]*)? } // Stolen from the lezer JavaScript grammar
|
||||
"|"[@name=operator]
|
||||
}
|
||||
|
||||
newlineOrSemicolon { newline | semicolon }
|
||||
|
||||
end { @specialize[@name=keyword]<Identifier, "end"> }
|
||||
while { @specialize[@name=keyword]<Identifier, "while"> }
|
||||
if { @specialize[@name=keyword]<Identifier, "if"> }
|
||||
|
|
@ -35,22 +44,27 @@ try { @specialize[@name=keyword]<Identifier, "try"> }
|
|||
catch { @specialize[@name=keyword]<Identifier, "catch"> }
|
||||
finally { @specialize[@name=keyword]<Identifier, "finally"> }
|
||||
throw { @specialize[@name=keyword]<Identifier, "throw"> }
|
||||
import { @specialize[@name=keyword]<Identifier, "import"> }
|
||||
null { @specialize[@name=Null]<Identifier, "null"> }
|
||||
|
||||
@external tokens tokenizer from "./tokenizer" { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot }
|
||||
@external tokens tokenizer from "./tokenizer" { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot, CurlyString }
|
||||
@external tokens pipeStartsLineTokenizer from "./tokenizer" { newline, pipeStartsLine }
|
||||
@external specialize {Identifier} specializeKeyword from "./tokenizer" { Do }
|
||||
|
||||
@precedence {
|
||||
pipe @left,
|
||||
or @left,
|
||||
and @left,
|
||||
nullish @left,
|
||||
comparison @left,
|
||||
multiplicative @left,
|
||||
additive @left,
|
||||
call
|
||||
bitwise @left,
|
||||
call,
|
||||
functionWithNewlines
|
||||
}
|
||||
|
||||
item {
|
||||
item {
|
||||
consumeToTerminator newlineOrSemicolon |
|
||||
consumeToTerminator eof |
|
||||
newlineOrSemicolon // allow blank lines
|
||||
|
|
@ -63,6 +77,7 @@ consumeToTerminator {
|
|||
ambiguousFunctionCall |
|
||||
TryExpr |
|
||||
Throw |
|
||||
Import |
|
||||
IfExpr |
|
||||
FunctionDef |
|
||||
CompoundAssign |
|
||||
|
|
@ -73,11 +88,11 @@ consumeToTerminator {
|
|||
}
|
||||
|
||||
PipeExpr {
|
||||
pipeOperand (!pipe "|" pipeOperand)+
|
||||
pipeOperand (!pipe (pipeStartsLine? "|") newlineOrSemicolon* pipeOperand)+
|
||||
}
|
||||
|
||||
pipeOperand {
|
||||
FunctionCall | FunctionCallOrIdentifier
|
||||
consumeToTerminator
|
||||
}
|
||||
|
||||
WhileExpr {
|
||||
|
|
@ -152,6 +167,11 @@ Throw {
|
|||
throw (BinOp | ConditionalOp | expression)
|
||||
}
|
||||
|
||||
// this has to be in the parse tree so the scope tracker can use it
|
||||
Import {
|
||||
import NamedArg* Identifier+ NamedArg*
|
||||
}
|
||||
|
||||
ConditionalOp {
|
||||
expression !comparison EqEq expression |
|
||||
expression !comparison Neq expression |
|
||||
|
|
@ -160,7 +180,8 @@ ConditionalOp {
|
|||
expression !comparison Gt expression |
|
||||
expression !comparison Gte expression |
|
||||
(expression | ConditionalOp) !and And (expression | ConditionalOp) |
|
||||
(expression | ConditionalOp) !or Or (expression | ConditionalOp)
|
||||
(expression | ConditionalOp) !or Or (expression | ConditionalOp) |
|
||||
(expression | ConditionalOp) !nullish NullishCoalesce (expression | ConditionalOp)
|
||||
}
|
||||
|
||||
Params {
|
||||
|
|
@ -176,7 +197,7 @@ Assign {
|
|||
}
|
||||
|
||||
CompoundAssign {
|
||||
AssignableIdentifier (PlusEq | MinusEq | StarEq | SlashEq | ModuloEq) consumeToTerminator
|
||||
AssignableIdentifier (PlusEq | MinusEq | StarEq | SlashEq | ModuloEq | NullishEq) consumeToTerminator
|
||||
}
|
||||
|
||||
BinOp {
|
||||
|
|
@ -184,11 +205,31 @@ BinOp {
|
|||
(expression | BinOp) !multiplicative Star (expression | BinOp) |
|
||||
(expression | BinOp) !multiplicative Slash (expression | BinOp) |
|
||||
(expression | BinOp) !additive Plus (expression | BinOp) |
|
||||
(expression | BinOp) !additive Minus (expression | BinOp)
|
||||
(expression | BinOp) !additive Minus (expression | BinOp) |
|
||||
(expression | BinOp) !bitwise Band (expression | BinOp) |
|
||||
(expression | BinOp) !bitwise Bor (expression | BinOp) |
|
||||
(expression | BinOp) !bitwise Bxor (expression | BinOp) |
|
||||
(expression | BinOp) !bitwise Shl (expression | BinOp) |
|
||||
(expression | BinOp) !bitwise Shr (expression | BinOp) |
|
||||
(expression | BinOp) !bitwise Ushr (expression | BinOp)
|
||||
}
|
||||
|
||||
ParenExpr {
|
||||
leftParen (IfExpr | ambiguousFunctionCall | BinOp | expressionWithoutIdentifier | ConditionalOp | PipeExpr | FunctionDef) rightParen
|
||||
leftParen newlineOrSemicolon* (
|
||||
FunctionCallWithNewlines |
|
||||
IfExpr |
|
||||
ambiguousFunctionCall |
|
||||
BinOp newlineOrSemicolon* |
|
||||
expressionWithoutIdentifier |
|
||||
ConditionalOp newlineOrSemicolon* |
|
||||
PipeExpr |
|
||||
FunctionDef
|
||||
)
|
||||
rightParen
|
||||
}
|
||||
|
||||
FunctionCallWithNewlines[@name=FunctionCall] {
|
||||
(DotGet | Identifier | ParenExpr) newlineOrSemicolon+ arg !functionWithNewlines (newlineOrSemicolon+ arg)* newlineOrSemicolon*
|
||||
}
|
||||
|
||||
expression {
|
||||
|
|
@ -202,10 +243,13 @@ expression {
|
|||
|
||||
@skip {} {
|
||||
DotGet {
|
||||
IdentifierBeforeDot dot (Number | Identifier | ParenExpr)
|
||||
IdentifierBeforeDot dot (DotGet | Number | Identifier | ParenExpr) |
|
||||
Dollar dot (DotGet | Number | Identifier | ParenExpr)
|
||||
}
|
||||
|
||||
String { "'" stringContent* "'" }
|
||||
String {
|
||||
"'" stringContent* "'" | CurlyString | DoubleQuote
|
||||
}
|
||||
}
|
||||
|
||||
stringContent {
|
||||
|
|
@ -215,7 +259,7 @@ stringContent {
|
|||
}
|
||||
|
||||
Interpolation {
|
||||
"$" Identifier |
|
||||
"$" FunctionCallOrIdentifier |
|
||||
"$" ParenExpr
|
||||
}
|
||||
|
||||
|
|
@ -235,7 +279,7 @@ Array {
|
|||
// We need expressionWithoutIdentifier to avoid conflicts in consumeToTerminator.
|
||||
// Without this, when parsing "my-var" at statement level, the parser can't decide:
|
||||
// - ambiguousFunctionCall → FunctionCallOrIdentifier → Identifier
|
||||
// - expression → Identifier
|
||||
// - expression → Identifier
|
||||
// Both want the same Identifier token! So we use expressionWithoutIdentifier
|
||||
// to remove Identifier from the second path, forcing standalone identifiers
|
||||
// to go through ambiguousFunctionCall (which is what we want semantically).
|
||||
|
|
|
|||
|
|
@ -19,48 +19,63 @@ export const
|
|||
StarEq = 17,
|
||||
SlashEq = 18,
|
||||
ModuloEq = 19,
|
||||
Identifier = 20,
|
||||
AssignableIdentifier = 21,
|
||||
Word = 22,
|
||||
IdentifierBeforeDot = 23,
|
||||
Do = 24,
|
||||
Comment = 25,
|
||||
Program = 26,
|
||||
PipeExpr = 27,
|
||||
FunctionCall = 28,
|
||||
DotGet = 29,
|
||||
Number = 30,
|
||||
ParenExpr = 31,
|
||||
IfExpr = 32,
|
||||
keyword = 70,
|
||||
ConditionalOp = 34,
|
||||
String = 35,
|
||||
StringFragment = 36,
|
||||
Interpolation = 37,
|
||||
EscapeSeq = 38,
|
||||
Boolean = 39,
|
||||
Regex = 40,
|
||||
Dict = 41,
|
||||
NamedArg = 42,
|
||||
NamedArgPrefix = 43,
|
||||
FunctionDef = 44,
|
||||
Params = 45,
|
||||
NamedParam = 46,
|
||||
Null = 47,
|
||||
colon = 48,
|
||||
CatchExpr = 49,
|
||||
Block = 51,
|
||||
FinallyExpr = 52,
|
||||
Underscore = 55,
|
||||
Array = 56,
|
||||
ElseIfExpr = 57,
|
||||
ElseExpr = 59,
|
||||
FunctionCallOrIdentifier = 60,
|
||||
BinOp = 61,
|
||||
PositionalArg = 62,
|
||||
WhileExpr = 64,
|
||||
FunctionCallWithBlock = 66,
|
||||
TryExpr = 67,
|
||||
Throw = 69,
|
||||
CompoundAssign = 71,
|
||||
Assign = 72
|
||||
Band = 20,
|
||||
Bor = 21,
|
||||
Bxor = 22,
|
||||
Shl = 23,
|
||||
Shr = 24,
|
||||
Ushr = 25,
|
||||
NullishCoalesce = 26,
|
||||
NullishEq = 27,
|
||||
Identifier = 28,
|
||||
AssignableIdentifier = 29,
|
||||
Word = 30,
|
||||
IdentifierBeforeDot = 31,
|
||||
CurlyString = 32,
|
||||
newline = 101,
|
||||
pipeStartsLine = 102,
|
||||
Do = 33,
|
||||
Comment = 34,
|
||||
Program = 35,
|
||||
PipeExpr = 36,
|
||||
WhileExpr = 38,
|
||||
keyword = 84,
|
||||
ConditionalOp = 40,
|
||||
ParenExpr = 41,
|
||||
FunctionCallWithNewlines = 42,
|
||||
DotGet = 43,
|
||||
Number = 44,
|
||||
Dollar = 45,
|
||||
PositionalArg = 46,
|
||||
FunctionDef = 47,
|
||||
Params = 48,
|
||||
NamedParam = 49,
|
||||
NamedArgPrefix = 50,
|
||||
String = 51,
|
||||
StringFragment = 52,
|
||||
Interpolation = 53,
|
||||
FunctionCallOrIdentifier = 54,
|
||||
EscapeSeq = 55,
|
||||
DoubleQuote = 56,
|
||||
Boolean = 57,
|
||||
Null = 58,
|
||||
colon = 59,
|
||||
CatchExpr = 60,
|
||||
Block = 62,
|
||||
FinallyExpr = 63,
|
||||
Underscore = 66,
|
||||
NamedArg = 67,
|
||||
IfExpr = 68,
|
||||
FunctionCall = 70,
|
||||
ElseIfExpr = 71,
|
||||
ElseExpr = 73,
|
||||
BinOp = 74,
|
||||
Regex = 75,
|
||||
Dict = 76,
|
||||
Array = 77,
|
||||
FunctionCallWithBlock = 78,
|
||||
TryExpr = 79,
|
||||
Throw = 81,
|
||||
Import = 83,
|
||||
CompoundAssign = 85,
|
||||
Assign = 86
|
||||
|
|
|
|||
|
|
@ -1,27 +1,27 @@
|
|||
// This file was generated by lezer-generator. You probably shouldn't edit it.
|
||||
import {LRParser, LocalTokenGroup} from "@lezer/lr"
|
||||
import {operatorTokenizer} from "./operatorTokenizer"
|
||||
import {tokenizer, specializeKeyword} from "./tokenizer"
|
||||
import {tokenizer, pipeStartsLineTokenizer, specializeKeyword} from "./tokenizer"
|
||||
import {trackScope} from "./parserScopeContext"
|
||||
import {highlighting} from "./highlight"
|
||||
const spec_Identifier = {__proto__:null,if:66, null:94, catch:100, finally:106, end:108, else:116, while:130, try:136, throw:140}
|
||||
const spec_Identifier = {__proto__:null,while:78, null:116, catch:122, finally:128, end:130, if:138, else:144, try:160, throw:164, import:168}
|
||||
export const parser = LRParser.deserialize({
|
||||
version: 14,
|
||||
states: "9[QYQbOOO!dOSO'#DPOOQa'#DV'#DVO#mQbO'#DfO%RQcO'#E^OOQa'#E^'#E^O&XQcO'#E^O'ZQcO'#E]O'qQcO'#E]O)^QRO'#DOO*mQcO'#EWO*wQcO'#EWO+XQbO'#C{O,SOpO'#CyOOQ`'#EX'#EXO,XQbO'#EWO,cQRO'#DuOOQ`'#EW'#EWO,wQQO'#EVOOQ`'#EV'#EVOOQ`'#Dw'#DwQYQbOOO-PQbO'#DYO-[QbO'#C|O.PQbO'#DnO.tQQO'#DqO.PQbO'#DsO.yQbO'#DRO/RQWO'#DSOOOO'#E`'#E`OOOO'#Dx'#DxO/gOSO,59kOOQa,59k,59kOOQ`'#Dy'#DyO/uQbO,5:QO/|QbO'#DWO0WQQO,59qOOQa,5:Q,5:QO0cQbO,5:QOOQa'#E]'#E]OOQ`'#Dl'#DlOOQ`'#El'#ElOOQ`'#EQ'#EQO0mQbO,59dO1gQbO,5:bO.PQbO,59jO.PQbO,59jO.PQbO,59jO.PQbO,5:VO.PQbO,5:VO.PQbO,5:VO1wQRO,59gO2OQRO,59gO2ZQRO,59gO2UQQO,59gO2lQQO,59gO2tObO,59eO3PQbO'#ERO3[QbO,59cO3vQbO,5:[O1gQbO,5:aOOQ`,5:q,5:qOOQ`-E7u-E7uOOQ`'#Dz'#DzO4ZQbO'#DZO4fQbO'#D[OOQO'#D{'#D{O4^QQO'#DZO4tQQO,59tO4yQcO'#E]O6_QRO'#E[O6fQRO'#E[OOQO'#E['#E[O6qQQO,59hO6vQRO,5:YO6}QRO,5:YO3vQbO,5:]O7YQcO,5:_O8UQcO,5:_O8`QcO,5:_OOOO,59m,59mOOOO,59n,59nOOOO-E7v-E7vOOQa1G/V1G/VOOQ`-E7w-E7wO8pQQO1G/]OOQa1G/l1G/lO8{QbO1G/lOOQ`,59r,59rOOQO'#D}'#D}O8pQQO1G/]OOQa1G/]1G/]OOQ`'#EO'#EOO8{QbO1G/lOOQ`-E8O-E8OOOQ`1G/|1G/|OOQa1G/U1G/UO:WQcO1G/UO:_QcO1G/UO:fQcO1G/UOOQa1G/q1G/qO;_QcO1G/qO;iQcO1G/qO;sQcO1G/qOOQa1G/R1G/ROOQa1G/P1G/PO<hQbO'#DjO=_QbO'#CxOOQ`,5:m,5:mOOQ`-E8P-E8POOQ`'#Da'#DaO=lQbO'#DaO>]QbO1G/vOOQ`1G/{1G/{OOQ`-E7x-E7xO>hQQO,59uOOQO,59v,59vOOQO-E7y-E7yO>pQbO1G/`O3vQbO1G/SO3vQbO1G/tO?TQbO1G/wO?`QQO7+$wOOQa7+$w7+$wO?kQbO7+%WOOQa7+%W7+%WOOQO-E7{-E7{OOQ`-E7|-E7|OOQ`'#D|'#D|O?uQQO'#D|O?zQbO'#EiOOQ`,59{,59{O@kQbO'#D_O@pQQO'#DbOOQ`7+%b7+%bO@uQbO7+%bO@zQbO7+%bOASQbO7+$zOA_QbO7+$zOA{QbO7+$nOBTQbO7+%`OOQ`7+%c7+%cOBYQbO7+%cOB_QbO7+%cOOQa<<Hc<<HcOOQa<<Hr<<HrOOQ`,5:h,5:hOOQ`-E7z-E7zOBgQQO,59yO3vQbO,59|OOQ`<<H|<<H|OBlQbO<<H|OOQ`<<Hf<<HfOBqQbO<<HfOBvQbO<<HfOCOQbO<<HfOOQ`'#EP'#EPOCZQbO<<HYOCcQbO'#DiOOQ`<<HY<<HYOCkQbO<<HYOOQ`<<Hz<<HzOOQ`<<H}<<H}OCpQbO<<H}O3vQbO1G/eOOQ`1G/h1G/hOOQ`AN>hAN>hOOQ`AN>QAN>QOCuQbOAN>QOCzQbOAN>QOOQ`-E7}-E7}OOQ`AN=tAN=tODSQbOAN=tO-[QbO,5:RO3vQbO,5:TOOQ`AN>iAN>iOOQ`7+%P7+%POOQ`G23lG23lODXQbOG23lPD^QbO'#DgOOQ`G23`G23`ODcQQO1G/mOOQ`1G/o1G/oOOQ`LD)WLD)WO3vQbO7+%XOOQ
`<<Hs<<Hs",
|
||||
stateData: "Dk~O!xOSiOS~OdWOe`OfTOg]OhfOnTOqgOwTOxTO!PTO!chO!fiO!hjO!}[O#RPO#YQO#ZRO#[cO~OtmO#RpO#TkO#UlO~OdwOfTOg]OnTOwTOxTO{sO!PTO!}[O#RPO#YQO#ZRO#[qO~O#^uO~P!rOP#QXQ#QXR#QXS#QXT#QXU#QXW#QXX#QXY#QXZ#QX[#QX]#QX^#QX#[#QX#a#QX!S#QX!V#QX!W#QX![#QX~OdwOfTOg]OhfOnTOwTOxTO{sO!PTO!XxO!}[O#RPO#YQO#ZRO#_#QX!Q#QX~P#tOV|O~P#tOP#PXQ#PXR#PXS#PXT#PXU#PXW#PXX#PXY#PXZ#PX[#PX]#PX^#PX~O#[!zX#a!zX!S!zX!V!zX!W!zX![!zX~P&`OdwOfTOg]OhfOnTOwTOxTO{sO!PTO!XxO!}[O#RPO#YQO#ZRO!Q!^X!a!^X#[!^X#a!^X#_!^X!S!^X!V!^X!W!^X![!^X~P&`OP!ROQ!ROR!SOS!SOT!OOU!POW}OX}OY}OZ}O[}O]}O^!QO~O#[!zX#a!zX!S!zX!V!zX!W!zX![!zX~OT!OOU!PO~P*XOP!ROQ!ROR!SOS!SO~P*XOdWOfTOg]OhfOnTOqgOwTOxTO!PTO!}[O#RPO#YQO#ZRO~O!|!YO~O!Q!]O!a!ZO~P*XOV|O_!^O`!^Oa!^Ob!^Oc!^O~O#[!_O#a!_O~Od!aO{!cO!Q}P~Od!gOfTOg]OnTOwTOxTO!PTO!}[O#RPO#YQO#ZRO~OdwOfTOg]OnTOwTOxTO!PTO!}[O#RPO#YQO#ZRO~O!Q!nO~Od!rO!}[O~O#R!sO#T!sO#U!sO#V!sO#W!sO#X!sO~OtmO#R!uO#TkO#UlO~O#^!xO~P!rOhfO!X!zO~P.PO{sO#[!{O#^!}O~O#[#OO#^!xO~P.POhfO{sO!XxO!Qla!ala#[la#ala#_la!Sla!Vla!Wla![la~P.POe`O!chO!fiO!hjO~P+XO#_#[O~P&`OT!OOU!PO#_#[O~OP!ROQ!ROR!SOS!SO#_#[O~O!a!ZO#_#[O~Od#]On#]O!}[O~Od#^Og]O!}[O~O!a!ZO#[ka#aka#_ka!Ska!Vka!Wka![ka~Oe`O!chO!fiO!hjO#[#cO~P+XOd!aO{!cO!Q}X~On#hOw#hO!P#hO#RPO~O!Q#jO~OhfO{sO!XxOT#PXU#PXW#PXX#PXY#PXZ#PX[#PX]#PX!Q#PX~P.POT!OOU!POW}OX}OY}OZ}O[}O]}O~O!Q#OX~P5sOT!OOU!PO!Q#OX~O!Q#kO~O!Q#lO~P5sOT!OOU!PO!Q#lO~O#[!ga#a!ga!S!ga!V!ga!W!ga![!ga~P)^O#[!ga#a!ga!S!ga!V!ga!W!ga![!ga~OT!OOU!PO~P7pOP!ROQ!ROR!SOS!SO~P7pO{sO#[!{O#^#oO~O#[#OO#^#qO~P.POW}OX}OY}OZ}O[}O]}OTri#[ri#ari#_ri!Qri!Sri!Vri!Wri![ri~OU!PO~P9VOU!PO~P9iOUri~P9VO^!QOR!_iS!_i#[!_i#a!_i#_!_i!S!_i!V!_i!W!_i![!_i~OP!_iQ!_i~P:mOP!ROQ!RO~P:mOP!ROQ!ROR!_iS!_i#[!_i#a!_i#_!_i!S!_i!V!_i!W!_i![!_i~OhfO{sO!XxO!a!^X#[!^X#a!^X#_!^X!S!^X!V!^X!W!^X![!^X~P.POhfO{sO!XxO~P.POe`O!chO!fiO!hjO#[#tO!S#]P!V#]P!W#]P![#]P~P+XO!S#xO!V#yO!W#zO~O{!cO!Q}a~Oe`O!chO!fiO!hjO#[$OO~P+XO!S#xO!V#yO!W$RO~O{sO#[!{O#^$UO~O#[#OO#^$VO~P.PO#[$WO~Oe`O!chO!fiO!hjO#[#tO!S#]X!V#]X!W#]X![#]X~P+XOd$YO~O!Q$ZO~O!W$[O~O!V#yO!W$[O~O!S#xO!V#yO!W$^O~Oe`O!chO!fiO
!hjO#[#tO!S#]P!V#]P!W#]P~P+XO!W$eO![$dO~O!W$gO~O!W$hO~O!V#yO!W$hO~O!Q$jO~O!W$lO~O!W$mO~O!V#yO!W$mO~O!S#xO!V#yO!W$mO~O!W$qO![$dO~Oq$sO!Q$tO~O!W$qO~O!W$uO~O!W$wO~O!V#yO!W$wO~O!W$zO~O!W$}O~Oq$sO~O!Q%OO~Onx~",
|
||||
goto: "4x#aPPPPPPPPPPPPPPPPPPPPPPPPPPP#b#w$aP%d#bP&k'bP(a(aPP(e)aP)u*g*jPP*pP*|+fPPP+|,zP-O-U-j.YP.bP.b.bP.bP.b.b.t.z/Q/W/^/h/o/y0T0Z0ePPP0l0p1^PP1v1|3fP4fPPPPPPPP4jPP4ppaOe|!]!^!n#c#j#k#l#v$O$Z$j$t%OR!W[t^O[e|!Z!]!^!n#c#j#k#l#v$O$Z$j$t%OT!jg$srWO[e|!]!^!n#c#j#k#l#v$O$Z$j$t%OzwRSWhjrsv{}!O!P!Q!R!S!g!y#P#^#_#pS!gg$sR#^!ZvSO[eg|!]!^!n#c#j#k#l#v$O$Z$j$s$t%OzTRSWhjrsv{}!O!P!Q!R!S!g!y#P#^#_#pQ!rkQ#]!YR#_!ZpYOe|!]!^!n#c#j#k#l#v$O$Z$j$t%OQ!U[S!ig$sQ!mhQ!pjQ#S!PR#U!O!rTORSW[eghjrsv{|}!O!P!Q!R!S!]!^!g!n!y#P#^#_#c#j#k#l#p#v$O$Z$j$s$t%OR#h!cTmPo!sTORSW[eghjrsv{|}!O!P!Q!R!S!]!^!g!n!y#P#^#_#c#j#k#l#p#v$O$Z$j$s$t%OQtR[ySW{!g#^#_Q!wrX!{t!w!|#npaOe|!]!^!n#c#j#k#l#v$O$Z$j$t%O[xSW{!g#^#_Q!W[R!zsR!ffX!df!b!e#gQ#|#dQ$T#mQ$`#}R$o$aQ#d!]Q#m!nQ$P#kQ$Q#lQ$k$ZQ$v$jQ$|$tR%P%OQ#{#dQ$S#mQ$]#|Q$_#}Q$i$TS$n$`$aR$x$o!QTRSW[ghjrsv{}!O!P!Q!R!S!g!y#P#^#_#p$sqUOe|!]!^!n#c#j#k#l#v$O$Z$j$t%OT$b$P$cQ$f$PR$r$cu^O[e|!Z!]!^!n#c#j#k#l#v$O$Z$j$t%OpZOe|!]!^!n#c#j#k#l#v$O$Z$j$t%OQ!V[Q!qjQ#W!RR#Z!S]ySW{!g#^#_qaOe|!]!^!n#c#j#k#l#v$O$Z$j$t%OQeOR!`eQoPR!toQrRR!vrQ!bfR#f!bQ!efQ#g!bT#i!e#gS#v#c$OR$X#vQ!|tQ#n!wT#r!|#nQ#PvQ#p!yT#s#P#pQ$c$PR$p$cY{SW!g#^#_R#Q{S![_!XR#a![TdOeSbOeQ#R|`#b!]!n#k#l$Z$j$t%OQ#e!^U#u#c#v$OR#}#jp_Oe|!]!^!n#c#j#k#l#v$O$Z$j$t%OQ!X[R#`!ZQ!kgR${$srXO[e|!]!^!n#c#j#k#l#v$O$Z$j$t%OQvR[xSW{!g#^#_S!hg$sQ!lhQ!ojQ!yrQ!zsW#Ov!y#P#pQ#S}Q#T!OQ#V!PQ#W!QQ#X!RR#Y!SpVOe|!]!^!n#c#j#k#l#v$O$Z$j$t%O!OwRSWghjrsv{}!O!P!Q!R!S!g!y#P#^#_#p$sR!T[TnPoQ#w#cR$a$O]zSW{!g#^#_",
|
||||
nodeNames: "⚠ Star Slash Plus Minus And Or Eq EqEq Neq Lt Lte Gt Gte Modulo PlusEq MinusEq StarEq SlashEq ModuloEq Identifier AssignableIdentifier Word IdentifierBeforeDot Do Comment Program PipeExpr FunctionCall DotGet Number ParenExpr IfExpr keyword ConditionalOp String StringFragment Interpolation EscapeSeq Boolean Regex Dict NamedArg NamedArgPrefix FunctionDef Params NamedParam Null colon CatchExpr keyword Block FinallyExpr keyword keyword Underscore Array ElseIfExpr keyword ElseExpr FunctionCallOrIdentifier BinOp PositionalArg operator WhileExpr keyword FunctionCallWithBlock TryExpr keyword Throw keyword CompoundAssign Assign",
|
||||
maxTerm: 109,
|
||||
states: "?[QYQ!SOOOOQ!Q'#Ek'#EkO!sO!bO'#DXO%kQ!TO'#DdO&UOSO'#DaOOQ!R'#Da'#DaO)SQ!TO'#EnOOQ!Q'#E{'#E{O)pQRO'#DxO+xQ!TO'#EjO,fQ!SO'#DVOOQ!R'#Dz'#DzO/WQ!SO'#D{OOQ!R'#En'#EnO/_Q!TO'#EnO1cQ!TO'#EmO2qQ!TO'#EjO3OQRO'#ETOOQ!Q'#Ej'#EjO3gQ!SO'#EjO3nQrO'#EiOOQ!Q'#Ei'#EiOOQ!Q'#EV'#EVQYQ!SOOO4PQbO'#D]O4[QbO'#DrO5YQbO'#DSO6WQQO'#D}O5YQbO'#EPO6]QbO'#ERO6eObO,59sOOQ!Q'#D['#D[O6vQbO'#DqOOQ!Q'#Eq'#EqOOQ!Q'#E_'#E_O7QQ!SO,5:`OOQ!R'#Em'#EmO8QQbO'#DcO8`QWO'#DeOOOO'#Es'#EsOOOO'#E['#E[O8tOSO,59{OOQ!R,59{,59{O5YQbO,5:dO5YQbO,5:dO5YQbO,5:dO5YQbO,5:dO5YQbO,59pO5YQbO,59pO5YQbO,59pO5YQbO,59pOOQ!Q'#EX'#EXO,fQ!SO,59qO9SQ!TO'#DdO9^Q!TO'#EnO9hQsO,59qO9uQQO,59qO9zQrO,59qO:VQrO,59qO:eQsO,59qO;TQsO,59qO;[QrO'#DQO;dQ!SO,5:gO;kQrO,5:fOOQ!R,5:g,5:gO;yQ!SO,5:gO<WQbO,5:pO<WQbO,5:oOYQ!SO,5:hO=kQ!SO,59lOOQ!Q,5;T,5;TOYQ!SO'#EWO>]QQO'#EWOOQ!Q-E8T-E8TOOQ!Q'#EY'#EYO>bQbO'#D^O>mQbO'#D_OOQO'#EZ'#EZO>eQQO'#D^O?RQQO,59wO?WQcO'#EmO@TQRO'#EzOAQQRO'#EzOOQO'#Ez'#EzOAXQQO,5:^OA^QRO,59nOAeQRO,59nOYQ!SO,5:iOAsQ!TO,5:kOCXQ!TO,5:kOC{Q!TO,5:kODYQ!SO,5:mOOQ!Q'#Ec'#EcO6]QbO,5:mOOQ!R1G/_1G/_OOQ!Q,5:],5:]OOQ!Q-E8]-E8]OOOO'#Dd'#DdOOOO,59},59}OOOO,5:P,5:POOOO-E8Y-E8YOOQ!R1G/g1G/gOOQ!R1G0O1G0OOF_Q!TO1G0OOFiQ!TO1G0OOG}Q!TO1G0OOHXQ!TO1G0OOHfQ!TO1G0OOOQ!R1G/[1G/[OI}Q!TO1G/[OJUQ!TO1G/[OJ]Q!TO1G/[OKbQ!TO1G/[OJdQ!TO1G/[OOQ!Q-E8V-E8VOKxQsO1G/]OLVQQO1G/]OL[QrO1G/]OLgQrO1G/]OLuQsO1G/]OL|QsO1G/]OMTQ!SO,59rOM_QrO1G/]OOQ!R1G/]1G/]OMjQrO1G0QOOQ!R1G0R1G0ROMxQ!SO1G0ROOQp'#Ea'#EaOMjQrO1G0QOOQ!R1G0Q1G0QOOQ!Q'#Eb'#EbOMxQ!SO1G0RONVQ!SO1G0[ONwQ!SO1G0ZO! iQ!SO'#DlO! 
}Q!SO'#DlO!!_QbO1G0SOOQ!Q-E8U-E8UOYQ!SO,5:rOOQ!Q,5:r,5:rOYQ!SO,5:rOOQ!Q-E8W-E8WO!!jQQO,59xOOQO,59y,59yOOQO-E8X-E8XOYQ!SO1G/cOYQ!SO1G/xOYQ!SO1G/YO!!rQbO1G0TO!!}Q!SO1G0XO!#rQ!SO1G0XOOQ!Q-E8a-E8aO!#yQrO7+$wOOQ!R7+$w7+$wO!$UQrO1G/^O!$aQrO7+%lOOQ!R7+%l7+%lO!$oQ!SO7+%mOOQ!R7+%m7+%mOOQp-E8_-E8_OOQ!Q-E8`-E8`OOQ!Q'#E]'#E]O!$|QrO'#E]O!%[Q!SO'#EyOOQ`,5:W,5:WO!%lQbO'#DjO!%qQQO'#DmOOQ!Q7+%n7+%nO!%vQbO7+%nO!%{QbO7+%nOOQ!Q1G0^1G0^OYQ!SO1G0^O!&TQ!SO7+$}O!&fQ!SO7+$}O!&sQbO7+%dO!&{QbO7+$tOOQ!Q7+%o7+%oO!'QQbO7+%oO!'VQbO7+%oO!'_Q!SO7+%sOOQ!R<<Hc<<HcO!(SQ!SO7+$xO!(aQrO7+$xOOQ!R<<IW<<IWOOQ!R<<IX<<IXOOQ!Q,5:w,5:wOOQ!Q-E8Z-E8ZO!(lQQO,5:UOYQ!SO,5:XOOQ!Q<<IY<<IYO!(qQbO<<IYOOQ!Q7+%x7+%xOOQ!Q<<Hi<<HiO!(vQbO<<HiO!({QbO<<HiO!)TQbO<<HiOOQ`'#E`'#E`O!)`QbO<<IOO!)hQbO'#DwOOQ!Q<<IO<<IOO!)pQbO<<IOOOQ!Q<<H`<<H`OOQ!Q<<IZ<<IZO!)uQbO<<IZOOQp,5:x,5:xO!)zQ!SO<<HdOOQp-E8[-E8[OYQ!SO1G/pOOQ`1G/s1G/sOOQ!QAN>tAN>tOOQ!QAN>TAN>TO!*XQbOAN>TO!*^QbOAN>TOOQ`-E8^-E8^OOQ!QAN>jAN>jO!*fQbOAN>jO4[QbO,5:aOYQ!SO,5:cOOQ!QAN>uAN>uPMTQ!SO'#EXOOQ`7+%[7+%[OOQ!QG23oG23oO!*kQbOG23oP!)kQbO'#DuOOQ!QG24UG24UO!*pQQO1G/{OOQ`1G/}1G/}OOQ!QLD)ZLD)ZOYQ!SO7+%gOOQ`<<IR<<IRO!*uObO,59sO!+WO!bO'#DX",
|
||||
stateData: "!+`~O#[OSrOS~OlROmaOn]OoQOpTOqhOwjO|]O}QO!YTO!Z]O![]O!giO!m]O!rkO!tlO!vmO#XPO#`PO#cYO#fSO#qZO#r[O~O#dnO~OltOn]OoQOpTOqhO|]O}QO!SpO!YTO!Z]O![]O!doO!m]O#cYO#fSO#qZO#r[OP#aXQ#aXR#aXS#aXT#aXU#aXW#aXX#aXY#aXZ#aX[#aX]#aX^#aXd#aXe#aXf#aXg#aXh#aXi#aXj#aXu!WX!]!WX#Y!WX#p!WX~O#X!WX#`!WX#t!WX!_!WX!b!WX!c!WX!j!WX~P!xO!UwO#fzO#huO#ivO~OltOn]OoQOpTOqhO|]O}QO!SpO!YTO!Z]O![]O!doO!m]O#cYO#fSO#qZO#r[OP#bXQ#bXR#bXS#bXT#bXU#bXW#bXX#bXY#bXZ#bX[#bX]#bX^#bXd#bXe#bXf#bXg#bXh#bXi#bXj#bXu#bX#Y#bX#p#bX~O#X#bX#`#bX#t#bX!]#bX!_#bX!b#bX!c#bX!j#bX~P&dOP|OQ|OR}OS}OT!QOU!ROW!POX!POY!POZ!PO[!PO]!PO^{Od!OOe!OOf!OOg!OOh!OOi!OOj!SO~OP|OQ|OR}OS}Od!OOe!OOf!OOg!OOh!OOi!OOu#^X#Y#^X~O#X#^X#`#^X#t#^X!_#^X!b#^X!c#^X#p#^X!j#^X~P+QOl!VOmaOn]OoQOpTOqhOwjO|]O}QO!YTO!Z]O![]O!giO!m]O!rkO!tlO!vmO#XPO#`PO#cYO#fSO#qZO#r[O~OltOn]OoQOpTO|]O}QO!SpO!YTO!Z]O![]O!m]O#XPO#`PO#cYO#fSO#qZO#r[O~O#s!bO~P.POV!dO#X#bX#`#bX#t#bX!_#bX!b#bX!c#bX!j#bX~P'iOP#aXQ#aXR#aXS#aXT#aXU#aXW#aXX#aXY#aXZ#aX[#aX]#aX^#aXd#aXe#aXf#aXg#aXh#aXi#aXj#aXu#^X#Y#^X~O#X#^X#`#^X#t#^X!_#^X!b#^X!c#^X#p#^X!j#^X~P/{Ou#^X#X#^X#Y#^X#`#^X#t#^X!_#^X!b#^X!c#^X#p#^X!j#^X~OT!QOU!ROj!SO~P2POV!dO_!eO`!eOa!eOb!eOc!eOk!eO~O!]!fO~P2POu!iO#XPO#Y!jO#`PO#t!hO~Ol!lO!S!nO!]!QP~Ol!rOn]OoQOpTO|]O}QO!YTO!Z]O![]O!m]O#cYO#fSO#qZO#r[O~OltOn]OoQOpTO|]O}QO!YTO!Z]O![]O!m]O#cYO#fSO#qZO#r[O~O!]!yO~Ol!lO!SpO~Ol#QOoQO|#QO}QO#cYO~OqhO!d#RO~P5YOqhO!SpO!doOu!ha!]!ha#X!ha#Y!ha#`!ha#t!ha#p!ha!_!ha!b!ha!c!ha!j!ha~P5YOl#TOo&PO}&PO#cYO~O#f#VO#h#VO#i#VO#j#VO#k#VO#l#VO~O!UwO#f#XO#huO#ivO~O#XPO#`PO~P!xO#XPO#`PO~P&dO#XPO#`PO#p#oO~P+QO#p#oO~O#p#oOu#^X#Y#^X~O!]!fO#p#oOu#^X#Y#^X~O#p#oO~P/{OT!QOU!ROj!SO#XPO#`POu#^X#Y#^X~O#p#oO~P:lOu!iO#Y!jO~O#s#qO~P.PO!SpO#XPO#`PO#s#uO~O#XPO#`PO#s#qO~P5YOlROmaOn]OoQOpTOqhOwjO|]O}QO!YTO!Z]O![]O!giO!m]O!rkO!tlO!vmO#cYO#fSO#qZO#r[O~Ou!iO#Y!jO#Xta#`ta#tta#pta!_ta!bta!cta!jta~Ou$QO~Ol!lO!S!nO!]!QX~OpTO|$TO!YTO!Z$TO![$TO#fSO~O!]$VO~OqhO!SpO!doOT#aXU#aXW#aXX#aXY#aXZ#aX[#aX]#aXj#aX!]#aX~P5YOT!QOU!ROj!SO!]#nX~OT!QOU!ROW!POX!POY!POZ!PO[!PO]!POj!SO~O!]#nX~P@cO!]$WO~O!]$XO~
P@cOT!QOU!ROj!SO!]$XO~Ou!sa#X!sa#Y!sa#`!sa#t!sa!_!sa!b!sa!c!sa#p!sa!j!sa~P)pOu!sa#X!sa#Y!sa#`!sa#t!sa!_!sa!b!sa!c!sa#p!sa!j!sa~OP|OQ|OR}OS}Od!OOe!OOf!OOg!OOh!OOi!OO~PBgOT!QOU!ROj!SO~PBgOl!lO!SpOu!ua#X!ua#Y!ua#`!ua#t!ua!_!ua!b!ua!c!ua#p!ua!j!ua~O^{OR!liS!lid!lie!lif!lig!lih!lii!liu!li#X!li#Y!li#`!li#t!li#p!li!_!li!b!li!c!li!j!li~OP!liQ!li~PEQOP|OQ|O~PEQOP|OQ|Od!lie!lif!lig!lih!lii!liu!li#X!li#Y!li#`!li#t!li#p!li!_!li!b!li!c!li!j!li~OR!liS!li~PFsOR}OS}O^{O~PFsOR}OS}O~PFsOW!POX!POY!POZ!PO[!PO]!POTxijxiuxi#Xxi#Yxi#`xi#txi#pxi!]xi!_xi!bxi!cxi!jxi~OU!RO~PHpOU!RO~PISOUxi~PHpOT!QOU!ROjxiuxi#Xxi#Yxi#`xi#txi#pxi!]xi!_xi!bxi!cxi!jxi~OW!POX!POY!POZ!PO[!PO]!PO~PJdO#XPO#`PO#p$_O~P+QO#p$_O~O#p$_Ou#^X#Y#^X~O!]!fO#p$_Ou#^X#Y#^X~O#p$_O~P/{O#p$_O~P:lOqhO!doO~P.PO#XPO#`PO#p$_O~O!SpO#XPO#`PO#s$bO~O#XPO#`PO#s$dO~P5YOu!iO#Y!jO#X!xi#`!xi#t!xi!_!xi!b!xi!c!xi#p!xi!j!xi~Ou!iO#Y!jO#X!wi#`!wi#t!wi!_!wi!b!wi!c!wi#p!wi!j!wi~Ou!iO#Y!jO!_!`X!b!`X!c!`X!j!`X~O!_#mP!b#mP!c#mP!j#mP~PYO!_$kO!b$lO!c$mO~O!S!nO!]!Qa~O!_$kO!b$lO!c$vO~O!SpOu!ui#X!ui#Y!ui#`!ui#t!ui!_!ui!b!ui!c!ui#p!ui!j!ui~Ol!lO~P!!}O#XPO#`PO#p$zO~O#XPO#`PO#pzi~O!SpO#XPO#`PO#s$}O~O#XPO#`PO#s%OO~P5YOu!iO#XPO#Y!jO#`PO~O!_#mX!b#mX!c#mX!j#mX~PYOl%RO~O!]%SO~O!c%TO~O!b$lO!c%TO~Ou!iO!_$kO!b$lO!c%WO#Y!jO~O!_#mP!b#mP!c#mP~PYO!c%_O!j%^O~O!c%aO~O!c%bO~O!b$lO!c%bO~O!SpOu!uq#X!uq#Y!uq#`!uq#t!uq!_!uq!b!uq!c!uq#p!uq!j!uq~OqhO!doO#pzq~P.PO#XPO#`PO#pzq~O!]%gO~O!c%iO~O!c%jO~O!b$lO!c%jO~O!_$kO!b$lO!c%jO~O!c%nO!j%^O~O!]%qO!g%pO~O!c%nO~O!c%rO~OqhO!doO#pzy~P.PO!c%uO~O!b$lO!c%uO~O!c%xO~O!c%{O~O!]%|O~Ol#QOo&PO|#QO}&PO#cYO~O#d&OO~O|!m~",
|
||||
goto: "<[#pPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP#qP$_P$w%x'['bPP(v)S*P*SP*YP+d+h+dPPPP,TP,a,yPPP-a#qP.R.oP.s.yP/s0z$_$_P$_P$_P$_$_2T2Z2g3c3q3{4R4Y4`4j4p4z5UPPPPP5d5h6dP7v9oPP:|P;^PPPPP;b;h;nxbOg!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|Q!ZYR#i!U}bOYg!U!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|x`Og!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|Q!^YS!si%pQ!xjQ!|lQ#`!RQ#b!QQ#e!SR#l!U|UOgi!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%p%q%|!W]RU[jlps{|}!O!P!Q!R!S!V!W!`!c!r#m#r#w$c${%e%sS!WY!US#Qn&OR#UuQ!YYR#h!UxROg!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|!WtRU[jlps{|}!O!P!Q!R!S!V!W!`!c!r#m#r#w$c${%e%sS!VY!US!ri%pS#Qn&OR#TueqRUs!V!W!r#m${%e%sxbOg!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|doRUs!V!W!r#m${%e%sQ!ZYQ#RpR#i!UR!qhX!oh!m!p$S#Y]ORUY[gijlps{|}!O!P!Q!R!S!U!V!W!`!c!d!e!f!i!r!y#m#r#w#{$O$Q$V$W$X$c$i$q$s${%S%e%g%p%q%s%|R$T!nTwSy|VOYg!U!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|R#UuQ$o#|Q$x$YQ%Y$rR%l%ZQ#|!fQ$Y!yQ$t$WQ$u$XQ%h%SQ%t%gQ%z%qR%}%|Q$n#|Q$w$YQ%U$oQ%X$rQ%c$xS%k%Y%ZR%v%ldqRUs!V!W!r#m${%e%sQ!a[[#Om!}#P$Z$[$yQ#p!`X#s!a#p#t$a|VOYg!U!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|T!ui%pT%[$t%]Q%`$tR%o%]xXOg!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|Q!XYQ!{lQ#Y|Q#]}Q#_!OR#g!U#Z]ORUY[gijlps{|}!O!P!Q!R!S!U!V!W!`!c!d!e!f!i!r!y#m#r#w#{$O$Q$V$W$X$c$i$q$s${%S%e%g%p%q%s%|![]RU[ijlps{|}!O!P!Q!R!S!V!W!`!c!r#m#r#w$c${%e%p%s}^OYg!U!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|QgOR!kg^!gd!_#x#y#z$h$rR#}!gQ!UYQ!`[d#f!U!`#m#n$O$^$q${%e%sS#m!V!WS#n!X!^Q$O!iS$^#g#lQ$q$QQ${$`R%e$|Q!mhQ!}mU$R!m!}$[R$[#PQ!phQ$S!mT$U!p$SQySR#WyS$i#{$sR%Q$iQ$|$`R%f$|YsRU!V!W!rR#SsQ%]$tR%m%]Q#t!aQ$a#pT$e#t$aQ#w!cQ$c#rT$f#w$cQ#PmQ$Z!}U$]#P$Z$yR$y$[TfOgSdOgS!_Y!UQ#x!dQ#y!e`#z!f!y$W$X%S%g%q%|Q$P!iU$h#{$i$sS$p$O$QQ$r$VR%V$qSeOg|!TY[!U!V!W!X!^!`!i#g#l#m#n$O$Q$^$`$q${$|%e%sQ!hdW#s!a#p#t$aW#v!c#r#w$c`#{!f!y$W$X%S%g%q%|U$g#{$i$sQ$s$VR%P$h|WOYg!U!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|doRUs!V!W!r#m${%e%sQ!c[S!ti%pQ!wjQ!zlQ#RpQ#Y{Q#Z|Q#[}Q#^!OQ#`!PQ#a!QQ#c!RQ#d!SQ#r!`X#v!c#r#w$cx_Og!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|![tRU[ijlps{|}!O!P!Q!R!S!V!W!`!c!r#m#r#w$c${%e%p%sQ!]YR#k!U[rRUs!V!W!rQ$`#mV%d${%e%sTxS
yQ$j#{R%Z$sQ!viR%y%pxcOg!d!e!f!i!y#{$O$Q$V$W$X$i$q$s%S%g%q%|Q![YR#j!U",
|
||||
nodeNames: "⚠ Star Slash Plus Minus And Or Eq EqEq Neq Lt Lte Gt Gte Modulo PlusEq MinusEq StarEq SlashEq ModuloEq Band Bor Bxor Shl Shr Ushr NullishCoalesce NullishEq Identifier AssignableIdentifier Word IdentifierBeforeDot CurlyString Do Comment Program PipeExpr operator WhileExpr keyword ConditionalOp ParenExpr FunctionCall DotGet Number Dollar PositionalArg FunctionDef Params NamedParam NamedArgPrefix String StringFragment Interpolation FunctionCallOrIdentifier EscapeSeq DoubleQuote Boolean Null colon CatchExpr keyword Block FinallyExpr keyword keyword Underscore NamedArg IfExpr keyword FunctionCall ElseIfExpr keyword ElseExpr BinOp Regex Dict Array FunctionCallWithBlock TryExpr keyword Throw keyword Import keyword CompoundAssign Assign",
|
||||
maxTerm: 128,
|
||||
context: trackScope,
|
||||
nodeProps: [
|
||||
["closedBy", 48,"end"]
|
||||
["closedBy", 59,"end"]
|
||||
],
|
||||
propSources: [highlighting],
|
||||
skippedNodes: [0,25],
|
||||
repeatNodeCount: 11,
|
||||
tokenData: "C|~R|OX#{XY$jYZ%TZp#{pq$jqs#{st%ntu'tuw#{wx'yxy(Oyz(iz{#{{|)S|}#{}!O+v!O!P#{!P!Q.]!Q![)q![!]6x!]!^%T!^!}#{!}#O7c#O#P9X#P#Q9^#Q#R#{#R#S9w#S#T#{#T#Y,w#Y#Z:b#Z#b,w#b#c?`#c#f,w#f#g@]#g#h,w#h#iAY#i#o,w#o#p#{#p#qC^#q;'S#{;'S;=`$d<%l~#{~O#{~~CwS$QUtSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{S$gP;=`<%l#{^$qUtS!xYOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U%[UtS#[QOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{^%sWtSOp#{pq&]qt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{^&dZiYtSOY&]YZ#{Zt&]tu'Vuw&]wx'Vx#O&]#O#P'V#P;'S&];'S;=`'n<%lO&]Y'[SiYOY'VZ;'S'V;'S;=`'h<%lO'VY'kP;=`<%l'V^'qP;=`<%l&]~'yO#T~~(OO#R~U(VUtS!}QOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U(pUtS#_QOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U)XWtSOt#{uw#{x!Q#{!Q![)q![#O#{#P;'S#{;'S;=`$d<%lO#{U)xYtSnQOt#{uw#{x!O#{!O!P*h!P!Q#{!Q![)q![#O#{#P;'S#{;'S;=`$d<%lO#{U*mWtSOt#{uw#{x!Q#{!Q![+V![#O#{#P;'S#{;'S;=`$d<%lO#{U+^WtSnQOt#{uw#{x!Q#{!Q![+V![#O#{#P;'S#{;'S;=`$d<%lO#{U+{^tSOt#{uw#{x}#{}!O,w!O!Q#{!Q![)q![!_#{!_!`-r!`#O#{#P#T#{#T#o,w#o;'S#{;'S;=`$d<%lO#{U,|[tSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#o,w#o;'S#{;'S;=`$d<%lO#{U-yU{QtSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U.bWtSOt#{uw#{x!P#{!P!Q.z!Q#O#{#P;'S#{;'S;=`$d<%lO#{U/P^tSOY/{YZ#{Zt/{tu1Ouw/{wx1Ox!P/{!P!Q#{!Q!}/{!}#O5q#O#P3^#P;'S/{;'S;=`6r<%lO/{U0S^tSxQOY/{YZ#{Zt/{tu1Ouw/{wx1Ox!P/{!P!Q3s!Q!}/{!}#O5q#O#P3^#P;'S/{;'S;=`6r<%lO/{Q1TXxQOY1OZ!P1O!P!Q1p!Q!}1O!}#O2_#O#P3^#P;'S1O;'S;=`3m<%lO1OQ1sP!P!Q1vQ1{UxQ#Z#[1v#]#^1v#a#b1v#g#h1v#i#j1v#m#n1vQ2bVOY2_Z#O2_#O#P2w#P#Q1O#Q;'S2_;'S;=`3W<%lO2_Q2zSOY2_Z;'S2_;'S;=`3W<%lO2_Q3ZP;=`<%l2_Q3aSOY1OZ;'S1O;'S;=`3m<%lO1OQ3pP;=`<%l1OU3xWtSOt#{uw#{x!P#{!P!Q4b!Q#O#{#P;'S#{;'S;=`$d<%lO#{U4ibtSxQOt#{uw#{x#O#{#P#Z#{#Z#[4b#[#]#{#]#^4b#^#a#{#a#b4b#b#g#{#g#h4b#h#i#{#i#j4b#j#m#{#m#n4b#n;'S#{;'S;=`$d<%lO#{U5v[tSOY5qYZ#{Zt5qtu2_uw5qwx2_x#O5q#O#P2w#P#Q/{#Q;'S5q;'S;=`6l<%lO5qU6oP;=`<%l5qU6uP;=`<%l/{U7PUtS!QQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U7jW#ZQtSOt#{uw#{x!_#{!_!`8S!`#O#{#P;'S#{;'S;=`$d<%lO#{U8XVtSOt#{uw#{x#O#{#P#Q8n#Q;'S#{;'S;=`$d<%lO#{U8uU#YQtSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{~9^O#U~U9eU#^QtSOt#{uw#{
x#O#{#P;'S#{;'S;=`$d<%lO#{U:OUtS!XQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U:g]tSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#U;`#U#o,w#o;'S#{;'S;=`$d<%lO#{U;e^tSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#`,w#`#a<a#a#o,w#o;'S#{;'S;=`$d<%lO#{U<f^tSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#g,w#g#h=b#h#o,w#o;'S#{;'S;=`$d<%lO#{U=g^tSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#X,w#X#Y>c#Y#o,w#o;'S#{;'S;=`$d<%lO#{U>j[wQtSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#o,w#o;'S#{;'S;=`$d<%lO#{^?g[#VWtSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#o,w#o;'S#{;'S;=`$d<%lO#{^@d[#XWtSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#o,w#o;'S#{;'S;=`$d<%lO#{^Aa^#WWtSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#f,w#f#gB]#g#o,w#o;'S#{;'S;=`$d<%lO#{UBb^tSOt#{uw#{x}#{}!O,w!O!_#{!_!`-r!`#O#{#P#T#{#T#i,w#i#j=b#j#o,w#o;'S#{;'S;=`$d<%lO#{UCeU!aQtSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{~C|O#a~",
|
||||
tokenizers: [operatorTokenizer, 1, 2, 3, tokenizer, new LocalTokenGroup("[~RP!O!PU~ZO!|~~", 11)],
|
||||
topRules: {"Program":[0,26]},
|
||||
specialized: [{term: 20, get: (value: any, stack: any) => (specializeKeyword(value, stack) << 1), external: specializeKeyword},{term: 20, get: (value: keyof typeof spec_Identifier) => spec_Identifier[value] || -1}],
|
||||
tokenPrec: 1634
|
||||
skippedNodes: [0,34],
|
||||
repeatNodeCount: 13,
|
||||
tokenData: "Lp~R}OX$OXY$mYp$Opq$mqr$Ors%Wst'^tu(uuw$Owx(|xy)Ryz)lz{$O{|*V|}$O}!O*V!O!P$O!P!Q3r!Q!R*w!R![-l![!]<_!]!^<x!^!}$O!}#O=c#O#P?X#P#Q?^#Q#R$O#R#S?w#S#T$O#T#Y@b#Y#ZA|#Z#b@b#b#cGj#c#f@b#f#gHm#g#h@b#h#iIp#i#o@b#o#p$O#p#qLQ#q;'S$O;'S;=`$g<%l~$O~O$O~~LkS$TU!USOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OS$jP;=`<%l$O^$tU!US#[YOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OU%]Z!USOr%Wrs&Ost%Wtu&iuw%Wwx&ix#O%W#O#P&i#P;'S%W;'S;=`'W<%lO%WU&VU!YQ!USOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OQ&lTOr&irs&{s;'S&i;'S;=`'Q<%lO&iQ'QO!YQQ'TP;=`<%l&iU'ZP;=`<%l%W^'eZrY!USOY'^YZ$OZt'^tu(Wuw'^wx(Wx#O'^#O#P(W#P;'S'^;'S;=`(o<%lO'^Y(]SrYOY(WZ;'S(W;'S;=`(i<%lO(WY(lP;=`<%l(W^(rP;=`<%l'^^(|O#h[}Q~)RO#f~U)YU!US#cQOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OU)sU!US#pQOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OU*[X!USOt$Ouw$Ox!Q$O!Q!R*w!R![-l![#O$O#P;'S$O;'S;=`$g<%lO$OU+Ob!US|QOt$Ouw$Ox!O$O!O!P,W!P!Q$O!Q![-l![#O$O#P#R$O#R#S.i#S#U$O#U#V/W#V#c$O#c#d0l#d#l$O#l#m1z#m;'S$O;'S;=`$g<%lO$OU,]W!USOt$Ouw$Ox!Q$O!Q![,u![#O$O#P;'S$O;'S;=`$g<%lO$OU,|Y!US|QOt$Ouw$Ox!Q$O!Q![,u![#O$O#P#R$O#R#S,W#S;'S$O;'S;=`$g<%lO$OU-s[!US|QOt$Ouw$Ox!O$O!O!P,W!P!Q$O!Q![-l![#O$O#P#R$O#R#S.i#S;'S$O;'S;=`$g<%lO$OU.nW!USOt$Ouw$Ox!Q$O!Q![-l![#O$O#P;'S$O;'S;=`$g<%lO$OU/]X!USOt$Ouw$Ox!Q$O!Q!R/x!R!S/x!S#O$O#P;'S$O;'S;=`$g<%lO$OU0PX!US|QOt$Ouw$Ox!Q$O!Q!R/x!R!S/x!S#O$O#P;'S$O;'S;=`$g<%lO$OU0qW!USOt$Ouw$Ox!Q$O!Q!Y1Z!Y#O$O#P;'S$O;'S;=`$g<%lO$OU1bW!US|QOt$Ouw$Ox!Q$O!Q!Y1Z!Y#O$O#P;'S$O;'S;=`$g<%lO$OU2P[!USOt$Ouw$Ox!Q$O!Q![2u![!c$O!c!i2u!i#O$O#P#T$O#T#Z2u#Z;'S$O;'S;=`$g<%lO$OU2|[!US|QOt$Ouw$Ox!Q$O!Q![2u![!c$O!c!i2u!i#O$O#P#T$O#T#Z2u#Z;'S$O;'S;=`$g<%lO$OU3wW!USOt$Ouw$Ox!P$O!P!Q4a!Q#O$O#P;'S$O;'S;=`$g<%lO$OU4f^!USOY5bYZ$OZt5btu6euw5bwx6ex!P5b!P!Q$O!Q!}5b!}#O;W#O#P8s#P;'S5b;'S;=`<X<%lO5bU5i^!US!mQOY5bYZ$OZt5btu6euw5bwx6ex!P5b!P!Q9Y!Q!}5b!}#O;W#O#P8s#P;'S5b;'S;=`<X<%lO5bQ6jX!mQOY6eZ!P6e!P!Q7V!Q!}6e!}#O7t#O#P8s#P;'S6e;'S;=`9S<%lO6eQ7YP!P!Q7]Q7bU!mQ#Z#[7]#]#^7]#a#b7]#g#h7]#i#j7]#m#n7]Q7wVOY7tZ#O7t#O#P8^#P#Q6e#Q;'S7t;'S;=`8m<%lO7tQ8aSOY7tZ;'S7t;'S;=`8m<%lO7tQ8pP;=`<%l7tQ8vSOY6eZ;'S6e;'S;=`9S<%lO6
eQ9VP;=`<%l6eU9_W!USOt$Ouw$Ox!P$O!P!Q9w!Q#O$O#P;'S$O;'S;=`$g<%lO$OU:Ob!US!mQOt$Ouw$Ox#O$O#P#Z$O#Z#[9w#[#]$O#]#^9w#^#a$O#a#b9w#b#g$O#g#h9w#h#i$O#i#j9w#j#m$O#m#n9w#n;'S$O;'S;=`$g<%lO$OU;][!USOY;WYZ$OZt;Wtu7tuw;Wwx7tx#O;W#O#P8^#P#Q5b#Q;'S;W;'S;=`<R<%lO;WU<UP;=`<%l;WU<[P;=`<%l5bU<fU!US!]QOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OU=PU!US#`QOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OU=jW#rQ!USOt$Ouw$Ox!_$O!_!`>S!`#O$O#P;'S$O;'S;=`$g<%lO$OU>XV!USOt$Ouw$Ox#O$O#P#Q>n#Q;'S$O;'S;=`$g<%lO$OU>uU#qQ!USOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$O~?^O#i~U?eU#sQ!USOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OU@OU!US!dQOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OU@g^!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#o@b#o;'S$O;'S;=`$g<%lO$OUAjU!SQ!USOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$OUBR_!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#UCQ#U#o@b#o;'S$O;'S;=`$g<%lO$OUCV`!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#`@b#`#aDX#a#o@b#o;'S$O;'S;=`$g<%lO$OUD^`!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#g@b#g#hE`#h#o@b#o;'S$O;'S;=`$g<%lO$OUEe`!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#X@b#X#YFg#Y#o@b#o;'S$O;'S;=`$g<%lO$OUFn^!ZQ!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#o@b#o;'S$O;'S;=`$g<%lO$O^Gq^#jW!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#o@b#o;'S$O;'S;=`$g<%lO$O^Ht^#lW!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#o@b#o;'S$O;'S;=`$g<%lO$O^Iw`#kW!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#f@b#f#gJy#g#o@b#o;'S$O;'S;=`$g<%lO$OUKO`!USOt$Ouw$Ox}$O}!O@b!O!Q$O!Q![@b![!_$O!_!`Ac!`#O$O#P#T$O#T#i@b#i#jE`#j#o@b#o;'S$O;'S;=`$g<%lO$OULXUuQ!USOt$Ouw$Ox#O$O#P;'S$O;'S;=`$g<%lO$O~LpO#t~",
|
||||
tokenizers: [operatorTokenizer, 1, 2, 3, tokenizer, pipeStartsLineTokenizer, new LocalTokenGroup("[~RP!O!PU~ZO#d~~", 11)],
|
||||
topRules: {"Program":[0,35]},
|
||||
specialized: [{term: 28, get: (value: any, stack: any) => (specializeKeyword(value, stack) << 1), external: specializeKeyword},{term: 28, get: (value: keyof typeof spec_Identifier) => spec_Identifier[value] || -1}],
|
||||
tokenPrec: 2589
|
||||
})
|
||||
|
|
|
|||
|
|
@ -368,6 +368,138 @@ describe('Parentheses', () => {
|
|||
PositionalArg
|
||||
Number 3`)
|
||||
})
|
||||
|
||||
test('function call with named args on multiple lines in parens', () => {
|
||||
expect(`(tail
|
||||
arg1=true
|
||||
arg2=30
|
||||
)`).toMatchTree(`
|
||||
ParenExpr
|
||||
FunctionCall
|
||||
Identifier tail
|
||||
NamedArg
|
||||
NamedArgPrefix arg1=
|
||||
Boolean true
|
||||
NamedArg
|
||||
NamedArgPrefix arg2=
|
||||
Number 30
|
||||
`)
|
||||
|
||||
expect(`(
|
||||
tail
|
||||
arg1=true
|
||||
arg2=30
|
||||
)`).toMatchTree(`
|
||||
ParenExpr
|
||||
FunctionCall
|
||||
Identifier tail
|
||||
NamedArg
|
||||
NamedArgPrefix arg1=
|
||||
Boolean true
|
||||
NamedArg
|
||||
NamedArgPrefix arg2=
|
||||
Number 30
|
||||
`)
|
||||
})
|
||||
|
||||
test('binop with newlines in parens', () => {
|
||||
expect(`(
|
||||
1 + 2
|
||||
)`).toMatchTree(`
|
||||
ParenExpr
|
||||
BinOp
|
||||
Number 1
|
||||
Plus +
|
||||
Number 2`)
|
||||
})
|
||||
|
||||
test('comparison with newlines in parens', () => {
|
||||
expect(`(
|
||||
1 < 2
|
||||
)`).toMatchTree(`
|
||||
ParenExpr
|
||||
ConditionalOp
|
||||
Number 1
|
||||
Lt <
|
||||
Number 2`)
|
||||
})
|
||||
|
||||
test('function call with multiple identifiers on separate lines in parens', () => {
|
||||
expect(`(echo
|
||||
arg1
|
||||
arg2
|
||||
arg3
|
||||
)`).toMatchTree(`
|
||||
ParenExpr
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
Identifier arg1
|
||||
PositionalArg
|
||||
Identifier arg2
|
||||
PositionalArg
|
||||
Identifier arg3`)
|
||||
})
|
||||
|
||||
test('function call with mulitline identifiers starting separate lines in parens', () => {
|
||||
expect(`(
|
||||
|
||||
echo
|
||||
arg1
|
||||
arg2
|
||||
arg3
|
||||
|
||||
)`).toMatchTree(`
|
||||
ParenExpr
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
Identifier arg1
|
||||
PositionalArg
|
||||
Identifier arg2
|
||||
PositionalArg
|
||||
Identifier arg3`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Number literals', () => {
|
||||
test('allows underscores in integer literals', () => {
|
||||
expect('10_000').toMatchTree(`Number 10_000`)
|
||||
expect('1_000_000').toMatchTree(`Number 1_000_000`)
|
||||
expect('100_000').toMatchTree(`Number 100_000`)
|
||||
})
|
||||
|
||||
test('allows underscores in decimal literals', () => {
|
||||
expect('3.14_159').toMatchTree(`Number 3.14_159`)
|
||||
expect('1_000.50').toMatchTree(`Number 1_000.50`)
|
||||
expect('0.000_001').toMatchTree(`Number 0.000_001`)
|
||||
})
|
||||
|
||||
test('allows underscores in negative numbers', () => {
|
||||
expect('-10_000').toMatchTree(`Number -10_000`)
|
||||
expect('-3.14_159').toMatchTree(`Number -3.14_159`)
|
||||
})
|
||||
|
||||
test('allows underscores in positive numbers with explicit sign', () => {
|
||||
expect('+10_000').toMatchTree(`Number +10_000`)
|
||||
expect('+3.14_159').toMatchTree(`Number +3.14_159`)
|
||||
})
|
||||
|
||||
test('works in expressions', () => {
|
||||
expect('1_000 + 2_000').toMatchTree(`
|
||||
BinOp
|
||||
Number 1_000
|
||||
Plus +
|
||||
Number 2_000`)
|
||||
})
|
||||
|
||||
test('works in function calls', () => {
|
||||
expect('echo 10_000').toMatchTree(`
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
Number 10_000`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('BinOp', () => {
|
||||
|
|
@ -595,6 +727,87 @@ describe('CompoundAssign', () => {
|
|||
PositionalArg
|
||||
Number 3`)
|
||||
})
|
||||
|
||||
test('parses ??= operator', () => {
|
||||
expect('x ??= 5').toMatchTree(`
|
||||
CompoundAssign
|
||||
AssignableIdentifier x
|
||||
NullishEq ??=
|
||||
Number 5`)
|
||||
})
|
||||
|
||||
test('parses ??= with expression', () => {
|
||||
expect('config ??= get-default').toMatchTree(`
|
||||
CompoundAssign
|
||||
AssignableIdentifier config
|
||||
NullishEq ??=
|
||||
FunctionCallOrIdentifier
|
||||
Identifier get-default`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Nullish coalescing operator', () => {
|
||||
test('? can still end an identifier', () => {
|
||||
expect('what?').toMatchTree(`
|
||||
FunctionCallOrIdentifier
|
||||
Identifier what?`)
|
||||
})
|
||||
|
||||
test('?? can still end an identifier', () => {
|
||||
expect('what??').toMatchTree(`
|
||||
FunctionCallOrIdentifier
|
||||
Identifier what??`)
|
||||
})
|
||||
|
||||
test('?? can still be in a word', () => {
|
||||
expect('what??the').toMatchTree(`
|
||||
FunctionCallOrIdentifier
|
||||
Identifier what??the`)
|
||||
})
|
||||
|
||||
test('?? can still start a word', () => {
|
||||
expect('??what??the').toMatchTree(`
|
||||
Word ??what??the`)
|
||||
})
|
||||
|
||||
test('parses ?? operator', () => {
|
||||
expect('x ?? 5').toMatchTree(`
|
||||
ConditionalOp
|
||||
Identifier x
|
||||
NullishCoalesce ??
|
||||
Number 5`)
|
||||
})
|
||||
|
||||
test('parses chained ?? operators', () => {
|
||||
expect('a ?? b ?? c').toMatchTree(`
|
||||
ConditionalOp
|
||||
ConditionalOp
|
||||
Identifier a
|
||||
NullishCoalesce ??
|
||||
Identifier b
|
||||
NullishCoalesce ??
|
||||
Identifier c`)
|
||||
})
|
||||
|
||||
test('parses ?? with expressions', () => {
|
||||
expect('get-value ?? default-value').toMatchTree(`
|
||||
ConditionalOp
|
||||
Identifier get-value
|
||||
NullishCoalesce ??
|
||||
Identifier default-value`)
|
||||
})
|
||||
|
||||
test('parses ?? with parenthesized function call', () => {
|
||||
expect('get-value ?? (default 10)').toMatchTree(`
|
||||
ConditionalOp
|
||||
Identifier get-value
|
||||
NullishCoalesce ??
|
||||
ParenExpr
|
||||
FunctionCall
|
||||
Identifier default
|
||||
PositionalArg
|
||||
Number 10`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('DotGet whitespace sensitivity', () => {
|
||||
|
|
@ -639,7 +852,7 @@ describe('Comments', () => {
|
|||
test('are greedy', () => {
|
||||
expect(`
|
||||
x = 5 # one banana
|
||||
y = 2 # two bananas`).toMatchTree(`
|
||||
y = 2 #two bananas`).toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier x
|
||||
Eq =
|
||||
|
|
@ -649,7 +862,7 @@ y = 2 # two bananas`).toMatchTree(`
|
|||
AssignableIdentifier y
|
||||
Eq =
|
||||
Number 2
|
||||
Comment # two bananas`)
|
||||
Comment #two bananas`)
|
||||
|
||||
expect(`
|
||||
# some comment
|
||||
|
|
@ -670,11 +883,11 @@ basename = 5 # very astute
|
|||
})
|
||||
|
||||
test('words with # are not considered comments', () => {
|
||||
expect('find #hashtag-file.txt').toMatchTree(`
|
||||
expect('find my#hashtag-file.txt').toMatchTree(`
|
||||
FunctionCall
|
||||
Identifier find
|
||||
PositionalArg
|
||||
Word #hashtag-file.txt`)
|
||||
Word my#hashtag-file.txt`)
|
||||
})
|
||||
|
||||
test('hastags in strings are not comments', () => {
|
||||
|
|
@ -824,3 +1037,34 @@ Assign
|
|||
`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('import', () => {
|
||||
test('parses single import', () => {
|
||||
expect(`import str`).toMatchTree(`
|
||||
Import
|
||||
keyword import
|
||||
Identifier str
|
||||
`)
|
||||
})
|
||||
|
||||
test('parses multiple imports', () => {
|
||||
expect(`import str math list`).toMatchTree(`
|
||||
Import
|
||||
keyword import
|
||||
Identifier str
|
||||
Identifier math
|
||||
Identifier list
|
||||
`)
|
||||
})
|
||||
|
||||
test('parses named args', () => {
|
||||
expect(`import str only=ends-with?`).toMatchTree(`
|
||||
Import
|
||||
keyword import
|
||||
Identifier str
|
||||
NamedArg
|
||||
NamedArgPrefix only=
|
||||
Identifier ends-with?
|
||||
`)
|
||||
})
|
||||
})
|
||||
72
src/parser/tests/bitwise.test.ts
Normal file
72
src/parser/tests/bitwise.test.ts
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
import { expect, describe, test } from 'bun:test'
|
||||
|
||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
||||
|
||||
describe('bitwise operators - grammar', () => {
|
||||
test('parses band (bitwise AND)', () => {
|
||||
expect('5 band 3').toMatchTree(`
|
||||
BinOp
|
||||
Number 5
|
||||
Band band
|
||||
Number 3`)
|
||||
})
|
||||
|
||||
test('parses bor (bitwise OR)', () => {
|
||||
expect('5 bor 3').toMatchTree(`
|
||||
BinOp
|
||||
Number 5
|
||||
Bor bor
|
||||
Number 3`)
|
||||
})
|
||||
|
||||
test('parses bxor (bitwise XOR)', () => {
|
||||
expect('5 bxor 3').toMatchTree(`
|
||||
BinOp
|
||||
Number 5
|
||||
Bxor bxor
|
||||
Number 3`)
|
||||
})
|
||||
|
||||
test('parses << (left shift)', () => {
|
||||
expect('5 << 2').toMatchTree(`
|
||||
BinOp
|
||||
Number 5
|
||||
Shl <<
|
||||
Number 2`)
|
||||
})
|
||||
|
||||
test('parses >> (signed right shift)', () => {
|
||||
expect('20 >> 2').toMatchTree(`
|
||||
BinOp
|
||||
Number 20
|
||||
Shr >>
|
||||
Number 2`)
|
||||
})
|
||||
|
||||
test('parses >>> (unsigned right shift)', () => {
|
||||
expect('-1 >>> 1').toMatchTree(`
|
||||
BinOp
|
||||
Number -1
|
||||
Ushr >>>
|
||||
Number 1`)
|
||||
})
|
||||
|
||||
test('parses bnot (bitwise NOT) as function call', () => {
|
||||
expect('bnot 5').toMatchTree(`
|
||||
FunctionCall
|
||||
Identifier bnot
|
||||
PositionalArg
|
||||
Number 5`)
|
||||
})
|
||||
|
||||
test('bitwise operators work in expressions', () => {
|
||||
expect('x = 5 band 3').toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier x
|
||||
Eq =
|
||||
BinOp
|
||||
Number 5
|
||||
Band band
|
||||
Number 3`)
|
||||
})
|
||||
})
|
||||
|
|
@ -298,4 +298,163 @@ end`).toMatchTree(`
|
|||
Number 2
|
||||
`)
|
||||
})
|
||||
|
||||
// NOTE: these are parsed as DotGet(meta, DotGet(script, name)) because that's easiest,
|
||||
// but the compiler flattens them
|
||||
test('chained dot get: meta.script.name', () => {
|
||||
expect('meta = 42; meta.script.name').toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier meta
|
||||
Eq =
|
||||
Number 42
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
IdentifierBeforeDot meta
|
||||
DotGet
|
||||
IdentifierBeforeDot script
|
||||
Identifier name
|
||||
`)
|
||||
})
|
||||
|
||||
test('chained dot get: a.b.c.d', () => {
|
||||
expect('a = 1; a.b.c.d').toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier a
|
||||
Eq =
|
||||
Number 1
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
IdentifierBeforeDot a
|
||||
DotGet
|
||||
IdentifierBeforeDot b
|
||||
DotGet
|
||||
IdentifierBeforeDot c
|
||||
Identifier d
|
||||
`)
|
||||
})
|
||||
|
||||
test('chained dot get in function call', () => {
|
||||
expect('config = 1; echo config.db.host').toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier config
|
||||
Eq =
|
||||
Number 1
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
DotGet
|
||||
IdentifierBeforeDot config
|
||||
DotGet
|
||||
IdentifierBeforeDot db
|
||||
Identifier host
|
||||
`)
|
||||
})
|
||||
|
||||
test('chained dot get with numeric index at end', () => {
|
||||
expect('obj = 1; obj.items.0').toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier obj
|
||||
Eq =
|
||||
Number 1
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
IdentifierBeforeDot obj
|
||||
DotGet
|
||||
IdentifierBeforeDot items
|
||||
Number 0
|
||||
`)
|
||||
})
|
||||
|
||||
test('chained dot get with ParenExpr at end', () => {
|
||||
expect('obj = 1; obj.items.(i)').toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier obj
|
||||
Eq =
|
||||
Number 1
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
IdentifierBeforeDot obj
|
||||
DotGet
|
||||
IdentifierBeforeDot items
|
||||
ParenExpr
|
||||
FunctionCallOrIdentifier
|
||||
Identifier i
|
||||
`)
|
||||
})
|
||||
|
||||
test('not in scope remains Word with chained dots', () => {
|
||||
expect('readme.md.bak').toMatchTree(`Word readme.md.bak`)
|
||||
})
|
||||
|
||||
test('chained dot get in nested functions', () => {
|
||||
expect(`do cfg:
|
||||
do inner:
|
||||
cfg.db.host
|
||||
end
|
||||
end`).toMatchTree(`
|
||||
FunctionDef
|
||||
Do do
|
||||
Params
|
||||
Identifier cfg
|
||||
colon :
|
||||
FunctionDef
|
||||
Do do
|
||||
Params
|
||||
Identifier inner
|
||||
colon :
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
IdentifierBeforeDot cfg
|
||||
DotGet
|
||||
IdentifierBeforeDot db
|
||||
Identifier host
|
||||
keyword end
|
||||
keyword end
|
||||
`)
|
||||
})
|
||||
|
||||
test('mixed simple and chained dot get', () => {
|
||||
expect('obj = 1; obj.a; obj.b.c').toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier obj
|
||||
Eq =
|
||||
Number 1
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
IdentifierBeforeDot obj
|
||||
Identifier a
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
IdentifierBeforeDot obj
|
||||
DotGet
|
||||
IdentifierBeforeDot b
|
||||
Identifier c
|
||||
`)
|
||||
})
|
||||
|
||||
test.skip('chained numeric dot get: row.2.1.b', () => {
|
||||
expect('row = []; row.2.1').toMatchTree(`
|
||||
Assign
|
||||
AssignableIdentifier row
|
||||
Eq =
|
||||
Array []
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
IdentifierBeforeDot row
|
||||
DotGet
|
||||
Number 2
|
||||
DotGet
|
||||
Number 1
|
||||
Identifier b
|
||||
`)
|
||||
|
||||
test('parses $.pid just fine', () => {
|
||||
expect(`$.pid`).toMatchTree(`
|
||||
FunctionCallOrIdentifier
|
||||
DotGet
|
||||
Dollar $
|
||||
Identifier pid
|
||||
`)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -2,6 +2,88 @@ import { expect, describe, test } from 'bun:test'
|
|||
|
||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
||||
|
||||
describe('number literals', () => {
|
||||
test('binary numbers', () => {
|
||||
expect('0b110').toMatchTree(`
|
||||
Number 0b110
|
||||
`)
|
||||
})
|
||||
|
||||
test('hex numbers', () => {
|
||||
expect('0xdeadbeef').toMatchTree(`
|
||||
Number 0xdeadbeef
|
||||
`)
|
||||
})
|
||||
|
||||
test('hex numbers uppercase', () => {
|
||||
expect('0xFF').toMatchTree(`
|
||||
Number 0xFF
|
||||
`)
|
||||
})
|
||||
|
||||
test('octal numbers', () => {
|
||||
expect('0o644').toMatchTree(`
|
||||
Number 0o644
|
||||
`)
|
||||
|
||||
expect('0o055').toMatchTree(`
|
||||
Number 0o055
|
||||
`)
|
||||
})
|
||||
|
||||
test('decimal numbers still work', () => {
|
||||
expect('42').toMatchTree(`
|
||||
Number 42
|
||||
`)
|
||||
})
|
||||
|
||||
test('negative binary', () => {
|
||||
expect('-0b110').toMatchTree(`
|
||||
Number -0b110
|
||||
`)
|
||||
})
|
||||
|
||||
test('negative hex', () => {
|
||||
expect('-0xFF').toMatchTree(`
|
||||
Number -0xFF
|
||||
`)
|
||||
})
|
||||
|
||||
test('negative octal', () => {
|
||||
expect('-0o755').toMatchTree(`
|
||||
Number -0o755
|
||||
`)
|
||||
})
|
||||
|
||||
test('positive prefix binary', () => {
|
||||
expect('+0b110').toMatchTree(`
|
||||
Number +0b110
|
||||
`)
|
||||
})
|
||||
|
||||
test('positive prefix hex', () => {
|
||||
expect('+0xFF').toMatchTree(`
|
||||
Number +0xFF
|
||||
`)
|
||||
})
|
||||
|
||||
test('positive prefix octal', () => {
|
||||
expect('+0o644').toMatchTree(`
|
||||
Number +0o644
|
||||
`)
|
||||
})
|
||||
|
||||
test('hex, binary, and octal in arrays', () => {
|
||||
expect('[0xFF 0b110 0o644 42]').toMatchTree(`
|
||||
Array
|
||||
Number 0xFF
|
||||
Number 0b110
|
||||
Number 0o644
|
||||
Number 42
|
||||
`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('array literals', () => {
|
||||
test('work with numbers', () => {
|
||||
expect('[1 2 3]').toMatchTree(`
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { expect, describe, test } from 'bun:test'
|
||||
import { parser } from '../shrimp'
|
||||
|
||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
||||
|
||||
|
|
@ -98,4 +99,237 @@ describe('pipe expressions', () => {
|
|||
Identifier double
|
||||
`)
|
||||
})
|
||||
|
||||
test('string literals can be piped', () => {
|
||||
expect(`'hey there' | echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
String
|
||||
StringFragment hey there
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo
|
||||
`)
|
||||
})
|
||||
|
||||
test('number literals can be piped', () => {
|
||||
expect(`42 | echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
Number 42
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo`)
|
||||
|
||||
expect(`4.22 | echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
Number 4.22
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo`)
|
||||
})
|
||||
|
||||
test('null literals can be piped', () => {
|
||||
expect(`null | echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
Null null
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo`)
|
||||
})
|
||||
|
||||
test('boolean literals can be piped', () => {
|
||||
expect(`true | echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
Boolean true
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo`)
|
||||
})
|
||||
|
||||
test('array literals can be piped', () => {
|
||||
expect(`[1 2 3] | echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
Array
|
||||
Number 1
|
||||
Number 2
|
||||
Number 3
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo
|
||||
`)
|
||||
})
|
||||
|
||||
test('dict literals can be piped', () => {
|
||||
expect(`[a=1 b=2 c=3] | echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
Dict
|
||||
NamedArg
|
||||
NamedArgPrefix a=
|
||||
Number 1
|
||||
NamedArg
|
||||
NamedArgPrefix b=
|
||||
Number 2
|
||||
NamedArg
|
||||
NamedArgPrefix c=
|
||||
Number 3
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo
|
||||
`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('pipe continuation', () => {
|
||||
test('pipe on next line', () => {
|
||||
expect(`hello
|
||||
| echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
FunctionCallOrIdentifier
|
||||
Identifier hello
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo
|
||||
`)
|
||||
|
||||
expect(`echo hello
|
||||
| grep h`).toMatchTree(`
|
||||
PipeExpr
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
Identifier hello
|
||||
operator |
|
||||
FunctionCall
|
||||
Identifier grep
|
||||
PositionalArg
|
||||
Identifier h
|
||||
`)
|
||||
})
|
||||
|
||||
test('pipe on next non-empty line', () => {
|
||||
expect(`hello
|
||||
|
||||
|
||||
| echo`).toMatchTree(`
|
||||
PipeExpr
|
||||
FunctionCallOrIdentifier
|
||||
Identifier hello
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo
|
||||
`)
|
||||
})
|
||||
|
||||
test('multi-line pipe chain', () => {
|
||||
expect(`echo hello
|
||||
| grep h
|
||||
| sort`).toMatchTree(`
|
||||
PipeExpr
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
Identifier hello
|
||||
operator |
|
||||
FunctionCall
|
||||
Identifier grep
|
||||
PositionalArg
|
||||
Identifier h
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier sort
|
||||
`)
|
||||
})
|
||||
|
||||
test('pipe with indentation', () => {
|
||||
expect(`echo hello
|
||||
| grep h
|
||||
| sort`).toMatchTree(`
|
||||
PipeExpr
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
Identifier hello
|
||||
operator |
|
||||
FunctionCall
|
||||
Identifier grep
|
||||
PositionalArg
|
||||
Identifier h
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier sort
|
||||
`)
|
||||
})
|
||||
|
||||
test('pipe after operand on next line (trailing pipe style)', () => {
|
||||
expect(`echo hello |
|
||||
grep h`).toMatchTree(`
|
||||
PipeExpr
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
Identifier hello
|
||||
operator |
|
||||
FunctionCall
|
||||
Identifier grep
|
||||
PositionalArg
|
||||
Identifier h
|
||||
`)
|
||||
})
|
||||
|
||||
test('same-line pipes still work', () => {
|
||||
expect('echo hello | grep h | sort').toMatchTree(`
|
||||
PipeExpr
|
||||
FunctionCall
|
||||
Identifier echo
|
||||
PositionalArg
|
||||
Identifier hello
|
||||
operator |
|
||||
FunctionCall
|
||||
Identifier grep
|
||||
PositionalArg
|
||||
Identifier h
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier sort
|
||||
`)
|
||||
})
|
||||
|
||||
test('lots of pipes', () => {
|
||||
expect(`
|
||||
'this should help readability in long chains'
|
||||
| split ' '
|
||||
| map (ref str.to-upper)
|
||||
| join '-'
|
||||
| echo
|
||||
`).toMatchTree(`
|
||||
PipeExpr
|
||||
String
|
||||
StringFragment this should help readability in long chains
|
||||
operator |
|
||||
FunctionCall
|
||||
Identifier split
|
||||
PositionalArg
|
||||
String
|
||||
StringFragment
|
||||
operator |
|
||||
FunctionCall
|
||||
Identifier map
|
||||
PositionalArg
|
||||
ParenExpr
|
||||
FunctionCall
|
||||
Identifier ref
|
||||
PositionalArg
|
||||
DotGet
|
||||
IdentifierBeforeDot str
|
||||
Identifier to-upper
|
||||
operator |
|
||||
FunctionCall
|
||||
Identifier join
|
||||
PositionalArg
|
||||
String
|
||||
StringFragment -
|
||||
operator |
|
||||
FunctionCallOrIdentifier
|
||||
Identifier echo
|
||||
`)
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -8,7 +8,8 @@ describe('string interpolation', () => {
|
|||
String
|
||||
StringFragment ${'hello '}
|
||||
Interpolation
|
||||
Identifier name
|
||||
FunctionCallOrIdentifier
|
||||
Identifier name
|
||||
`)
|
||||
})
|
||||
|
||||
|
|
@ -44,7 +45,8 @@ describe('string interpolation', () => {
|
|||
String
|
||||
StringFragment x/
|
||||
Interpolation
|
||||
Identifier y
|
||||
FunctionCallOrIdentifier
|
||||
Identifier y
|
||||
StringFragment /z
|
||||
`)
|
||||
})
|
||||
|
|
@ -122,8 +124,58 @@ describe('string escape sequences', () => {
|
|||
String
|
||||
StringFragment value:
|
||||
Interpolation
|
||||
Identifier x
|
||||
FunctionCallOrIdentifier
|
||||
Identifier x
|
||||
EscapeSeq \\n
|
||||
`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('curly strings', () => {
|
||||
test('work on one line', () => {
|
||||
expect('{ one two three }').toMatchTree(`
|
||||
String
|
||||
CurlyString { one two three }
|
||||
`)
|
||||
})
|
||||
|
||||
test('work on multiple lines', () => {
|
||||
expect(`{
|
||||
one
|
||||
two
|
||||
three }`).toMatchTree(`
|
||||
String
|
||||
CurlyString {
|
||||
one
|
||||
two
|
||||
three }`)
|
||||
})
|
||||
|
||||
test('can contain other curlies', () => {
|
||||
expect(`{ { one }
|
||||
two
|
||||
{ three } }`).toMatchTree(`
|
||||
String
|
||||
CurlyString { { one }
|
||||
two
|
||||
{ three } }`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('double quoted strings', () => {
|
||||
test("work", () => {
|
||||
expect(`"hello world"`).toMatchTree(`
|
||||
String
|
||||
DoubleQuote "hello world"`)
|
||||
})
|
||||
|
||||
test("don't interpolate", () => {
|
||||
expect(`"hello $world"`).toMatchTree(`
|
||||
String
|
||||
DoubleQuote "hello $world"`)
|
||||
|
||||
expect(`"hello $(1 + 2)"`).toMatchTree(`
|
||||
String
|
||||
DoubleQuote "hello $(1 + 2)"`)
|
||||
})
|
||||
})
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import { ExternalTokenizer, InputStream, Stack } from '@lezer/lr'
|
||||
import { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot, Do } from './shrimp.terms'
|
||||
import { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot, Do, CurlyString, DotGet, newline, pipeStartsLine } from './shrimp.terms'
|
||||
|
||||
// doobie doobie do (we need the `do` keyword to know when we're defining params)
|
||||
export function specializeKeyword(ident: string) {
|
||||
|
|
@ -8,9 +8,9 @@ export function specializeKeyword(ident: string) {
|
|||
|
||||
// tell the dotGet searcher about builtin globals
|
||||
export const globals: string[] = []
|
||||
export const setGlobals = (newGlobals: string[]) => {
|
||||
export const setGlobals = (newGlobals: string[] | Record<string, any>) => {
|
||||
globals.length = 0
|
||||
globals.push(...newGlobals)
|
||||
globals.push(...(Array.isArray(newGlobals) ? newGlobals : Object.keys(newGlobals)))
|
||||
}
|
||||
|
||||
// The only chars that can't be words are whitespace, apostrophes, closing parens, and EOF.
|
||||
|
|
@ -18,6 +18,10 @@ export const setGlobals = (newGlobals: string[]) => {
|
|||
export const tokenizer = new ExternalTokenizer(
|
||||
(input: InputStream, stack: Stack) => {
|
||||
const ch = getFullCodePoint(input, 0)
|
||||
|
||||
// Handle curly strings
|
||||
if (ch === 123 /* { */) return consumeCurlyString(input, stack)
|
||||
|
||||
if (!isWordChar(ch)) return
|
||||
|
||||
// Don't consume things that start with digits - let Number token handle it
|
||||
|
|
@ -26,7 +30,7 @@ export const tokenizer = new ExternalTokenizer(
|
|||
// Don't consume things that start with - or + followed by a digit (negative/positive numbers)
|
||||
if ((ch === 45 /* - */ || ch === 43) /* + */ && isDigit(input.peek(1))) return
|
||||
|
||||
const isValidStart = isLowercaseLetter(ch) || isEmojiOrUnicode(ch)
|
||||
const isValidStart = isIdentStart(ch)
|
||||
const canBeWord = stack.canShift(Word)
|
||||
|
||||
// Consume all word characters, tracking if it remains a valid identifier
|
||||
|
|
@ -119,13 +123,7 @@ const consumeWordToken = (
|
|||
}
|
||||
|
||||
// Track identifier validity: must be lowercase, digit, dash, or emoji/unicode
|
||||
if (
|
||||
!isLowercaseLetter(ch) &&
|
||||
!isDigit(ch) &&
|
||||
ch !== 45 /* - */ &&
|
||||
ch !== 63 /* ? */ &&
|
||||
!isEmojiOrUnicode(ch)
|
||||
) {
|
||||
if (!isIdentChar(ch)) {
|
||||
if (!canBeWord) break
|
||||
isValidIdentifier = false
|
||||
}
|
||||
|
|
@ -157,17 +155,53 @@ const consumeRestOfWord = (input: InputStream, startPos: number, canBeWord: bool
|
|||
return pos
|
||||
}
|
||||
|
||||
// Consumes { curly strings } and tracks braces so you can { have { braces { inside { braces } } }
|
||||
const consumeCurlyString = (input: InputStream, stack: Stack) => {
|
||||
if (!stack.canShift(CurlyString)) return
|
||||
|
||||
let depth = 0
|
||||
let pos = 0
|
||||
|
||||
while (true) {
|
||||
const ch = input.peek(pos)
|
||||
if (ch < 0) return // EOF - invalid
|
||||
|
||||
if (ch === 123) depth++ // {
|
||||
else if (ch === 125) { // }
|
||||
depth--
|
||||
if (depth === 0) {
|
||||
pos++ // consume final }
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
pos++
|
||||
}
|
||||
|
||||
input.acceptToken(CurlyString, pos)
|
||||
}
|
||||
|
||||
// Check if this identifier is in scope (for property access detection)
|
||||
// Returns IdentifierBeforeDot token if in scope, null otherwise
|
||||
const checkForDotGet = (input: InputStream, stack: Stack, pos: number): number | null => {
|
||||
const identifierText = buildIdentifierText(input, pos)
|
||||
const context = stack.context as { scope: { has(name: string): boolean } } | undefined
|
||||
|
||||
// If identifier is in scope, this is property access (e.g., obj.prop)
|
||||
// If not in scope, it should be consumed as a Word (e.g., file.txt)
|
||||
return context?.scope.has(identifierText) || globals.includes(identifierText)
|
||||
? IdentifierBeforeDot
|
||||
: null
|
||||
// Check if identifier is in scope (lexical scope or globals)
|
||||
const inScope = context?.scope.has(identifierText) || globals.includes(identifierText)
|
||||
|
||||
// property access
|
||||
if (inScope) return IdentifierBeforeDot
|
||||
|
||||
// Not in scope - check if we're inside a DotGet chain
|
||||
// Inside the @skip {} block where DotGet is defined, Word cannot be shifted
|
||||
// but Identifier can be. This tells us we're at the RHS of a DotGet.
|
||||
const canShiftIdentifier = stack.canShift(Identifier)
|
||||
const canShiftWord = stack.canShift(Word)
|
||||
const inDotGetChain = canShiftIdentifier && !canShiftWord
|
||||
|
||||
// continue if we're inside a DotGet
|
||||
return inDotGetChain ? IdentifierBeforeDot : null
|
||||
}
|
||||
|
||||
// Decide between AssignableIdentifier and Identifier using grammar state + peek-ahead
|
||||
|
|
@ -193,6 +227,15 @@ const chooseIdentifierToken = (input: InputStream, stack: Stack): number => {
|
|||
|
||||
const nextCh = getFullCodePoint(input, peekPos)
|
||||
const nextCh2 = getFullCodePoint(input, peekPos + 1)
|
||||
const nextCh3 = getFullCodePoint(input, peekPos + 2)
|
||||
|
||||
// Check for ??= (three-character compound operator)
|
||||
if (nextCh === 63 /* ? */ && nextCh2 === 63 /* ? */ && nextCh3 === 61 /* = */) {
|
||||
const charAfterOp = getFullCodePoint(input, peekPos + 3)
|
||||
if (isWhiteSpace(charAfterOp) || charAfterOp === -1 /* EOF */) {
|
||||
return AssignableIdentifier
|
||||
}
|
||||
}
|
||||
|
||||
// Check for compound assignment operators: +=, -=, *=, /=, %=
|
||||
if (
|
||||
|
|
@ -219,6 +262,14 @@ const chooseIdentifierToken = (input: InputStream, stack: Stack): number => {
|
|||
}
|
||||
|
||||
// Character classification helpers
|
||||
export const isIdentStart = (ch: number): boolean => {
|
||||
return isLowercaseLetter(ch) || isEmojiOrUnicode(ch)
|
||||
}
|
||||
|
||||
export const isIdentChar = (ch: number): boolean => {
|
||||
return isLowercaseLetter(ch) || isDigit(ch) || ch === 45 /* - */ || ch === 63 /* ? */ || isEmojiOrUnicode(ch)
|
||||
}
|
||||
|
||||
const isWhiteSpace = (ch: number): boolean => {
|
||||
return ch === 32 /* space */ || ch === 9 /* tab */ || ch === 13 /* \r */
|
||||
}
|
||||
|
|
@ -305,3 +356,34 @@ const isEmojiOrUnicode = (ch: number): boolean => {
|
|||
}
|
||||
|
||||
const getCharSize = (ch: number) => (ch > 0xffff ? 2 : 1) // emoji takes 2 UTF-16 code units
|
||||
|
||||
export const pipeStartsLineTokenizer = new ExternalTokenizer((input: InputStream, stack: Stack) => {
|
||||
const ch = input.peek(0)
|
||||
|
||||
if (ch !== 10 /* \n */) return
|
||||
|
||||
// ignore whitespace
|
||||
let offset = 1
|
||||
let lastNewlineOffset = 0
|
||||
|
||||
while (true) {
|
||||
const ch = input.peek(offset)
|
||||
if (ch === 10 /* \n */) {
|
||||
lastNewlineOffset = offset
|
||||
offset++
|
||||
} else if (isWhiteSpace(ch)) {
|
||||
offset++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// look for pipe after skipping empty lines
|
||||
if (input.peek(offset) === 124 /* | */) {
|
||||
input.advance(lastNewlineOffset + 1)
|
||||
input.acceptToken(pipeStartsLine)
|
||||
} else {
|
||||
input.advance(1)
|
||||
input.acceptToken(newline)
|
||||
}
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { type Value, toString, toValue } from 'reefvm'
|
||||
import { type Value, toString } from 'reefvm'
|
||||
|
||||
export const dict = {
|
||||
keys: (dict: Record<string, any>) => Object.keys(dict),
|
||||
|
|
|
|||
128
src/prelude/fs.ts
Normal file
128
src/prelude/fs.ts
Normal file
|
|
@ -0,0 +1,128 @@
|
|||
import { join, resolve, basename, dirname, extname } from 'path'
|
||||
import {
|
||||
readdirSync, mkdirSync, rmdirSync,
|
||||
readFileSync, writeFileSync, appendFileSync,
|
||||
rmSync, copyFileSync,
|
||||
statSync, lstatSync, chmodSync, symlinkSync, readlinkSync,
|
||||
watch
|
||||
} from "fs"
|
||||
|
||||
export const fs = {
|
||||
// Directory operations
|
||||
ls: (path: string) => readdirSync(path),
|
||||
mkdir: (path: string) => mkdirSync(path, { recursive: true }),
|
||||
rmdir: (path: string) => rmdirSync(path === '/' || path === '' ? '/tmp/*' : path, { recursive: true }),
|
||||
pwd: () => process.cwd(),
|
||||
cd: (path: string) => process.chdir(path),
|
||||
|
||||
// Reading
|
||||
read: (path: string) => readFileSync(path, 'utf-8'),
|
||||
cat: (path: string) => { }, // added below
|
||||
'read-bytes': (path: string) => [...readFileSync(path)],
|
||||
|
||||
// Writing
|
||||
write: (path: string, content: string) => writeFileSync(path, content),
|
||||
append: (path: string, content: string) => appendFileSync(path, content),
|
||||
|
||||
// File operations
|
||||
delete: (path: string) => rmSync(path),
|
||||
rm: (path: string) => { }, // added below
|
||||
copy: (from: string, to: string) => copyFileSync(from, to),
|
||||
move: (from: string, to: string) => {
|
||||
fs.copy(from, to)
|
||||
fs.rm(from)
|
||||
},
|
||||
mv: (from: string, to: string) => { }, // added below
|
||||
|
||||
// Path operations
|
||||
basename: (path: string) => basename(path),
|
||||
dirname: (path: string) => dirname(path),
|
||||
extname: (path: string) => extname(path),
|
||||
join: (...paths: string[]) => join(...paths),
|
||||
resolve: (...paths: string[]) => resolve(...paths),
|
||||
|
||||
// File info
|
||||
stat: (path: string) => {
|
||||
try {
|
||||
const stats = statSync(path)
|
||||
const record = Object.fromEntries(Object.entries(stats))
|
||||
record['atime'] = record['atimeMs']
|
||||
record['ctime'] = record['ctimeMs']
|
||||
record['mtime'] = record['mtimeMs']
|
||||
|
||||
delete record['atimeMs']
|
||||
delete record['ctimeMs']
|
||||
delete record['mtimeMs']
|
||||
|
||||
return record
|
||||
} catch {
|
||||
return {}
|
||||
}
|
||||
|
||||
},
|
||||
'exists?': (path: string) => {
|
||||
try {
|
||||
statSync(path)
|
||||
return true
|
||||
}
|
||||
catch {
|
||||
return false
|
||||
}
|
||||
},
|
||||
'file?': (path: string) => {
|
||||
try { return statSync(path).isFile() }
|
||||
catch { return false }
|
||||
},
|
||||
'dir?': (path: string) => {
|
||||
try { return statSync(path).isDirectory() }
|
||||
catch { return false }
|
||||
},
|
||||
'symlink?': (path: string) => {
|
||||
try { return lstatSync(path).isSymbolicLink() }
|
||||
catch { return false }
|
||||
},
|
||||
'exec?': (path: string) => {
|
||||
try {
|
||||
const stats = statSync(path)
|
||||
return !!(stats.mode & 0o111)
|
||||
}
|
||||
catch { return false }
|
||||
},
|
||||
size: (path: string) => {
|
||||
try { return statSync(path).size }
|
||||
catch { return 0 }
|
||||
},
|
||||
|
||||
// Permissions
|
||||
chmod: (path: string, mode: number | string) => {
|
||||
const numMode = typeof mode === 'string' ? parseInt(mode, 8) : mode
|
||||
chmodSync(path, numMode)
|
||||
},
|
||||
|
||||
// Symlinks
|
||||
symlink: (target: string, path: string) => symlinkSync(target, path),
|
||||
readlink: (path: string) => readlinkSync(path, 'utf-8'),
|
||||
|
||||
// Other
|
||||
glob: (pattern: string) => {
|
||||
const dir = pattern.substring(0, pattern.lastIndexOf('/'))
|
||||
const match = pattern.substring(pattern.lastIndexOf('/') + 1)
|
||||
|
||||
if (!match.includes('*')) throw new Error('only * patterns supported')
|
||||
|
||||
const ext = match.split('*').pop()!
|
||||
return readdirSync(dir)
|
||||
.filter((f) => f.endsWith(ext))
|
||||
.map((f) => join(dir, f))
|
||||
|
||||
},
|
||||
|
||||
watch: (path: string, callback: Function) =>
|
||||
watch(path, (event, filename) => callback(event, filename)),
|
||||
}
|
||||
|
||||
|
||||
; (fs as any).cat = fs.read
|
||||
; (fs as any).mv = fs.move
|
||||
; (fs as any).cp = fs.copy
|
||||
; (fs as any).rm = fs.delete
|
||||
|
|
@ -1,11 +1,14 @@
|
|||
// The prelude creates all the builtin Shrimp functions.
|
||||
|
||||
import { join, resolve } from 'path'
|
||||
import {
|
||||
type Value, type VM, toValue,
|
||||
extractParamInfo, isWrapped, getOriginalFunction,
|
||||
} from 'reefvm'
|
||||
|
||||
import { dict } from './dict'
|
||||
import { fs } from './fs'
|
||||
import { json } from './json'
|
||||
import { load } from './load'
|
||||
import { list } from './list'
|
||||
import { math } from './math'
|
||||
|
|
@ -13,11 +16,27 @@ import { str } from './str'
|
|||
|
||||
export const globals = {
|
||||
dict,
|
||||
fs,
|
||||
json,
|
||||
load,
|
||||
list,
|
||||
math,
|
||||
str,
|
||||
|
||||
// shrimp runtime info
|
||||
$: {
|
||||
args: Bun.argv.slice(3),
|
||||
argv: Bun.argv.slice(1),
|
||||
env: process.env,
|
||||
pid: process.pid,
|
||||
cwd: process.env.PWD,
|
||||
script: {
|
||||
name: Bun.argv[2] || '(shrimp)',
|
||||
path: resolve(join('.', Bun.argv[2] ?? ''))
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
// hello
|
||||
echo: (...args: any[]) => {
|
||||
console.log(...args.map(a => {
|
||||
|
|
@ -40,6 +59,29 @@ export const globals = {
|
|||
'var?': function (this: VM, v: string) {
|
||||
return typeof v !== 'string' || this.scope.has(v)
|
||||
},
|
||||
ref: (fn: Function) => fn,
|
||||
import: function (this: VM, atNamed: Record<any, string | string[]> = {}, ...idents: string[]) {
|
||||
const onlyArray = Array.isArray(atNamed.only) ? atNamed.only : [atNamed.only].filter(a => a)
|
||||
const only = new Set(onlyArray)
|
||||
const wantsOnly = only.size > 0
|
||||
|
||||
|
||||
for (const ident of idents) {
|
||||
const module = this.get(ident)
|
||||
|
||||
if (!module) throw new Error(`import: can't find ${ident}`)
|
||||
if (module.type !== 'dict') throw new Error(`import: can't import ${module.type}`)
|
||||
|
||||
for (const [name, value] of module.value.entries()) {
|
||||
if (value.type === 'dict') throw new Error(`import: can't import dicts in dicts`)
|
||||
if (wantsOnly && !only.has(name)) continue
|
||||
this.set(name, value)
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// env
|
||||
exit: (num: number) => process.exit(num ?? 0),
|
||||
|
||||
// type predicates
|
||||
'string?': (v: any) => toValue(v).type === 'string',
|
||||
|
|
@ -56,6 +98,7 @@ export const globals = {
|
|||
|
||||
// boolean/logic
|
||||
not: (v: any) => !v,
|
||||
bnot: (n: number) => ~(n | 0),
|
||||
|
||||
// utilities
|
||||
inc: (n: number) => n + 1,
|
||||
|
|
|
|||
7
src/prelude/json.ts
Normal file
7
src/prelude/json.ts
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
export const json = {
|
||||
encode: (s: any) => JSON.stringify(s),
|
||||
decode: (s: string) => JSON.parse(s),
|
||||
}
|
||||
|
||||
; (json as any).parse = json.decode
|
||||
; (json as any).stringify = json.encode
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
import { type Value, toValue, toNull } from 'reefvm'
|
||||
|
||||
export const list = {
|
||||
slice: (list: any[], start: number, end?: number) => list.slice(start, end),
|
||||
slice: (list: any[], start: number, end?: number) => list.slice(start, end ? end : undefined),
|
||||
map: async (list: any[], cb: Function) => {
|
||||
let acc: any[] = []
|
||||
for (const value of list) acc.push(await cb(value))
|
||||
|
|
@ -14,6 +14,13 @@ export const list = {
|
|||
}
|
||||
return acc
|
||||
},
|
||||
reject: async (list: any[], cb: Function) => {
|
||||
let acc: any[] = []
|
||||
for (const value of list) {
|
||||
if (!(await cb(value))) acc.push(value)
|
||||
}
|
||||
return acc
|
||||
},
|
||||
reduce: async (list: any[], cb: Function, initial: any) => {
|
||||
let acc = initial
|
||||
for (const value of list) acc = await cb(acc, value)
|
||||
|
|
@ -29,6 +36,8 @@ export const list = {
|
|||
// predicates
|
||||
'empty?': (list: any[]) => list.length === 0,
|
||||
'contains?': (list: any[], item: any) => list.includes(item),
|
||||
'includes?': (list: any[], item: any) => list.includes(item),
|
||||
'has?': (list: any[], item: any) => list.includes(item),
|
||||
'any?': async (list: any[], cb: Function) => {
|
||||
for (const value of list) {
|
||||
if (await cb(value)) return true
|
||||
|
|
@ -63,8 +72,14 @@ export const list = {
|
|||
const realList = list.value as any[]
|
||||
const realStart = start.value as number
|
||||
const realDeleteCount = deleteCount.value as number
|
||||
const realItems = items.map(item => item.value)
|
||||
return toValue(realList.splice(realStart, realDeleteCount, ...realItems))
|
||||
return toValue(realList.splice(realStart, realDeleteCount, ...items))
|
||||
},
|
||||
insert: (list: Value, index: Value, item: Value) => {
|
||||
if (list.type !== 'array') return toNull()
|
||||
const realList = list.value as any[]
|
||||
const realIndex = index.value as number
|
||||
realList.splice(realIndex, 0, item)
|
||||
return toValue(realList.length)
|
||||
},
|
||||
|
||||
// sequence operations
|
||||
|
|
@ -135,4 +150,5 @@ export const list = {
|
|||
; (list.push as any).raw = true
|
||||
; (list.pop as any).raw = true
|
||||
; (list.shift as any).raw = true
|
||||
; (list.unshift as any).raw = true
|
||||
; (list.unshift as any).raw = true
|
||||
; (list.insert as any).raw = true
|
||||
|
|
@ -7,7 +7,9 @@ export const load = async function (this: VM, path: string): Promise<Record<stri
|
|||
const scope = this.scope
|
||||
const pc = this.pc
|
||||
|
||||
const fullPath = resolve(path) + '.sh'
|
||||
let fullPath = resolve(path)
|
||||
if (!path.includes('.')) fullPath += '.sh'
|
||||
|
||||
const code = readFileSync(fullPath, 'utf-8')
|
||||
|
||||
this.pc = this.instructions.length
|
||||
|
|
|
|||
|
|
@ -1,37 +1,37 @@
|
|||
// strings
|
||||
export const str = {
|
||||
join: (arr: string[], sep: string = ',') => arr.join(sep),
|
||||
split: (str: string, sep: string = ',') => str.split(sep),
|
||||
'to-upper': (str: string) => str.toUpperCase(),
|
||||
'to-lower': (str: string) => str.toLowerCase(),
|
||||
trim: (str: string) => str.trim(),
|
||||
split: (str: string, sep: string = ',') => String(str ?? '').split(sep),
|
||||
'to-upper': (str: string) => String(str ?? '').toUpperCase(),
|
||||
'to-lower': (str: string) => String(str ?? '').toLowerCase(),
|
||||
trim: (str: string) => String(str ?? '').trim(),
|
||||
|
||||
// predicates
|
||||
'starts-with?': (str: string, prefix: string) => str.startsWith(prefix),
|
||||
'ends-with?': (str: string, suffix: string) => str.endsWith(suffix),
|
||||
'contains?': (str: string, substr: string) => str.includes(substr),
|
||||
'empty?': (str: string) => str.length === 0,
|
||||
'starts-with?': (str: string, prefix: string) => String(str ?? '').startsWith(prefix),
|
||||
'ends-with?': (str: string, suffix: string) => String(str ?? '').endsWith(suffix),
|
||||
'contains?': (str: string, substr: string) => String(str ?? '').includes(substr),
|
||||
'empty?': (str: string) => String(str ?? '').length === 0,
|
||||
|
||||
// inspection
|
||||
'index-of': (str: string, search: string) => str.indexOf(search),
|
||||
'last-index-of': (str: string, search: string) => str.lastIndexOf(search),
|
||||
'index-of': (str: string, search: string) => String(str ?? '').indexOf(search),
|
||||
'last-index-of': (str: string, search: string) => String(str ?? '').lastIndexOf(search),
|
||||
|
||||
// transformations
|
||||
replace: (str: string, search: string, replacement: string) => str.replace(search, replacement),
|
||||
'replace-all': (str: string, search: string, replacement: string) => str.replaceAll(search, replacement),
|
||||
slice: (str: string, start: number, end?: number | null) => str.slice(start, end ?? undefined),
|
||||
substring: (str: string, start: number, end?: number | null) => str.substring(start, end ?? undefined),
|
||||
replace: (str: string, search: string, replacement: string) => String(str ?? '').replace(search, replacement),
|
||||
'replace-all': (str: string, search: string, replacement: string) => String(str ?? '').replaceAll(search, replacement),
|
||||
slice: (str: string, start: number, end?: number | null) => String(str ?? '').slice(start, end ?? undefined),
|
||||
substring: (str: string, start: number, end?: number | null) => String(str ?? '').substring(start, end ?? undefined),
|
||||
repeat: (str: string, count: number) => {
|
||||
if (count < 0) throw new Error(`repeat: count must be non-negative, got ${count}`)
|
||||
if (!Number.isInteger(count)) throw new Error(`repeat: count must be an integer, got ${count}`)
|
||||
return str.repeat(count)
|
||||
return String(str ?? '').repeat(count)
|
||||
},
|
||||
'pad-start': (str: string, length: number, pad: string = ' ') => str.padStart(length, pad),
|
||||
'pad-end': (str: string, length: number, pad: string = ' ') => str.padEnd(length, pad),
|
||||
lines: (str: string) => str.split('\n'),
|
||||
chars: (str: string) => str.split(''),
|
||||
'pad-start': (str: string, length: number, pad: string = ' ') => String(str ?? '').padStart(length, pad),
|
||||
'pad-end': (str: string, length: number, pad: string = ' ') => String(str ?? '').padEnd(length, pad),
|
||||
lines: (str: string) => String(str ?? '').split('\n'),
|
||||
chars: (str: string) => String(str ?? '').split(''),
|
||||
|
||||
// regex
|
||||
match: (str: string, regex: RegExp) => str.match(regex),
|
||||
'test?': (str: string, regex: RegExp) => regex.test(str),
|
||||
match: (str: string, regex: RegExp) => String(str ?? '').match(regex),
|
||||
'test?': (str: string, regex: RegExp) => regex.test(String(str ?? '')),
|
||||
}
|
||||
329
src/prelude/tests/fs.test.ts
Normal file
329
src/prelude/tests/fs.test.ts
Normal file
|
|
@ -0,0 +1,329 @@
|
|||
import { expect, describe, test, beforeEach, afterEach } from 'bun:test'
|
||||
import { mkdirSync, writeFileSync, rmSync, existsSync } from 'fs'
|
||||
import { join, resolve } from 'path'
|
||||
import { fs } from '../fs'
|
||||
|
||||
const TEST_DIR = resolve('./tmp/shrimp-fs-test')
|
||||
const CWD = process.cwd()
|
||||
|
||||
beforeEach(() => {
|
||||
if (existsSync(TEST_DIR)) {
|
||||
rmSync(TEST_DIR, { recursive: true })
|
||||
}
|
||||
mkdirSync(TEST_DIR, { recursive: true })
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(CWD)
|
||||
if (existsSync(TEST_DIR)) {
|
||||
rmSync(TEST_DIR, { recursive: true })
|
||||
}
|
||||
})
|
||||
|
||||
describe('fs - directory operations', () => {
|
||||
test('fs.ls lists directory contents', () => {
|
||||
writeFileSync(join(TEST_DIR, 'file1.txt'), 'content1')
|
||||
writeFileSync(join(TEST_DIR, 'file2.txt'), 'content2')
|
||||
|
||||
const result = fs.ls(TEST_DIR)
|
||||
expect(result).toContain('file1.txt')
|
||||
expect(result).toContain('file2.txt')
|
||||
})
|
||||
|
||||
test('fs.mkdir creates directory', () => {
|
||||
const newDir = join(TEST_DIR, 'newdir')
|
||||
fs.mkdir(newDir)
|
||||
expect(existsSync(newDir)).toBe(true)
|
||||
})
|
||||
|
||||
test('fs.rmdir removes empty directory', () => {
|
||||
const dir = join(TEST_DIR, 'toremove')
|
||||
mkdirSync(dir)
|
||||
fs.rmdir(dir)
|
||||
expect(existsSync(dir)).toBe(false)
|
||||
})
|
||||
|
||||
test('fs.pwd returns current working directory', () => {
|
||||
const result = fs.pwd()
|
||||
expect(typeof result).toBe('string')
|
||||
expect(result.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('fs.cd changes current working directory', () => {
|
||||
const originalCwd = process.cwd()
|
||||
fs.cd(TEST_DIR)
|
||||
expect(process.cwd()).toBe(TEST_DIR)
|
||||
process.chdir(originalCwd) // restore
|
||||
})
|
||||
})
|
||||
|
||||
describe('fs - reading', () => {
|
||||
test('fs.read reads file contents as string', () => {
|
||||
const file = join(TEST_DIR, 'test.txt')
|
||||
writeFileSync(file, 'hello world')
|
||||
|
||||
const result = fs.read(file)
|
||||
expect(result).toBe('hello world')
|
||||
})
|
||||
|
||||
test('fs.cat is alias for fs.read', () => {
|
||||
const file = join(TEST_DIR, 'test.txt')
|
||||
writeFileSync(file, 'hello world')
|
||||
|
||||
const result = fs.cat(file)
|
||||
expect(result).toBe('hello world')
|
||||
})
|
||||
|
||||
test('fs.read-bytes reads file as buffer', () => {
|
||||
const file = join(TEST_DIR, 'test.bin')
|
||||
writeFileSync(file, Buffer.from([1, 2, 3, 4]))
|
||||
|
||||
const result = fs['read-bytes'](file)
|
||||
expect(result).toBeInstanceOf(Array)
|
||||
expect(result).toEqual([1, 2, 3, 4])
|
||||
})
|
||||
})
|
||||
|
||||
describe('fs - writing', () => {
|
||||
test('fs.write writes string to file', async () => {
|
||||
const file = join(TEST_DIR, 'output.txt')
|
||||
fs.write(file, 'test content')
|
||||
|
||||
const content = Bun.file(file).text()
|
||||
expect(await content).toBe('test content')
|
||||
})
|
||||
|
||||
test('fs.append appends to existing file', async () => {
|
||||
const file = join(TEST_DIR, 'append.txt')
|
||||
writeFileSync(file, 'first')
|
||||
fs.append(file, ' second')
|
||||
|
||||
const content = await Bun.file(file).text()
|
||||
expect(content).toBe('first second')
|
||||
})
|
||||
})
|
||||
|
||||
describe('fs - file operations', () => {
|
||||
test('fs.rm removes file', () => {
|
||||
const file = join(TEST_DIR, 'remove.txt')
|
||||
writeFileSync(file, 'content')
|
||||
|
||||
fs.rm(file)
|
||||
expect(existsSync(file)).toBe(false)
|
||||
})
|
||||
|
||||
test('fs.delete is alias for fs.rm', () => {
|
||||
const file = join(TEST_DIR, 'delete.txt')
|
||||
writeFileSync(file, 'content')
|
||||
|
||||
fs.delete(file)
|
||||
expect(existsSync(file)).toBe(false)
|
||||
})
|
||||
|
||||
test('fs.copy copies file', async () => {
|
||||
const src = join(TEST_DIR, 'source.txt')
|
||||
const dest = join(TEST_DIR, 'dest.txt')
|
||||
writeFileSync(src, 'content')
|
||||
|
||||
fs.copy(src, dest)
|
||||
expect(await Bun.file(dest).text()).toBe('content')
|
||||
})
|
||||
|
||||
test('fs.cp is alias for fs.copy', async () => {
|
||||
const src = join(TEST_DIR, 'source2.txt')
|
||||
const dest = join(TEST_DIR, 'dest2.txt')
|
||||
writeFileSync(src, 'content')
|
||||
|
||||
fs.cp(src, dest)
|
||||
expect(await Bun.file(dest).text()).toBe('content')
|
||||
})
|
||||
|
||||
test('fs.move moves file', async () => {
|
||||
const src = join(TEST_DIR, 'source.txt')
|
||||
const dest = join(TEST_DIR, 'moved.txt')
|
||||
writeFileSync(src, 'content')
|
||||
|
||||
fs.move(src, dest)
|
||||
expect(existsSync(src)).toBe(false)
|
||||
expect(await Bun.file(dest).text()).toBe('content')
|
||||
})
|
||||
|
||||
test('fs.mv is alias for fs.move', async () => {
|
||||
const src = join(TEST_DIR, 'source2.txt')
|
||||
const dest = join(TEST_DIR, 'moved2.txt')
|
||||
writeFileSync(src, 'content')
|
||||
|
||||
fs.mv(src, dest)
|
||||
expect(existsSync(src)).toBe(false)
|
||||
expect(await Bun.file(dest).text()).toBe('content')
|
||||
})
|
||||
})
|
||||
|
||||
describe('fs - path operations', () => {
|
||||
test('fs.basename extracts filename from path', () => {
|
||||
expect(fs.basename('/path/to/file.txt')).toBe('file.txt')
|
||||
expect(fs.basename('/path/to/dir/')).toBe('dir')
|
||||
})
|
||||
|
||||
test('fs.dirname extracts directory from path', () => {
|
||||
expect(fs.dirname('/path/to/file.txt')).toBe('/path/to')
|
||||
expect(fs.dirname('/path/to/dir/')).toBe('/path/to')
|
||||
})
|
||||
|
||||
test('fs.extname extracts file extension', () => {
|
||||
expect(fs.extname('file.txt')).toBe('.txt')
|
||||
expect(fs.extname('file.tar.gz')).toBe('.gz')
|
||||
expect(fs.extname('noext')).toBe('')
|
||||
})
|
||||
|
||||
test('fs.join joins path segments', () => {
|
||||
expect(fs.join('path', 'to', 'file.txt')).toBe('path/to/file.txt')
|
||||
expect(fs.join('/absolute', 'path')).toBe('/absolute/path')
|
||||
})
|
||||
|
||||
test('fs.resolve resolves to absolute path', () => {
|
||||
const result = fs.resolve('relative', 'path')
|
||||
expect(result.startsWith('/')).toBe(true)
|
||||
expect(result).toContain('relative')
|
||||
})
|
||||
})
|
||||
|
||||
describe('fs - file info', () => {
|
||||
test('fs.stat returns file stats', () => {
|
||||
const file = join(TEST_DIR, 'stat.txt')
|
||||
writeFileSync(file, 'content')
|
||||
|
||||
const stats = fs.stat(file)
|
||||
expect(stats).toHaveProperty('size')
|
||||
expect(stats).toHaveProperty('mtime')
|
||||
expect(stats.size).toBe(7) // 'content' is 7 bytes
|
||||
})
|
||||
|
||||
test('fs.exists? checks if path exists', () => {
|
||||
const file = join(TEST_DIR, 'exists.txt')
|
||||
expect(fs['exists?'](file)).toBe(false)
|
||||
|
||||
writeFileSync(file, 'content')
|
||||
expect(fs['exists?'](file)).toBe(true)
|
||||
})
|
||||
|
||||
test('fs.file? checks if path is a file', () => {
|
||||
const file = join(TEST_DIR, 'isfile.txt')
|
||||
writeFileSync(file, 'content')
|
||||
|
||||
expect(fs['file?'](file)).toBe(true)
|
||||
expect(fs['file?'](TEST_DIR)).toBe(false)
|
||||
})
|
||||
|
||||
test('fs.dir? checks if path is a directory', () => {
|
||||
const dir = join(TEST_DIR, 'isdir')
|
||||
mkdirSync(dir)
|
||||
|
||||
expect(fs['dir?'](dir)).toBe(true)
|
||||
expect(fs['dir?'](join(TEST_DIR, 'isfile.txt'))).toBe(false)
|
||||
})
|
||||
|
||||
test('fs.symlink? checks if path is a symbolic link', () => {
|
||||
const file = join(TEST_DIR, 'target.txt')
|
||||
const link = join(TEST_DIR, 'link.txt')
|
||||
writeFileSync(file, 'content')
|
||||
|
||||
fs.symlink(file, link)
|
||||
expect(fs['symlink?'](link)).toBe(true)
|
||||
expect(fs['symlink?'](file)).toBe(false)
|
||||
})
|
||||
|
||||
test('fs.exec? checks if file is executable', () => {
|
||||
const file = join(TEST_DIR, 'script.sh')
|
||||
writeFileSync(file, '#!/bin/bash\necho hello')
|
||||
|
||||
fs.chmod(file, 0o755)
|
||||
expect(fs['exec?'](file)).toBe(true)
|
||||
|
||||
fs.chmod(file, 0o644)
|
||||
expect(fs['exec?'](file)).toBe(false)
|
||||
})
|
||||
|
||||
test('fs.size returns file size in bytes', () => {
|
||||
const file = join(TEST_DIR, 'sizeme.txt')
|
||||
writeFileSync(file, 'content')
|
||||
|
||||
expect(fs.size(file)).toBe(7) // 'content' is 7 bytes
|
||||
})
|
||||
})
|
||||
|
||||
describe('fs - permissions', () => {
|
||||
test('fs.chmod changes file permissions with octal number', () => {
|
||||
const file = join(TEST_DIR, 'perms.txt')
|
||||
writeFileSync(file, 'content')
|
||||
|
||||
fs.chmod(file, 0o755)
|
||||
expect(fs['exec?'](file)).toBe(true)
|
||||
|
||||
fs.chmod(file, 0o644)
|
||||
expect(fs['exec?'](file)).toBe(false)
|
||||
})
|
||||
|
||||
test('fs.chmod changes file permissions with string', () => {
|
||||
const file = join(TEST_DIR, 'perms2.txt')
|
||||
writeFileSync(file, 'content')
|
||||
|
||||
fs.chmod(file, '755')
|
||||
expect(fs['exec?'](file)).toBe(true)
|
||||
|
||||
fs.chmod(file, '644')
|
||||
expect(fs['exec?'](file)).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('fs - symlinks', () => {
|
||||
test('fs.symlink creates symbolic link', () => {
|
||||
const target = join(TEST_DIR, 'target.txt')
|
||||
const link = join(TEST_DIR, 'link.txt')
|
||||
writeFileSync(target, 'content')
|
||||
|
||||
fs.symlink(target, link)
|
||||
expect(fs['symlink?'](link)).toBe(true)
|
||||
expect(fs.read(link)).toBe('content')
|
||||
})
|
||||
|
||||
test('fs.readlink reads symbolic link target', () => {
|
||||
const target = join(TEST_DIR, 'target.txt')
|
||||
const link = join(TEST_DIR, 'link.txt')
|
||||
writeFileSync(target, 'content')
|
||||
|
||||
fs.symlink(target, link)
|
||||
expect(fs.readlink(link)).toBe(target)
|
||||
})
|
||||
})
|
||||
|
||||
describe('fs - other', () => {
|
||||
test('fs.glob matches file patterns', () => {
|
||||
writeFileSync(join(TEST_DIR, 'file1.txt'), '')
|
||||
writeFileSync(join(TEST_DIR, 'file2.txt'), '')
|
||||
writeFileSync(join(TEST_DIR, 'file3.md'), '')
|
||||
|
||||
const result = fs.glob(join(TEST_DIR, '*.txt'))
|
||||
expect(result).toHaveLength(2)
|
||||
expect(result).toContain(join(TEST_DIR, 'file1.txt'))
|
||||
expect(result).toContain(join(TEST_DIR, 'file2.txt'))
|
||||
})
|
||||
|
||||
test('fs.watch calls callback on file change', async () => {
|
||||
const file = join(TEST_DIR, 'watch.txt')
|
||||
writeFileSync(file, 'initial')
|
||||
|
||||
let called = false
|
||||
const watcher = fs.watch(file, () => { called = true })
|
||||
|
||||
// Trigger change
|
||||
await new Promise(resolve => setTimeout(resolve, 100))
|
||||
writeFileSync(file, 'updated')
|
||||
|
||||
// Wait for watcher
|
||||
await new Promise(resolve => setTimeout(resolve, 500))
|
||||
|
||||
expect(called).toBe(true)
|
||||
watcher.close?.()
|
||||
})
|
||||
})
|
||||
|
|
@ -77,3 +77,64 @@ describe('introspection', () => {
|
|||
await expect(`describe 'hello'`).toEvaluateTo("#<string: \u001b[32m'hello\u001b[32m'\u001b[0m>", globals)
|
||||
})
|
||||
})
|
||||
|
||||
describe('environment', () => {
|
||||
test('args is an array', async () => {
|
||||
await expect(`array? $.args`).toEvaluateTo(true, globals)
|
||||
})
|
||||
|
||||
test('args can be accessed', async () => {
|
||||
await expect(`type $.args`).toEvaluateTo('array', globals)
|
||||
})
|
||||
|
||||
test('argv includes more than just the args', async () => {
|
||||
await expect(`list.first $.argv | str.ends-with? 'shrimp.test.ts'`).toEvaluateTo(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('ref', () => {
|
||||
expect(`rnd = do x: true end; rnd | type`).toEvaluateTo('boolean')
|
||||
expect(`rnd = do x: true end; ref rnd | type`).toEvaluateTo('function')
|
||||
|
||||
expect(`math.random | type`).toEvaluateTo('number')
|
||||
expect(`ref math.random | type`).toEvaluateTo('native')
|
||||
|
||||
expect(`rnd = math.random; rnd | type`).toEvaluateTo('number')
|
||||
expect(`rnd = ref math.random; rnd | type`).toEvaluateTo('number')
|
||||
expect(`rnd = ref math.random; ref rnd | type`).toEvaluateTo('native')
|
||||
})
|
||||
|
||||
describe('$ global dictionary', () => {
|
||||
test('$.args is an array', async () => {
|
||||
await expect(`$.args | array?`).toEvaluateTo(true, globals)
|
||||
})
|
||||
|
||||
test('$.args can be accessed', async () => {
|
||||
await expect(`$.args | type`).toEvaluateTo('array', globals)
|
||||
})
|
||||
|
||||
test('$.script.name is a string', async () => {
|
||||
await expect(`$.script.name | string?`).toEvaluateTo(true, globals)
|
||||
})
|
||||
|
||||
test('$.script.path is a string', async () => {
|
||||
await expect(`$.script.path | string?`).toEvaluateTo(true, globals)
|
||||
})
|
||||
|
||||
test('$.env is a dict', async () => {
|
||||
await expect(`$.env | dict?`).toEvaluateTo(true, globals)
|
||||
})
|
||||
|
||||
test('$.pid is a number', async () => {
|
||||
await expect(`$.pid | number?`).toEvaluateTo(true, globals)
|
||||
await expect(`$.pid > 0`).toEvaluateTo(true, globals)
|
||||
})
|
||||
|
||||
test('$.cwd is a string', async () => {
|
||||
await expect(`$.cwd | string?`).toEvaluateTo(true, globals)
|
||||
})
|
||||
|
||||
test('$.cwd returns current working directory', async () => {
|
||||
await expect(`$.cwd`).toEvaluateTo(process.cwd(), globals)
|
||||
})
|
||||
})
|
||||
|
|
|
|||
84
src/prelude/tests/json.test.ts
Normal file
84
src/prelude/tests/json.test.ts
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
import { expect, describe, test } from 'bun:test'
|
||||
|
||||
describe('json', () => {
|
||||
test('json.decode', () => {
|
||||
expect(`json.decode '[1,2,3]'`).toEvaluateTo([1, 2, 3])
|
||||
expect(`json.decode '"heya"'`).toEvaluateTo('heya')
|
||||
expect(`json.decode '[true, false, null]'`).toEvaluateTo([true, false, null])
|
||||
expect(`json.decode '{"a": true, "b": false, "c": "yeah"}'`).toEvaluateTo({ a: true, b: false, c: "yeah" })
|
||||
})
|
||||
|
||||
test('json.encode', () => {
|
||||
expect(`json.encode [1 2 3]`).toEvaluateTo('[1,2,3]')
|
||||
expect(`json.encode 'heya'`).toEvaluateTo('"heya"')
|
||||
expect(`json.encode [true false null]`).toEvaluateTo('[true,false,null]')
|
||||
expect(`json.encode [a=true b=false c='yeah'] | json.decode`).toEvaluateTo({ a: true, b: false, c: "yeah" })
|
||||
})
|
||||
|
||||
test('edge cases - empty structures', () => {
|
||||
expect(`json.decode '[]'`).toEvaluateTo([])
|
||||
expect(`json.decode '{}'`).toEvaluateTo({})
|
||||
expect(`json.encode []`).toEvaluateTo('[]')
|
||||
expect(`json.encode [=]`).toEvaluateTo('{}')
|
||||
})
|
||||
|
||||
test('edge cases - special characters in strings', () => {
|
||||
expect(`json.decode '"hello\\\\nworld"'`).toEvaluateTo('hello\nworld')
|
||||
expect(`json.decode '"tab\\\\there"'`).toEvaluateTo('tab\there')
|
||||
expect(`json.decode '"forward/slash"'`).toEvaluateTo('forward/slash')
|
||||
expect(`json.decode '"with\\\\\\\\backslash"'`).toEvaluateTo('with\\backslash')
|
||||
})
|
||||
|
||||
test('numbers - integers and floats', () => {
|
||||
expect(`json.decode '42'`).toEvaluateTo(42)
|
||||
expect(`json.decode '0'`).toEvaluateTo(0)
|
||||
expect(`json.decode '-17'`).toEvaluateTo(-17)
|
||||
expect(`json.decode '3.14159'`).toEvaluateTo(3.14159)
|
||||
expect(`json.decode '-0.5'`).toEvaluateTo(-0.5)
|
||||
})
|
||||
|
||||
test('numbers - scientific notation', () => {
|
||||
expect(`json.decode '1e10'`).toEvaluateTo(1e10)
|
||||
expect(`json.decode '2.5e-3'`).toEvaluateTo(2.5e-3)
|
||||
expect(`json.decode '1.23E+5'`).toEvaluateTo(1.23e5)
|
||||
})
|
||||
|
||||
test('unicode - emoji and special characters', () => {
|
||||
expect(`json.decode '"hello 👋"'`).toEvaluateTo('hello 👋')
|
||||
expect(`json.decode '"🎉🚀✨"'`).toEvaluateTo('🎉🚀✨')
|
||||
expect(`json.encode '你好'`).toEvaluateTo('"你好"')
|
||||
expect(`json.encode 'café'`).toEvaluateTo('"café"')
|
||||
})
|
||||
|
||||
test('nested structures - arrays', () => {
|
||||
expect(`json.decode '[[1,2],[3,4],[5,6]]'`).toEvaluateTo([[1, 2], [3, 4], [5, 6]])
|
||||
expect(`json.decode '[1,[2,[3,[4]]]]'`).toEvaluateTo([1, [2, [3, [4]]]])
|
||||
})
|
||||
|
||||
test('nested structures - objects', () => {
|
||||
expect(`json.decode '{"user":{"name":"Alice","age":30}}'`).toEvaluateTo({
|
||||
user: { name: 'Alice', age: 30 }
|
||||
})
|
||||
expect(`json.decode '{"a":{"b":{"c":"deep"}}}'`).toEvaluateTo({
|
||||
a: { b: { c: 'deep' } }
|
||||
})
|
||||
})
|
||||
|
||||
test('nested structures - mixed arrays and objects', () => {
|
||||
expect(`json.decode '[{"id":1,"tags":["a","b"]},{"id":2,"tags":["c"]}]'`).toEvaluateTo([
|
||||
{ id: 1, tags: ['a', 'b'] },
|
||||
{ id: 2, tags: ['c'] }
|
||||
])
|
||||
expect(`json.decode '{"items":[1,2,3],"meta":{"count":3}}'`).toEvaluateTo({
|
||||
items: [1, 2, 3],
|
||||
meta: { count: 3 }
|
||||
})
|
||||
})
|
||||
|
||||
test('error handling - invalid json', () => {
|
||||
expect(`json.decode '{invalid}'`).toFailEvaluation()
|
||||
expect(`json.decode '[1,2,3'`).toFailEvaluation()
|
||||
expect(`json.decode 'undefined'`).toFailEvaluation()
|
||||
expect(`json.decode ''`).toFailEvaluation()
|
||||
})
|
||||
})
|
||||
|
|
@ -1,41 +1,41 @@
|
|||
import { expect, describe, test } from 'bun:test'
|
||||
import { globals } from '#prelude'
|
||||
|
||||
describe('use', () => {
|
||||
describe('loading a file', () => {
|
||||
test(`imports all a file's functions`, async () => {
|
||||
expect(`
|
||||
math = load ./src/prelude/tests/math
|
||||
math = load ./src/prelude/tests/math.sh
|
||||
math.double 4
|
||||
`).toEvaluateTo(8, globals)
|
||||
|
||||
expect(`
|
||||
math = load ./src/prelude/tests/math
|
||||
math = load ./src/prelude/tests/math.sh
|
||||
math.double (math.double 4)
|
||||
`).toEvaluateTo(16, globals)
|
||||
|
||||
expect(`
|
||||
math = load ./src/prelude/tests/math
|
||||
dbl = math.double
|
||||
math = load ./src/prelude/tests/math.sh
|
||||
dbl = ref math.double
|
||||
dbl (dbl 2)
|
||||
`).toEvaluateTo(8, globals)
|
||||
|
||||
expect(`
|
||||
math = load ./src/prelude/tests/math
|
||||
math = load ./src/prelude/tests/math.sh
|
||||
math.pi
|
||||
`).toEvaluateTo(3.14, globals)
|
||||
|
||||
expect(`
|
||||
math = load ./src/prelude/tests/math
|
||||
math = load ./src/prelude/tests/math.sh
|
||||
math | at 🥧
|
||||
`).toEvaluateTo(3.14159265359, globals)
|
||||
|
||||
expect(`
|
||||
math = load ./src/prelude/tests/math
|
||||
math = load ./src/prelude/tests/math.sh
|
||||
math.🥧
|
||||
`).toEvaluateTo(3.14159265359, globals)
|
||||
|
||||
expect(`
|
||||
math = load ./src/prelude/tests/math
|
||||
math = load ./src/prelude/tests/math.sh
|
||||
math.add1 5
|
||||
`).toEvaluateTo(6, globals)
|
||||
})
|
||||
|
|
@ -66,6 +66,7 @@ describe('string operations', () => {
|
|||
test('slice extracts substring', async () => {
|
||||
await expect(`str.slice 'hello' 1 3`).toEvaluateTo('el')
|
||||
await expect(`str.slice 'hello' 2 null`).toEvaluateTo('llo')
|
||||
await expect(`str.slice 'hello' 2`).toEvaluateTo('llo')
|
||||
})
|
||||
|
||||
test('repeat repeats string', async () => {
|
||||
|
|
@ -193,6 +194,15 @@ describe('collections', () => {
|
|||
`).toEvaluateTo([3, 4, 5])
|
||||
})
|
||||
|
||||
test('list.reject doesnt keep matching elements', async () => {
|
||||
await expect(`
|
||||
is-even = do x:
|
||||
(x % 2) == 0
|
||||
end
|
||||
list.reject [1 2 3 4 5] is-even
|
||||
`).toEvaluateTo([1, 3, 5])
|
||||
})
|
||||
|
||||
test('list.reduce accumulates values', async () => {
|
||||
await expect(`
|
||||
add = do acc x:
|
||||
|
|
@ -339,6 +349,22 @@ describe('collections', () => {
|
|||
await expect(`arr = [1 2 3 4 5]; list.splice arr 3 2; arr`).toEvaluateTo([1, 2, 3])
|
||||
})
|
||||
|
||||
test('list.insert adds element at index and mutates array', async () => {
|
||||
await expect(`arr = [1 2 4 5]; list.insert arr 2 3; arr`).toEvaluateTo([1, 2, 3, 4, 5])
|
||||
})
|
||||
|
||||
test('list.insert returns array length', async () => {
|
||||
await expect(`list.insert [1 2 4] 2 3`).toEvaluateTo(4)
|
||||
})
|
||||
|
||||
test('list.insert at start', async () => {
|
||||
await expect(`arr = [2 3]; list.insert arr 0 1; arr`).toEvaluateTo([1, 2, 3])
|
||||
})
|
||||
|
||||
test('list.insert at end', async () => {
|
||||
await expect(`arr = [1 2]; list.insert arr 2 99; arr`).toEvaluateTo([1, 2, 99])
|
||||
})
|
||||
|
||||
test('list.sort with no callback sorts ascending', async () => {
|
||||
await expect(`list.sort [3 1 4 1 5] null`).toEvaluateTo([1, 1, 3, 4, 5])
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { describe } from 'bun:test'
|
||||
import { expect, test } from 'bun:test'
|
||||
import { Shrimp } from '..'
|
||||
import { Shrimp, runCode, compileCode, parseCode, bytecodeToString } from '..'
|
||||
|
||||
describe('Shrimp', () => {
|
||||
test('allows running Shrimp code', async () => {
|
||||
|
|
@ -50,4 +50,403 @@ describe('Shrimp', () => {
|
|||
await shrimp.run('abc = nothing')
|
||||
expect(shrimp.get('abc')).toEqual('nothing')
|
||||
})
|
||||
|
||||
describe('set()', () => {
|
||||
test('allows setting variables', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
shrimp.set('foo', 42)
|
||||
expect(shrimp.get('foo')).toEqual(42)
|
||||
|
||||
shrimp.set('bar', 'hello')
|
||||
expect(shrimp.get('bar')).toEqual('hello')
|
||||
})
|
||||
|
||||
test('set variables are accessible in code', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
shrimp.set('x', 10)
|
||||
shrimp.set('y', 20)
|
||||
|
||||
const result = await shrimp.run('x + y')
|
||||
expect(result).toEqual(30)
|
||||
})
|
||||
|
||||
test('allows setting functions', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
shrimp.set('double', (n: number) => n * 2)
|
||||
|
||||
const result = await shrimp.run('double 21')
|
||||
expect(result).toEqual(42)
|
||||
})
|
||||
|
||||
test('overwrites existing variables', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
await shrimp.run('x = 100')
|
||||
expect(shrimp.get('x')).toEqual(100)
|
||||
|
||||
shrimp.set('x', 200)
|
||||
expect(shrimp.get('x')).toEqual(200)
|
||||
})
|
||||
})
|
||||
|
||||
describe('has()', () => {
|
||||
test('returns true for existing variables', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
await shrimp.run('x = 5')
|
||||
expect(shrimp.has('x')).toEqual(true)
|
||||
})
|
||||
|
||||
test('returns false for non-existing variables', () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
expect(shrimp.has('nonexistent')).toEqual(false)
|
||||
})
|
||||
|
||||
test('returns true for globals', () => {
|
||||
const shrimp = new Shrimp({ myGlobal: 42 })
|
||||
|
||||
expect(shrimp.has('myGlobal')).toEqual(true)
|
||||
})
|
||||
|
||||
test('returns true for prelude functions', () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
expect(shrimp.has('echo')).toEqual(true)
|
||||
expect(shrimp.has('type')).toEqual(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('call()', () => {
|
||||
test('calls Shrimp functions with positional args', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
await shrimp.run(`add = do x y:
|
||||
x + y
|
||||
end`)
|
||||
|
||||
const result = await shrimp.call('add', 5, 10)
|
||||
expect(result).toEqual(15)
|
||||
})
|
||||
|
||||
test('calls Shrimp functions with named args', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
await shrimp.run(`greet = do name:
|
||||
str.join [ 'Hello ' name ] ''
|
||||
end`)
|
||||
|
||||
const result = await shrimp.call('greet', { name: 'World' })
|
||||
expect(result).toEqual('Hello World')
|
||||
})
|
||||
|
||||
test('calls native functions', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
shrimp.set('multiply', (a: number, b: number) => a * b)
|
||||
|
||||
const result = await shrimp.call('multiply', 6, 7)
|
||||
expect(result).toEqual(42)
|
||||
})
|
||||
|
||||
test('calls prelude functions', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
const result = await shrimp.call('type', 42)
|
||||
expect(result).toEqual('number')
|
||||
})
|
||||
|
||||
test('calls async functions', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
shrimp.set('fetchData', async () => {
|
||||
return await Promise.resolve('async data')
|
||||
})
|
||||
|
||||
const result = await shrimp.call('fetchData')
|
||||
expect(result).toEqual('async data')
|
||||
})
|
||||
})
|
||||
|
||||
describe('compile()', () => {
|
||||
test('compiles code to bytecode', () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
const bytecode = shrimp.compile('x = 5')
|
||||
|
||||
expect(bytecode).toHaveProperty('instructions')
|
||||
expect(bytecode).toHaveProperty('constants')
|
||||
expect(bytecode).toHaveProperty('labels')
|
||||
expect(bytecode.instructions.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('respects globals when compiling', () => {
|
||||
const shrimp = new Shrimp({ customGlobal: 42 })
|
||||
|
||||
const bytecode = shrimp.compile('x = customGlobal')
|
||||
expect(bytecode.instructions.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('compiled bytecode can be run', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
const bytecode = shrimp.compile('2 * 21')
|
||||
const result = await shrimp.run(bytecode)
|
||||
|
||||
expect(result).toEqual(42)
|
||||
})
|
||||
})
|
||||
|
||||
describe('parse()', () => {
|
||||
test('parses code to syntax tree', () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
const tree = shrimp.parse('x = 5')
|
||||
|
||||
expect(tree).toHaveProperty('length')
|
||||
expect(tree).toHaveProperty('cursor')
|
||||
expect(tree.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('respects globals when parsing', () => {
|
||||
const shrimp = new Shrimp({ myVar: 42 })
|
||||
|
||||
const tree = shrimp.parse('x = myVar + 10')
|
||||
|
||||
// Should parse without errors
|
||||
expect(tree).toHaveProperty('length')
|
||||
expect(tree.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('parses function definitions', () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
const tree = shrimp.parse(`add = do x y:
|
||||
x + y
|
||||
end`)
|
||||
|
||||
expect(tree.length).toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('get()', () => {
|
||||
test('returns null for undefined variables', () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
expect(shrimp.get('undefined')).toEqual(null)
|
||||
})
|
||||
|
||||
test('returns values from code execution', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
await shrimp.run('x = 42')
|
||||
expect(shrimp.get('x')).toEqual(42)
|
||||
})
|
||||
|
||||
test('returns arrays', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
await shrimp.run('arr = [1 2 3]')
|
||||
expect(shrimp.get('arr')).toEqual([1, 2, 3])
|
||||
})
|
||||
|
||||
test('returns dicts', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
await shrimp.run('dict = [a=1 b=2]')
|
||||
expect(shrimp.get('dict')).toEqual({ a: 1, b: 2 })
|
||||
})
|
||||
})
|
||||
|
||||
describe('running bytecode directly', () => {
|
||||
test('can run pre-compiled bytecode', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
const bytecode = shrimp.compile('x = 100')
|
||||
const result = await shrimp.run(bytecode)
|
||||
|
||||
expect(result).toEqual(100)
|
||||
expect(shrimp.get('x')).toEqual(100)
|
||||
})
|
||||
|
||||
test('maintains state across bytecode runs', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
const bytecode1 = shrimp.compile('x = 10')
|
||||
const bytecode2 = shrimp.compile('x + 5')
|
||||
|
||||
await shrimp.run(bytecode1)
|
||||
const result = await shrimp.run(bytecode2)
|
||||
|
||||
expect(result).toEqual(15)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Functional API', () => {
|
||||
describe('runCode()', () => {
|
||||
test('runs code and returns result', async () => {
|
||||
const result = await runCode('1 + 1')
|
||||
expect(result).toEqual(2)
|
||||
})
|
||||
|
||||
test('works with globals', async () => {
|
||||
const result = await runCode('greet', { greet: () => 'hello' })
|
||||
expect(result).toEqual('hello')
|
||||
})
|
||||
|
||||
test('has access to prelude', async () => {
|
||||
const result = await runCode('type 42')
|
||||
expect(result).toEqual('number')
|
||||
})
|
||||
|
||||
test('returns null for empty code', async () => {
|
||||
const result = await runCode('')
|
||||
expect(result).toEqual(null)
|
||||
})
|
||||
})
|
||||
|
||||
describe('compileCode()', () => {
|
||||
test('compiles code to bytecode', () => {
|
||||
const bytecode = compileCode('x = 5')
|
||||
|
||||
expect(bytecode).toHaveProperty('instructions')
|
||||
expect(bytecode).toHaveProperty('constants')
|
||||
expect(bytecode.instructions.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('respects globals', () => {
|
||||
const bytecode = compileCode('x = myGlobal', { myGlobal: 42 })
|
||||
|
||||
expect(bytecode.instructions.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('compiled bytecode is usable', async () => {
|
||||
const bytecode = compileCode('21 * 2')
|
||||
const result = await runCode('21 * 2')
|
||||
|
||||
expect(result).toEqual(42)
|
||||
})
|
||||
})
|
||||
|
||||
describe('parseCode()', () => {
|
||||
test('parses code to syntax tree', () => {
|
||||
const tree = parseCode('x = 5')
|
||||
|
||||
expect(tree).toHaveProperty('length')
|
||||
expect(tree.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('respects globals', () => {
|
||||
const tree = parseCode('x = myGlobal', { myGlobal: 42 })
|
||||
|
||||
expect(tree.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('handles complex expressions', () => {
|
||||
const tree = parseCode(`add = do x y:
|
||||
x + y
|
||||
end
|
||||
result = add 5 10`)
|
||||
|
||||
expect(tree.length).toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('bytecodeToString()', () => {
|
||||
test('converts bytecode to human-readable format', () => {
|
||||
const bytecode = compileCode('x = 42')
|
||||
const str = bytecodeToString(bytecode)
|
||||
|
||||
expect(typeof str).toEqual('string')
|
||||
expect(str.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('shows instructions', () => {
|
||||
const bytecode = compileCode('1 + 1')
|
||||
const str = bytecodeToString(bytecode)
|
||||
|
||||
// Should contain some opcodes
|
||||
expect(str).toContain('PUSH')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Integration tests', () => {
|
||||
test('complex REPL-like workflow', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
// Define a function
|
||||
await shrimp.run(`double = do x:
|
||||
x * 2
|
||||
end`)
|
||||
expect(shrimp.has('double')).toEqual(true)
|
||||
|
||||
// Use the function
|
||||
const result1 = await shrimp.run('double 21')
|
||||
expect(result1).toEqual(42)
|
||||
|
||||
// Call it from TypeScript
|
||||
const result2 = await shrimp.call('double', 50)
|
||||
expect(result2).toEqual(100)
|
||||
|
||||
// Define another function using the first
|
||||
await shrimp.run(`quadruple = do x:
|
||||
double (double x)
|
||||
end`)
|
||||
|
||||
const result3 = await shrimp.run('quadruple 5')
|
||||
expect(result3).toEqual(20)
|
||||
})
|
||||
|
||||
test('mixing native and Shrimp functions', async () => {
|
||||
const shrimp = new Shrimp({
|
||||
log: (msg: string) => `Logged: ${msg}`,
|
||||
multiply: (a: number, b: number) => a * b,
|
||||
})
|
||||
|
||||
await shrimp.run(`greet = do name:
|
||||
log name
|
||||
end`)
|
||||
|
||||
const result1 = await shrimp.run('greet Alice')
|
||||
expect(result1).toEqual('Logged: Alice')
|
||||
|
||||
await shrimp.run(`calc = do x:
|
||||
multiply x 3
|
||||
end`)
|
||||
|
||||
const result2 = await shrimp.run('calc 7')
|
||||
expect(result2).toEqual(21)
|
||||
})
|
||||
|
||||
test('working with arrays and dicts', async () => {
|
||||
const shrimp = new Shrimp()
|
||||
|
||||
await shrimp.run('nums = [1 2 3 4 5]')
|
||||
expect(shrimp.get('nums')).toEqual([1, 2, 3, 4, 5])
|
||||
|
||||
await shrimp.run("config = [host='localhost' port=3000]")
|
||||
expect(shrimp.get('config')).toEqual({ host: 'localhost', port: 3000 })
|
||||
|
||||
const result = await shrimp.run('length nums')
|
||||
expect(result).toEqual(5)
|
||||
})
|
||||
|
||||
test('compile once, run multiple times', async () => {
|
||||
const bytecode = compileCode('x * 2')
|
||||
|
||||
const shrimp1 = new Shrimp()
|
||||
shrimp1.set('x', 10)
|
||||
const result1 = await shrimp1.run(bytecode)
|
||||
expect(result1).toEqual(20)
|
||||
|
||||
const shrimp2 = new Shrimp()
|
||||
shrimp2.set('x', 100)
|
||||
const result2 = await shrimp2.run(bytecode)
|
||||
expect(result2).toEqual(200)
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@
|
|||
"shrimp"
|
||||
],
|
||||
"extensions": [
|
||||
".sh"
|
||||
".shrimp"
|
||||
],
|
||||
"configuration": "./language-configuration.json"
|
||||
}
|
||||
|
|
@ -80,11 +80,12 @@
|
|||
"publisher": "shrimp-lang",
|
||||
"scripts": {
|
||||
"vscode:prepublish": "bun run package",
|
||||
"compile": "bun run compile:client && bun run compile:server",
|
||||
"generate-prelude-metadata": "bun scripts/generate-prelude-metadata.ts",
|
||||
"compile": "bun run generate-prelude-metadata && bun run compile:client && bun run compile:server",
|
||||
"compile:client": "bun build client/src/extension.ts --outdir client/dist --target node --format cjs --external vscode",
|
||||
"compile:server": "bun build server/src/server.ts --outdir server/dist --target node --format cjs",
|
||||
"watch": "bun run compile:client --watch & bun run compile:server --watch",
|
||||
"package": "bun run compile:client --minify && bun run compile:server --minify",
|
||||
"package": "bun run generate-prelude-metadata && bun run compile:client --minify && bun run compile:server --minify",
|
||||
"check-types": "tsc --noEmit",
|
||||
"build-and-install": "bun run package && bunx @vscode/vsce package --allow-missing-repository && code --install-extension shrimp-*.vsix"
|
||||
},
|
||||
|
|
|
|||
117
vscode-extension/scripts/generate-prelude-metadata.ts
Normal file
117
vscode-extension/scripts/generate-prelude-metadata.ts
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
#!/usr/bin/env bun
|
||||
/**
|
||||
* Generates prelude metadata for the VSCode extension.
|
||||
* - Prelude names (for parser scope tracking)
|
||||
* - Function signatures (for autocomplete)
|
||||
*/
|
||||
|
||||
import { writeFileSync } from 'fs'
|
||||
import { join } from 'path'
|
||||
import { globals } from '../../src/prelude'
|
||||
|
||||
// Extract parameter names from a function
|
||||
const extractParams = (fn: Function): string[] => {
|
||||
const fnStr = fn.toString()
|
||||
const match = fnStr.match(/\(([^)]*)\)/)
|
||||
if (!match) return []
|
||||
|
||||
const paramsStr = match[1]!.trim()
|
||||
if (!paramsStr) return []
|
||||
|
||||
// Split by comma, but be careful of default values with commas
|
||||
const params: string[] = []
|
||||
let current = ''
|
||||
let inString = false
|
||||
let stringChar = ''
|
||||
|
||||
for (let i = 0; i < paramsStr.length; i++) {
|
||||
const char = paramsStr[i]
|
||||
if ((char === '"' || char === "'") && (i === 0 || paramsStr[i - 1] !== '\\')) {
|
||||
if (!inString) {
|
||||
inString = true
|
||||
stringChar = char
|
||||
} else if (char === stringChar) {
|
||||
inString = false
|
||||
}
|
||||
}
|
||||
|
||||
if (char === ',' && !inString) {
|
||||
params.push(current.trim())
|
||||
current = ''
|
||||
} else {
|
||||
current += char
|
||||
}
|
||||
}
|
||||
if (current.trim()) params.push(current.trim())
|
||||
|
||||
return params
|
||||
.map((p) => p.split(/[=:]/)[0]!.trim()) // Handle defaults and types
|
||||
.filter((p) => p && p !== 'this')
|
||||
}
|
||||
|
||||
// Generate metadata for a module
|
||||
const generateModuleMetadata = (module: Record<string, any>) => {
|
||||
const metadata: Record<string, { params: string[] }> = {}
|
||||
|
||||
for (const [name, value] of Object.entries(module)) {
|
||||
if (typeof value === 'function') {
|
||||
metadata[name] = { params: extractParams(value) }
|
||||
}
|
||||
}
|
||||
|
||||
return metadata
|
||||
}
|
||||
|
||||
// Generate names list
|
||||
const names = Object.keys(globals).sort()
|
||||
|
||||
// Generate module metadata
|
||||
const moduleMetadata: Record<string, any> = {}
|
||||
for (const [name, value] of Object.entries(globals)) {
|
||||
if (typeof value === 'object' && value !== null && name !== '$') {
|
||||
moduleMetadata[name] = generateModuleMetadata(value)
|
||||
}
|
||||
}
|
||||
|
||||
// Generate dollar metadata
|
||||
const dollarMetadata: Record<string, { params: string[] }> = {}
|
||||
if (globals.$ && typeof globals.$ === 'object') {
|
||||
for (const key of Object.keys(globals.$)) {
|
||||
dollarMetadata[key] = { params: [] }
|
||||
}
|
||||
}
|
||||
|
||||
// Write prelude-names.ts
|
||||
const namesOutput = `// Auto-generated by scripts/generate-prelude-metadata.ts
|
||||
// Do not edit manually - run 'bun run generate-prelude-metadata' to regenerate
|
||||
|
||||
export const PRELUDE_NAMES = ${JSON.stringify(names, null, 2)} as const
|
||||
`
|
||||
|
||||
const namesPath = join(import.meta.dir, '../server/src/metadata/prelude-names.ts')
|
||||
writeFileSync(namesPath, namesOutput)
|
||||
|
||||
// Write prelude-completions.ts
|
||||
const completionsOutput = `// Auto-generated by scripts/generate-prelude-metadata.ts
|
||||
// Do not edit manually - run 'bun run generate-prelude-metadata' to regenerate
|
||||
|
||||
export type CompletionMetadata = {
|
||||
params: string[]
|
||||
description?: string
|
||||
}
|
||||
|
||||
export const completions = {
|
||||
modules: ${JSON.stringify(moduleMetadata, null, 2)},
|
||||
dollar: ${JSON.stringify(dollarMetadata, null, 2)},
|
||||
} as const
|
||||
`
|
||||
|
||||
const completionsPath = join(import.meta.dir, '../server/src/metadata/prelude-completions.ts')
|
||||
writeFileSync(completionsPath, completionsOutput)
|
||||
|
||||
console.log(`✓ Generated ${names.length} prelude names to server/src/metadata/prelude-names.ts`)
|
||||
console.log(
|
||||
`✓ Generated completions for ${
|
||||
Object.keys(moduleMetadata).length
|
||||
} modules to server/src/metadata/prelude-completions.ts`
|
||||
)
|
||||
52
vscode-extension/server/src/completion/completionProvider.ts
Normal file
52
vscode-extension/server/src/completion/completionProvider.ts
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
import { CompletionItem, CompletionItemKind } from 'vscode-languageserver/node'
|
||||
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||
import { completions } from '../metadata/prelude-completions'
|
||||
import { analyzeCompletionContext } from './contextAnalyzer'
|
||||
|
||||
/**
|
||||
* Provides context-aware completions for Shrimp code.
|
||||
* Returns module function completions (dict.*, list.*, str.*) or dollar property
|
||||
* completions ($.*) based on the cursor position.
|
||||
*/
|
||||
export const provideCompletions = (
|
||||
document: TextDocument,
|
||||
position: { line: number; character: number }
|
||||
): CompletionItem[] => {
|
||||
const context = analyzeCompletionContext(document, position)
|
||||
|
||||
if (context.type === 'module') {
|
||||
return buildModuleCompletions(context.moduleName)
|
||||
}
|
||||
|
||||
if (context.type === 'dollar') {
|
||||
return buildDollarCompletions()
|
||||
}
|
||||
|
||||
return [] // No completions for other contexts yet
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds completion items for module functions (dict.*, list.*, str.*).
|
||||
*/
|
||||
const buildModuleCompletions = (moduleName: string): CompletionItem[] => {
|
||||
const functions = completions.modules[moduleName as keyof typeof completions.modules]
|
||||
if (!functions) return []
|
||||
|
||||
return Object.entries(functions).map(([name, meta]) => ({
|
||||
label: name,
|
||||
kind: CompletionItemKind.Method,
|
||||
detail: `(${meta.params.join(', ')})`,
|
||||
insertText: name,
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds completion items for dollar properties ($.*).
|
||||
*/
|
||||
const buildDollarCompletions = (): CompletionItem[] => {
|
||||
return Object.entries(completions.dollar).map(([name, meta]) => ({
|
||||
label: name,
|
||||
kind: CompletionItemKind.Property,
|
||||
insertText: name,
|
||||
}))
|
||||
}
|
||||
66
vscode-extension/server/src/completion/contextAnalyzer.ts
Normal file
66
vscode-extension/server/src/completion/contextAnalyzer.ts
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||
import { SyntaxNode } from '@lezer/common'
|
||||
import { parser } from '../../../../src/parser/shrimp'
|
||||
import * as Terms from '../../../../src/parser/shrimp.terms'
|
||||
|
||||
export type CompletionContext =
|
||||
| { type: 'module'; moduleName: string }
|
||||
| { type: 'dollar' }
|
||||
| { type: 'none' }
|
||||
|
||||
/**
|
||||
* Analyzes the document at the given position to determine what kind of
|
||||
* completion context we're in (module member access, dollar property, or none).
|
||||
*/
|
||||
export const analyzeCompletionContext = (
|
||||
document: TextDocument,
|
||||
position: { line: number; character: number }
|
||||
): CompletionContext => {
|
||||
const offset = document.offsetAt(position)
|
||||
const text = document.getText()
|
||||
const tree = parser.parse(text)
|
||||
|
||||
// Find node at cursor - could be DotGet or Identifier inside DotGet
|
||||
const node = tree.resolveInner(offset, -1)
|
||||
|
||||
console.log(`🔍 Node at cursor: ${node.name} (type: ${node.type.id})`)
|
||||
console.log(`🔍 Parent: ${node.parent?.name} (type: ${node.parent?.type.id})`)
|
||||
console.log(`🔍 Node text: "${text.slice(node.from, node.to)}"`)
|
||||
|
||||
const SUPPORTED_MODULES = ['dict', 'list', 'str', 'math', 'fs', 'json', 'load']
|
||||
|
||||
// Case 1: Incomplete DotGet (dict. or $.)
|
||||
// resolveInner returns DotGet node directly
|
||||
if (node.type.id === Terms.DotGet) {
|
||||
const leftSide = extractLeftSide(node, text)
|
||||
console.log(`✅ Case 1: DotGet found, left side: "${leftSide}"`)
|
||||
if (leftSide === '$') return { type: 'dollar' }
|
||||
if (SUPPORTED_MODULES.includes(leftSide)) {
|
||||
return { type: 'module', moduleName: leftSide }
|
||||
}
|
||||
}
|
||||
|
||||
// Case 2: Partial identifier (dict.g or $.e)
|
||||
// resolveInner returns Identifier, parent is DotGet
|
||||
if (node.type.id === Terms.Identifier && node.parent?.type.id === Terms.DotGet) {
|
||||
const dotGetNode = node.parent
|
||||
const leftSide = extractLeftSide(dotGetNode, text)
|
||||
console.log(`✅ Case 2: Identifier in DotGet found, left side: "${leftSide}"`)
|
||||
if (leftSide === '$') return { type: 'dollar' }
|
||||
if (SUPPORTED_MODULES.includes(leftSide)) {
|
||||
return { type: 'module', moduleName: leftSide }
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`❌ No matching context found`)
|
||||
return { type: 'none' }
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the text of the left side of a DotGet node (the part before the dot).
|
||||
*/
|
||||
const extractLeftSide = (dotGetNode: SyntaxNode, text: string): string => {
|
||||
const firstChild = dotGetNode.firstChild
|
||||
if (!firstChild) return ''
|
||||
return text.slice(firstChild.from, firstChild.to)
|
||||
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
import { SyntaxNode } from '@lezer/common'
|
||||
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||
import * as Terms from '../../../src/parser/shrimp.terms'
|
||||
import { globals } from '../../../src/prelude'
|
||||
import { PRELUDE_NAMES } from './metadata/prelude-names'
|
||||
|
||||
/**
|
||||
* Tracks variables in scope at a given position in the parse tree.
|
||||
|
|
@ -13,8 +13,7 @@ export class EditorScopeAnalyzer {
|
|||
|
||||
constructor(document: TextDocument) {
|
||||
this.document = document
|
||||
const preludeKeys = Object.keys(globals)
|
||||
this.scopeCache.set(0, new Set(preludeKeys))
|
||||
this.scopeCache.set(0, new Set(PRELUDE_NAMES))
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
732
vscode-extension/server/src/metadata/prelude-completions.ts
Normal file
732
vscode-extension/server/src/metadata/prelude-completions.ts
Normal file
|
|
@ -0,0 +1,732 @@
|
|||
// Auto-generated by scripts/generate-prelude-metadata.ts
|
||||
// Do not edit manually - run 'bun run generate-prelude-metadata' to regenerate
|
||||
|
||||
export type CompletionMetadata = {
|
||||
params: string[]
|
||||
description?: string
|
||||
}
|
||||
|
||||
export const completions = {
|
||||
modules: {
|
||||
"dict": {
|
||||
"keys": {
|
||||
"params": [
|
||||
"dict"
|
||||
]
|
||||
},
|
||||
"values": {
|
||||
"params": [
|
||||
"dict"
|
||||
]
|
||||
},
|
||||
"entries": {
|
||||
"params": [
|
||||
"dict"
|
||||
]
|
||||
},
|
||||
"has?": {
|
||||
"params": [
|
||||
"dict",
|
||||
"key"
|
||||
]
|
||||
},
|
||||
"get": {
|
||||
"params": [
|
||||
"dict",
|
||||
"key",
|
||||
"defaultValue"
|
||||
]
|
||||
},
|
||||
"set": {
|
||||
"params": [
|
||||
"dict",
|
||||
"key",
|
||||
"value"
|
||||
]
|
||||
},
|
||||
"merge": {
|
||||
"params": [
|
||||
"...dicts"
|
||||
]
|
||||
},
|
||||
"empty?": {
|
||||
"params": [
|
||||
"dict"
|
||||
]
|
||||
},
|
||||
"map": {
|
||||
"params": [
|
||||
"dict",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"filter": {
|
||||
"params": [
|
||||
"dict",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"from-entries": {
|
||||
"params": [
|
||||
"entries"
|
||||
]
|
||||
}
|
||||
},
|
||||
"fs": {
|
||||
"ls": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"mkdir": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"rmdir": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"pwd": {
|
||||
"params": []
|
||||
},
|
||||
"cd": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"read": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"cat": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"read-bytes": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"write": {
|
||||
"params": [
|
||||
"path",
|
||||
"content"
|
||||
]
|
||||
},
|
||||
"append": {
|
||||
"params": [
|
||||
"path",
|
||||
"content"
|
||||
]
|
||||
},
|
||||
"delete": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"rm": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"copy": {
|
||||
"params": [
|
||||
"from",
|
||||
"to"
|
||||
]
|
||||
},
|
||||
"move": {
|
||||
"params": [
|
||||
"from",
|
||||
"to"
|
||||
]
|
||||
},
|
||||
"mv": {
|
||||
"params": [
|
||||
"from",
|
||||
"to"
|
||||
]
|
||||
},
|
||||
"basename": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"dirname": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"extname": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"join": {
|
||||
"params": [
|
||||
"...paths"
|
||||
]
|
||||
},
|
||||
"resolve": {
|
||||
"params": [
|
||||
"...paths"
|
||||
]
|
||||
},
|
||||
"stat": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"exists?": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"file?": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"dir?": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"symlink?": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"exec?": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"size": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"chmod": {
|
||||
"params": [
|
||||
"path",
|
||||
"mode"
|
||||
]
|
||||
},
|
||||
"symlink": {
|
||||
"params": [
|
||||
"target",
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"readlink": {
|
||||
"params": [
|
||||
"path"
|
||||
]
|
||||
},
|
||||
"glob": {
|
||||
"params": [
|
||||
"pattern"
|
||||
]
|
||||
},
|
||||
"watch": {
|
||||
"params": [
|
||||
"path",
|
||||
"callback"
|
||||
]
|
||||
},
|
||||
"cp": {
|
||||
"params": [
|
||||
"from",
|
||||
"to"
|
||||
]
|
||||
}
|
||||
},
|
||||
"json": {
|
||||
"encode": {
|
||||
"params": [
|
||||
"s"
|
||||
]
|
||||
},
|
||||
"decode": {
|
||||
"params": [
|
||||
"s"
|
||||
]
|
||||
},
|
||||
"parse": {
|
||||
"params": [
|
||||
"s"
|
||||
]
|
||||
},
|
||||
"stringify": {
|
||||
"params": [
|
||||
"s"
|
||||
]
|
||||
}
|
||||
},
|
||||
"list": {
|
||||
"slice": {
|
||||
"params": [
|
||||
"list",
|
||||
"start",
|
||||
"end"
|
||||
]
|
||||
},
|
||||
"map": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"filter": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"reject": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"reduce": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb",
|
||||
"initial"
|
||||
]
|
||||
},
|
||||
"find": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"empty?": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"contains?": {
|
||||
"params": [
|
||||
"list",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"includes?": {
|
||||
"params": [
|
||||
"list",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"has?": {
|
||||
"params": [
|
||||
"list",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"any?": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"all?": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"push": {
|
||||
"params": [
|
||||
"list",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"pop": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"shift": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"unshift": {
|
||||
"params": [
|
||||
"list",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"splice": {
|
||||
"params": [
|
||||
"list",
|
||||
"start",
|
||||
"deleteCount",
|
||||
"...items"
|
||||
]
|
||||
},
|
||||
"insert": {
|
||||
"params": [
|
||||
"list",
|
||||
"index",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"reverse": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"sort": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"concat": {
|
||||
"params": [
|
||||
"...lists"
|
||||
]
|
||||
},
|
||||
"flatten": {
|
||||
"params": [
|
||||
"list",
|
||||
"depth"
|
||||
]
|
||||
},
|
||||
"unique": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"zip": {
|
||||
"params": [
|
||||
"list1",
|
||||
"list2"
|
||||
]
|
||||
},
|
||||
"first": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"last": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"rest": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"take": {
|
||||
"params": [
|
||||
"list",
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"drop": {
|
||||
"params": [
|
||||
"list",
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"append": {
|
||||
"params": [
|
||||
"list",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"prepend": {
|
||||
"params": [
|
||||
"list",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"index-of": {
|
||||
"params": [
|
||||
"list",
|
||||
"item"
|
||||
]
|
||||
},
|
||||
"sum": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"count": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"partition": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
},
|
||||
"compact": {
|
||||
"params": [
|
||||
"list"
|
||||
]
|
||||
},
|
||||
"group-by": {
|
||||
"params": [
|
||||
"list",
|
||||
"cb"
|
||||
]
|
||||
}
|
||||
},
|
||||
"math": {
|
||||
"abs": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"floor": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"ceil": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"round": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"min": {
|
||||
"params": [
|
||||
"...nums"
|
||||
]
|
||||
},
|
||||
"max": {
|
||||
"params": [
|
||||
"...nums"
|
||||
]
|
||||
},
|
||||
"pow": {
|
||||
"params": [
|
||||
"base",
|
||||
"exp"
|
||||
]
|
||||
},
|
||||
"sqrt": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"random": {
|
||||
"params": []
|
||||
},
|
||||
"clamp": {
|
||||
"params": [
|
||||
"n",
|
||||
"min",
|
||||
"max"
|
||||
]
|
||||
},
|
||||
"sign": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"trunc": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"even?": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"odd?": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"positive?": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"negative?": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
},
|
||||
"zero?": {
|
||||
"params": [
|
||||
"n"
|
||||
]
|
||||
}
|
||||
},
|
||||
"str": {
|
||||
"join": {
|
||||
"params": [
|
||||
"arr",
|
||||
"sep"
|
||||
]
|
||||
},
|
||||
"split": {
|
||||
"params": [
|
||||
"str",
|
||||
"sep"
|
||||
]
|
||||
},
|
||||
"to-upper": {
|
||||
"params": [
|
||||
"str"
|
||||
]
|
||||
},
|
||||
"to-lower": {
|
||||
"params": [
|
||||
"str"
|
||||
]
|
||||
},
|
||||
"trim": {
|
||||
"params": [
|
||||
"str"
|
||||
]
|
||||
},
|
||||
"starts-with?": {
|
||||
"params": [
|
||||
"str",
|
||||
"prefix"
|
||||
]
|
||||
},
|
||||
"ends-with?": {
|
||||
"params": [
|
||||
"str",
|
||||
"suffix"
|
||||
]
|
||||
},
|
||||
"contains?": {
|
||||
"params": [
|
||||
"str",
|
||||
"substr"
|
||||
]
|
||||
},
|
||||
"empty?": {
|
||||
"params": [
|
||||
"str"
|
||||
]
|
||||
},
|
||||
"index-of": {
|
||||
"params": [
|
||||
"str",
|
||||
"search"
|
||||
]
|
||||
},
|
||||
"last-index-of": {
|
||||
"params": [
|
||||
"str",
|
||||
"search"
|
||||
]
|
||||
},
|
||||
"replace": {
|
||||
"params": [
|
||||
"str",
|
||||
"search",
|
||||
"replacement"
|
||||
]
|
||||
},
|
||||
"replace-all": {
|
||||
"params": [
|
||||
"str",
|
||||
"search",
|
||||
"replacement"
|
||||
]
|
||||
},
|
||||
"slice": {
|
||||
"params": [
|
||||
"str",
|
||||
"start",
|
||||
"end"
|
||||
]
|
||||
},
|
||||
"substring": {
|
||||
"params": [
|
||||
"str",
|
||||
"start",
|
||||
"end"
|
||||
]
|
||||
},
|
||||
"repeat": {
|
||||
"params": [
|
||||
"str",
|
||||
"count"
|
||||
]
|
||||
},
|
||||
"pad-start": {
|
||||
"params": [
|
||||
"str",
|
||||
"length",
|
||||
"pad"
|
||||
]
|
||||
},
|
||||
"pad-end": {
|
||||
"params": [
|
||||
"str",
|
||||
"length",
|
||||
"pad"
|
||||
]
|
||||
},
|
||||
"lines": {
|
||||
"params": [
|
||||
"str"
|
||||
]
|
||||
},
|
||||
"chars": {
|
||||
"params": [
|
||||
"str"
|
||||
]
|
||||
},
|
||||
"match": {
|
||||
"params": [
|
||||
"str",
|
||||
"regex"
|
||||
]
|
||||
},
|
||||
"test?": {
|
||||
"params": [
|
||||
"str",
|
||||
"regex"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
dollar: {
|
||||
"args": {
|
||||
"params": []
|
||||
},
|
||||
"argv": {
|
||||
"params": []
|
||||
},
|
||||
"env": {
|
||||
"params": []
|
||||
},
|
||||
"pid": {
|
||||
"params": []
|
||||
},
|
||||
"cwd": {
|
||||
"params": []
|
||||
},
|
||||
"script": {
|
||||
"params": []
|
||||
}
|
||||
},
|
||||
} as const
|
||||
40
vscode-extension/server/src/metadata/prelude-names.ts
Normal file
40
vscode-extension/server/src/metadata/prelude-names.ts
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
// Auto-generated by scripts/generate-prelude-metadata.ts
|
||||
// Do not edit manually - run 'bun run generate-prelude-metadata' to regenerate
|
||||
|
||||
export const PRELUDE_NAMES = [
|
||||
"$",
|
||||
"array?",
|
||||
"at",
|
||||
"bnot",
|
||||
"boolean?",
|
||||
"dec",
|
||||
"describe",
|
||||
"dict",
|
||||
"dict?",
|
||||
"each",
|
||||
"echo",
|
||||
"empty?",
|
||||
"exit",
|
||||
"fs",
|
||||
"function?",
|
||||
"identity",
|
||||
"import",
|
||||
"inc",
|
||||
"inspect",
|
||||
"json",
|
||||
"length",
|
||||
"list",
|
||||
"load",
|
||||
"math",
|
||||
"not",
|
||||
"null?",
|
||||
"number?",
|
||||
"range",
|
||||
"ref",
|
||||
"some?",
|
||||
"str",
|
||||
"string?",
|
||||
"type",
|
||||
"var",
|
||||
"var?"
|
||||
] as const
|
||||
|
|
@ -1,7 +1,11 @@
|
|||
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||
import { buildDiagnostics } from './diagnostics'
|
||||
import { buildSemanticTokens, TOKEN_MODIFIERS, TOKEN_TYPES } from './semanticTokens'
|
||||
import { provideCompletions } from './completion/completionProvider'
|
||||
import { provideSignatureHelp } from './signatureHelp'
|
||||
import { PRELUDE_NAMES } from './metadata/prelude-names'
|
||||
import { parser } from '../../../src/parser/shrimp'
|
||||
import { setGlobals } from '../../../src/parser/tokenizer'
|
||||
import { Compiler } from '../../../src/compiler/compiler'
|
||||
import { Tree } from '@lezer/common'
|
||||
import {
|
||||
|
|
@ -16,6 +20,9 @@ import {
|
|||
import { setGlobals } from '../../../src/parser/tokenizer'
|
||||
import { globals } from '../../../src/prelude'
|
||||
|
||||
// Initialize parser with prelude globals so it knows dict/list/str are in scope
|
||||
setGlobals(PRELUDE_NAMES)
|
||||
|
||||
const connection = createConnection(ProposedFeatures.all)
|
||||
const documents = new TextDocuments(TextDocument)
|
||||
documents.listen(connection)
|
||||
|
|
@ -31,6 +38,7 @@ documents.onDidOpen(handleDocumentOpen)
|
|||
documents.onDidChangeContent(handleDocumentChange)
|
||||
documents.onDidClose(handleDocumentClose)
|
||||
connection.onCompletion(handleCompletion)
|
||||
connection.onSignatureHelp(handleSignatureHelp)
|
||||
|
||||
// Debug commands
|
||||
connection.onRequest('shrimp/parseTree', handleParseTree)
|
||||
|
|
@ -48,6 +56,9 @@ function handleInitialize(): InitializeResult {
|
|||
completionProvider: {
|
||||
triggerCharacters: ['.'],
|
||||
},
|
||||
signatureHelpProvider: {
|
||||
triggerCharacters: [' '],
|
||||
},
|
||||
semanticTokensProvider: {
|
||||
legend: {
|
||||
tokenTypes: TOKEN_TYPES,
|
||||
|
|
@ -96,13 +107,49 @@ function handleDocumentClose(event: TextDocumentChangeEvent<TextDocument>) {
|
|||
documentTrees.delete(event.document.uri)
|
||||
}
|
||||
|
||||
function handleCompletion() {
|
||||
const keywords = ['if', 'else', 'do', 'end', 'and', 'or', 'true', 'false', 'null']
|
||||
function handleCompletion(params: any) {
|
||||
const document = documents.get(params.textDocument.uri)
|
||||
if (!document) {
|
||||
console.log('❌ No document found')
|
||||
return []
|
||||
}
|
||||
|
||||
return keywords.map((keyword) => ({
|
||||
const position = params.position
|
||||
const text = document.getText()
|
||||
const offset = document.offsetAt(position)
|
||||
console.log(`📍 Text around cursor: "${text.slice(Math.max(0, offset - 10), offset + 10)}"`)
|
||||
|
||||
// First try context-aware completions (module/dollar)
|
||||
const contextCompletions = provideCompletions(document, position)
|
||||
console.log(`🎯 Context completions count: ${contextCompletions.length}`)
|
||||
if (contextCompletions.length > 0) {
|
||||
console.log(
|
||||
`✅ Returning ${contextCompletions.length} completions:`,
|
||||
contextCompletions.map((c) => c.label).join(', ')
|
||||
)
|
||||
return contextCompletions
|
||||
}
|
||||
|
||||
// Fall back to keywords + prelude globals (for Ctrl+Space in general context)
|
||||
console.log(`⌨️ Falling back to keywords + prelude globals`)
|
||||
const keywords = ['if', 'else', 'do', 'end', 'and', 'or', 'true', 'false', 'null']
|
||||
const keywordCompletions = keywords.map((keyword) => ({
|
||||
label: keyword,
|
||||
kind: CompletionItemKind.Keyword,
|
||||
}))
|
||||
|
||||
const preludeCompletions = PRELUDE_NAMES.map((name) => ({
|
||||
label: name,
|
||||
kind: CompletionItemKind.Function,
|
||||
}))
|
||||
|
||||
return [...keywordCompletions, ...preludeCompletions]
|
||||
}
|
||||
|
||||
function handleSignatureHelp(params: any) {
|
||||
const document = documents.get(params.textDocument.uri)
|
||||
if (!document) return
|
||||
return provideSignatureHelp(document, params.position)
|
||||
}
|
||||
|
||||
function handleParseTree(params: { uri: string }) {
|
||||
|
|
|
|||
105
vscode-extension/server/src/signatureHelp.ts
Normal file
105
vscode-extension/server/src/signatureHelp.ts
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
import { SignatureHelp, SignatureInformation, ParameterInformation } from 'vscode-languageserver/node'
|
||||
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||
import { Tree, SyntaxNode } from '@lezer/common'
|
||||
import { parser } from '../../../src/parser/shrimp'
|
||||
import { completions } from './metadata/prelude-completions'
|
||||
|
||||
export const provideSignatureHelp = (
|
||||
document: TextDocument,
|
||||
position: { line: number; character: number }
|
||||
): SignatureHelp | undefined => {
|
||||
const text = document.getText()
|
||||
const tree = parser.parse(text)
|
||||
const cursorPos = document.offsetAt(position)
|
||||
|
||||
const context = findCallContext(tree, cursorPos, text)
|
||||
if (!context) return
|
||||
|
||||
const params = lookupFunctionParams(context.funcName)
|
||||
if (!params) return
|
||||
|
||||
return {
|
||||
signatures: [buildSignature(context.funcName, params)],
|
||||
activeParameter: Math.min(context.argCount, params.length - 1),
|
||||
}
|
||||
}
|
||||
|
||||
// Locates the function-call node most relevant to the cursor and counts the
// arguments that precede the cursor inside it. Returns { funcName, argCount }
// or undefined when no call is associated with the cursor position.
const findCallContext = (tree: Tree, cursorPos: number, text: string) => {
  // Depth-first search for the best-matching call node. When several nodes
  // qualify, the one with the smallest span wins — i.e. the most deeply
  // nested / most specific call.
  const findBestCall = (node: SyntaxNode): SyntaxNode | undefined => {
    let result: SyntaxNode | undefined

    const isCall = node.name === 'FunctionCall' || node.name === 'FunctionCallOrIdentifier'

    // Call ends just before cursor (within 5 chars) — catches typing a space
    // or the next argument immediately after the call's last token.
    // NOTE(review): the 5-char window is a heuristic; confirm it matches how
    // the grammar terminates incomplete calls.
    if (isCall && node.to <= cursorPos && cursorPos <= node.to + 5) {
      result = node
    }

    // Cursor is inside the call's span
    if (isCall && node.from < cursorPos && cursorPos < node.to) {
      result = node
    }

    // Recurse - prefer smaller spans (more specific)
    let child = node.firstChild
    while (child) {
      const found = findBestCall(child)
      if (found) {
        const foundSpan = found.to - found.from
        const resultSpan = result ? result.to - result.from : Infinity
        if (foundSpan < resultSpan) {
          result = found
        }
      }
      child = child.nextSibling
    }

    return result
  }

  const call = findBestCall(tree.topNode)
  if (!call) return

  // Count args before cursor: only argument nodes that end at or before the
  // cursor contribute, so argCount doubles as the active-parameter index.
  let argCount = 0
  let child = call.firstChild
  while (child) {
    if ((child.name === 'PositionalArg' || child.name === 'NamedArg') && child.to <= cursorPos) {
      argCount++
    }
    child = child.nextSibling
  }

  // Extract function name from the callee node (the call's first child):
  // either a DotGet ("module.method") or a bare Identifier. The name is the
  // callee's verbatim source text.
  const firstChild = call.firstChild
  if (!firstChild) return

  let funcName: string | undefined
  if (firstChild.name === 'DotGet') {
    funcName = text.slice(firstChild.from, firstChild.to)
  } else if (firstChild.name === 'Identifier') {
    funcName = text.slice(firstChild.from, firstChild.to)
  }

  if (!funcName) return

  return { funcName, argCount }
}
|
||||
|
||||
const lookupFunctionParams = (funcName: string): string[] | undefined => {
|
||||
// Handle module functions: "list.map" → modules.list.map
|
||||
if (funcName.includes('.')) {
|
||||
const [moduleName, methodName] = funcName.split('.')
|
||||
const module = completions.modules[moduleName as keyof typeof completions.modules]
|
||||
const method = module?.[methodName as keyof typeof module]
|
||||
return method?.params as string[] | undefined
|
||||
}
|
||||
|
||||
// TODO: Handle top-level prelude functions (print, range, etc.)
|
||||
}
|
||||
|
||||
const buildSignature = (funcName: string, params: string[]): SignatureInformation => {
|
||||
const label = `${funcName}(${params.join(', ')})`
|
||||
const parameters: ParameterInformation[] = params.map(p => ({ label: p }))
|
||||
return { label, parameters }
|
||||
}
|
||||
41
vscode-extension/tmp/test-dotget-parse.ts
Normal file
41
vscode-extension/tmp/test-dotget-parse.ts
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
import { parser } from '../../src/parser/shrimp'
|
||||
import { setGlobals } from '../../src/parser/tokenizer'
|
||||
import { PRELUDE_NAMES } from '../server/src/prelude-names'
|
||||
|
||||
// Set globals for DotGet detection
|
||||
setGlobals(PRELUDE_NAMES as unknown as string[])
|
||||
|
||||
// Test cases - does incomplete DotGet parse correctly?
|
||||
const testCases = [
|
||||
'dict.',
|
||||
'dict.g',
|
||||
'dict.get',
|
||||
'$.',
|
||||
'$.e',
|
||||
'$.env',
|
||||
]
|
||||
|
||||
for (const code of testCases) {
|
||||
console.log(`\nTesting: "${code}"`)
|
||||
const tree = parser.parse(code)
|
||||
const cursor = tree.cursor()
|
||||
|
||||
// Print the parse tree
|
||||
const printTree = (depth = 0) => {
|
||||
const indent = ' '.repeat(depth)
|
||||
console.log(`${indent}${cursor.name} [${cursor.from}-${cursor.to}]`)
|
||||
|
||||
if (cursor.firstChild()) {
|
||||
do {
|
||||
printTree(depth + 1)
|
||||
} while (cursor.nextSibling())
|
||||
cursor.parent()
|
||||
}
|
||||
}
|
||||
|
||||
printTree()
|
||||
|
||||
// Check at cursor position (end of string)
|
||||
const node = tree.resolveInner(code.length, -1)
|
||||
console.log(`Node at end: ${node.name} (type: ${node.type.id})`)
|
||||
}
|
||||
Loading…
Reference in New Issue
Block a user