Compare commits
225 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 491e37a7f8 | |||
| 69b2297280 | |||
| 87cb01392a | |||
| e45a6d9bf7 | |||
| 71c5e31836 | |||
| 4ccf97f667 | |||
| 03a83abfbb | |||
| d8c63e7981 | |||
| 93518f8294 | |||
| 1a308eadf5 | |||
| 259e7a7dd4 | |||
| 6ae955e926 | |||
| 59b92714d2 | |||
| 4da3c5ac06 | |||
| 31603d705a | |||
| b49619c110 | |||
| 5994a2d8f4 | |||
|
|
b21751a790 | ||
|
|
65119b720a | ||
|
|
88ee108a1e | ||
|
|
e1859c1bda | ||
|
|
07a42d9767 | ||
|
|
ef20c67e61 | ||
| 9b1890a3db | |||
| 21e7ed41af | |||
| 757a50e23e | |||
| cb7cdaea62 | |||
| 688181654e | |||
| 728c5df9eb | |||
| 04e14cd83e | |||
| b2d298ec6f | |||
|
|
5ad6125527 | ||
|
|
f160093c4d | ||
|
|
1ea130f8e0 | ||
|
|
ae9896c8a2 | ||
|
|
0d3f9867e6 | ||
|
|
cbc75f5ed7 | ||
|
|
a836591854 | ||
|
|
d0005d9ccd | ||
|
|
cc604bea49 | ||
|
|
2c2b277b29 | ||
|
|
1682a7ccb7 | ||
|
|
0e92525b54 | ||
|
|
6a6675d30f | ||
|
|
d003d65a15 | ||
|
|
579d755205 | ||
|
|
566beb87ef | ||
|
|
9e4471ad38 | ||
|
|
3eac0a27a5 | ||
|
|
e38e8d4f1e | ||
| abd78108c8 | |||
| ae46988219 | |||
| e4bdddc762 | |||
| 7feb3cd7b0 | |||
| 1fec471da9 | |||
| 09d2420508 | |||
| 028ccf2bf9 | |||
| 1458da58cc | |||
| 4a27a8b474 | |||
| f13be7817c | |||
| 7fe6e3b5ad | |||
| c4368f24fc | |||
| dcf94296fa | |||
| 12370361c4 | |||
| 0c6ce16bcd | |||
| c244435ae2 | |||
| b400f48676 | |||
| 793565cafa | |||
| feae5d314e | |||
| 10e1986fe2 | |||
| 9eaa71fe2d | |||
| f58ff1785a | |||
| 970ceeb8b0 | |||
| e2f5024a4c | |||
| 8008f37f16 | |||
| c9140bd018 | |||
| ba5ce0a88c | |||
| 398cd57b1d | |||
| f8718ac05b | |||
| d4596c3afa | |||
| 69bbe17992 | |||
| 2d4c79b30f | |||
| 238af9affc | |||
| a6c283759d | |||
| 63ee57e7f0 | |||
| 503ca41155 | |||
| a156d24a91 | |||
| 019f7d84b1 | |||
| 4c794944ef | |||
| 99a5aa5312 | |||
| 7bbf43a725 | |||
| 4c15526d1b | |||
| c741cfee51 | |||
| 012b8c8cf1 | |||
| 4c3f7a8bfc | |||
| fe6f54b402 | |||
| 49f3f3e09f | |||
| 0d1dce4868 | |||
| d18ab2507c | |||
| 7e69356f79 | |||
| 9863f46f38 | |||
| 45f31d0678 | |||
| 49a6320fef | |||
| 51f67ac908 | |||
| 7da437212d | |||
| 740379d7b2 | |||
| 19c4fb5033 | |||
| f57452ece2 | |||
| 4590d66105 | |||
| 3aa40ae2c2 | |||
| da0af799d8 | |||
| 9f45252522 | |||
| bae0da31c2 | |||
| 4258503c0e | |||
| d4a772e88b | |||
| 68ec6f9f3e | |||
|
|
59cf459d74 | ||
|
|
890eb811b9 | ||
|
|
fd3c5da59b | ||
| 13adbe4c0e | |||
| b3ec6995db | |||
| 854ed02625 | |||
| c325bca611 | |||
| 1082cc1281 | |||
| afaedeea23 | |||
| 3ac606d0b2 | |||
| 62e42328e1 | |||
| 5b363c833a | |||
| e0095b110f | |||
| a38932a833 | |||
| 03596aab5b | |||
| bd1dbe75f3 | |||
| 669e58b71e | |||
| 152aac269f | |||
| a428e98d41 | |||
| d6aea4b0f9 | |||
| 44b30d2339 | |||
| 3aa75843ac | |||
| 061452a334 | |||
| 4494cbce91 | |||
| 47d1ea1a0b | |||
| 82a97c0a5a | |||
| 7645efc4f9 | |||
| 47c3fda4c8 | |||
| ab12212df2 | |||
| fcfbace65e | |||
| 54a5fec08e | |||
| ea01a93563 | |||
| dec2f2d094 | |||
| 66fa15595c | |||
| 7b4a02ec29 | |||
| 7229f4afd0 | |||
| 290ac59cee | |||
| b0f6c75427 | |||
| 146d2a22ee | |||
| 5ff78d49c1 | |||
| f4a065beae | |||
| 750ffbbfa8 | |||
| a5c7cc6304 | |||
| 4ae12a217e | |||
| 4a8aa7421d | |||
| 03c7bfee39 | |||
| 1a3e041001 | |||
| 600330ba7f | |||
| a535dc9605 | |||
| 0e96911879 | |||
| fa67c26c0a | |||
| 5f46346213 | |||
| 6112d7e5a2 | |||
| 653ff5df10 | |||
| f9b0aa2db5 | |||
| 7589518ca7 | |||
| d93ce85178 | |||
| e39b67c87c | |||
| f57b1c985e | |||
| d074b59a89 | |||
| e49583d959 | |||
| b651ff9583 | |||
| f3c6f2c032 | |||
| b99394e94f | |||
| 2d7f0dbe25 | |||
| e0e5e82869 | |||
| d707ee7e6b | |||
| b31b981343 | |||
| 67e0db090b | |||
| 24e0b49679 | |||
| 70ac5544a9 | |||
| 7756306e1d | |||
| 7bcd582dc6 | |||
| 6f531a2ebf | |||
| e68624b608 | |||
| 2fab792c1a | |||
| f1eaafee19 | |||
| 950eef0e69 | |||
| dc557deb40 | |||
| ee0e6c6c41 | |||
| 5f4bf60062 | |||
| f4cbe54a88 | |||
|
|
90a1f63847 | ||
|
|
402748d1da | ||
|
|
cc06bdf2a7 | ||
| fec4b626df | |||
| fa034d4bd4 | |||
| 8addb77e90 | |||
| 1791e5a6c7 | |||
| f14013aa55 | |||
| 0d631ccf84 | |||
| 2fa432ea3f | |||
| 78849c7d36 | |||
| 0aeaed60c3 | |||
| 887be41248 | |||
| 0d73789a25 | |||
| 4f53218b9f | |||
| 34c7d244ce | |||
| 2329a2ebb6 | |||
| c883854187 | |||
| f31be80bb0 | |||
| a8fd79a990 | |||
| bc0684185a | |||
| e60e3184fa | |||
| f8d2236292 | |||
| 4f961d3039 | |||
| d957675ac8 | |||
| 9bc514a782 | |||
| 701ca98401 |
5
.gitignore
vendored
5
.gitignore
vendored
|
|
@ -34,4 +34,7 @@ report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
|
||||||
/tmp
|
/tmp
|
||||||
/docs
|
vscode-extension/tmp
|
||||||
|
/docs
|
||||||
|
|
||||||
|
*.vsix
|
||||||
|
|
|
||||||
|
|
@ -200,7 +200,7 @@ function parseExpression(input: string) {
|
||||||
- **Not in scope** → Parses as `Word("obj.prop")` → compiles to `PUSH 'obj.prop'` (treated as file path/string)
|
- **Not in scope** → Parses as `Word("obj.prop")` → compiles to `PUSH 'obj.prop'` (treated as file path/string)
|
||||||
|
|
||||||
Implementation files:
|
Implementation files:
|
||||||
- **src/parser/scopeTracker.ts**: ContextTracker that maintains immutable scope chain
|
- **src/parser/parserScopeContext.ts**: ContextTracker that maintains immutable scope chain
|
||||||
- **src/parser/tokenizer.ts**: External tokenizer checks `stack.context` to decide if dot creates DotGet or Word
|
- **src/parser/tokenizer.ts**: External tokenizer checks `stack.context` to decide if dot creates DotGet or Word
|
||||||
- Scope tracking: Captures variables from assignments (`x = 5`) and function parameters (`fn x:`)
|
- Scope tracking: Captures variables from assignments (`x = 5`) and function parameters (`fn x:`)
|
||||||
- See `src/parser/tests/dot-get.test.ts` for comprehensive examples
|
- See `src/parser/tests/dot-get.test.ts` for comprehensive examples
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,7 @@ Go to http://localhost:3000 to try out the playground.
|
||||||
tail log.txt lines=50
|
tail log.txt lines=50
|
||||||
|
|
||||||
name = "Shrimp"
|
name = "Shrimp"
|
||||||
greet = fn person: echo "Hello" person
|
greet = do person: echo "Hello" person
|
||||||
|
|
||||||
result = tail log.txt lines=10
|
result = tail log.txt lines=10
|
||||||
|
|
||||||
|
|
@ -33,7 +33,7 @@ Go to http://localhost:3000 to try out the playground.
|
||||||
## Architecture
|
## Architecture
|
||||||
|
|
||||||
**parser/** - Lezer grammar and tokenizers that parse Shrimp code into syntax trees
|
**parser/** - Lezer grammar and tokenizers that parse Shrimp code into syntax trees
|
||||||
**editor/** - CodeMirror integration with syntax highlighting and language support
|
**editor/** - CodeMirror integration with syntax highlighting and language support
|
||||||
**compiler/** - Transforms syntax trees into ReefVM bytecode for execution
|
**compiler/** - Transforms syntax trees into ReefVM bytecode for execution
|
||||||
|
|
||||||
The flow: Shrimp source → parser (CST) → compiler (bytecode) → ReefVM (execution)
|
The flow: Shrimp source → parser (CST) → compiler (bytecode) → ReefVM (execution)
|
||||||
|
|
|
||||||
192
bin/parser-tree.ts
Executable file
192
bin/parser-tree.ts
Executable file
|
|
@ -0,0 +1,192 @@
|
||||||
|
#!/usr/bin/env bun
|
||||||
|
|
||||||
|
// WARNING: [[ No human has been anywhere near this file. It's pure Claude slop.
|
||||||
|
// Enter at your own risk. ]]
|
||||||
|
|
||||||
|
import { readFileSync } from 'fs'
|
||||||
|
|
||||||
|
type CallInfo = {
|
||||||
|
method: string
|
||||||
|
line: number
|
||||||
|
calls: Set<string>
|
||||||
|
isRecursive?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the parser file and extract method calls
|
||||||
|
function analyzeParser(filePath: string): Map<string, CallInfo> {
|
||||||
|
const content = readFileSync(filePath, 'utf-8')
|
||||||
|
const lines = content.split('\n')
|
||||||
|
const methods = new Map<string, CallInfo>()
|
||||||
|
|
||||||
|
// Find all method definitions
|
||||||
|
const methodRegex = /^\s*(\w+)\s*\([^)]*\):\s*/
|
||||||
|
|
||||||
|
let currentMethod: string | null = null
|
||||||
|
let braceDepth = 0
|
||||||
|
let classDepth = 0
|
||||||
|
|
||||||
|
for (let i = 0; i < lines.length; i++) {
|
||||||
|
const line = lines[i] || ''
|
||||||
|
|
||||||
|
// Track if we're inside the Parser class
|
||||||
|
if (line.includes('class Parser')) {
|
||||||
|
classDepth = braceDepth + 1 // Will be the depth after we process this line's brace
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for method definition (only inside class, at class level)
|
||||||
|
// Check BEFORE incrementing braceDepth
|
||||||
|
if (classDepth > 0 && braceDepth === classDepth) {
|
||||||
|
const methodMatch = line.match(methodRegex)
|
||||||
|
if (methodMatch && !line.includes('class ')) {
|
||||||
|
currentMethod = methodMatch[1]!
|
||||||
|
methods.set(currentMethod, {
|
||||||
|
method: currentMethod,
|
||||||
|
line: i + 1,
|
||||||
|
calls: new Set()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track brace depth
|
||||||
|
braceDepth += (line.match(/{/g) || []).length
|
||||||
|
braceDepth -= (line.match(/}/g) || []).length
|
||||||
|
|
||||||
|
// Find method calls within current method
|
||||||
|
if (currentMethod && braceDepth > 0) {
|
||||||
|
// Match this.methodName() calls
|
||||||
|
const callRegex = /this\.(\w+)\s*\(/g
|
||||||
|
let match
|
||||||
|
while ((match = callRegex.exec(line)) !== null) {
|
||||||
|
const calledMethod = match[1]!
|
||||||
|
const info = methods.get(currentMethod)!
|
||||||
|
info.calls.add(calledMethod)
|
||||||
|
|
||||||
|
// Mark recursive calls
|
||||||
|
if (calledMethod === currentMethod) {
|
||||||
|
info.isRecursive = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset when method ends
|
||||||
|
if (braceDepth === 0) {
|
||||||
|
currentMethod = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return methods
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build tree structure starting from a root method
|
||||||
|
function buildTree(
|
||||||
|
method: string,
|
||||||
|
callGraph: Map<string, CallInfo>,
|
||||||
|
visited: Set<string>,
|
||||||
|
indent = '',
|
||||||
|
isLast = true,
|
||||||
|
depth = 0,
|
||||||
|
maxDepth = 3
|
||||||
|
): string[] {
|
||||||
|
const lines: string[] = []
|
||||||
|
const info = callGraph.get(method)
|
||||||
|
|
||||||
|
if (!info) return lines
|
||||||
|
|
||||||
|
// Add current method
|
||||||
|
const prefix = depth === 0 ? '' : (isLast ? '└─> ' : '├─> ')
|
||||||
|
const suffix = info.isRecursive ? ' (recursive)' : ''
|
||||||
|
const lineNum = `[line ${info.line}]`
|
||||||
|
lines.push(`${indent}${prefix}${method}() ${lineNum}${suffix}`)
|
||||||
|
|
||||||
|
// Stop if we've reached max depth
|
||||||
|
if (depth >= maxDepth) {
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prevent infinite recursion in tree display
|
||||||
|
if (visited.has(method)) {
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
const newVisited = new Set(visited)
|
||||||
|
newVisited.add(method)
|
||||||
|
|
||||||
|
// Helper methods to filter out (low-level utilities)
|
||||||
|
const helperPatterns = /^(is|next|peek|expect|current|op)/i
|
||||||
|
|
||||||
|
// Get sorted unique calls (filter out recursive self-calls for display)
|
||||||
|
const calls = Array.from(info.calls)
|
||||||
|
.filter(c => callGraph.has(c)) // Only show parser methods
|
||||||
|
.filter(c => c !== method) // Don't show immediate self-recursion
|
||||||
|
.filter(c => !helperPatterns.test(c)) // Filter out helpers
|
||||||
|
.sort()
|
||||||
|
|
||||||
|
// Add children
|
||||||
|
const newIndent = indent + (isLast ? ' ' : '│ ')
|
||||||
|
calls.forEach((call, idx) => {
|
||||||
|
const childLines = buildTree(
|
||||||
|
call,
|
||||||
|
callGraph,
|
||||||
|
newVisited,
|
||||||
|
newIndent,
|
||||||
|
idx === calls.length - 1,
|
||||||
|
depth + 1,
|
||||||
|
maxDepth
|
||||||
|
)
|
||||||
|
lines.push(...childLines)
|
||||||
|
})
|
||||||
|
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
// Main
|
||||||
|
const parserPath = './src/parser/parser2.ts'
|
||||||
|
const maxDepth = parseInt(process.argv[2] || '5')
|
||||||
|
|
||||||
|
console.log('Parser Call Tree for', parserPath)
|
||||||
|
console.log(`Max depth: ${maxDepth}`)
|
||||||
|
console.log('═'.repeat(60))
|
||||||
|
console.log()
|
||||||
|
|
||||||
|
const callGraph = analyzeParser(parserPath)
|
||||||
|
|
||||||
|
// Start from parse() method
|
||||||
|
const tree = buildTree('parse', callGraph, new Set(), '', true, 0, maxDepth)
|
||||||
|
console.log(tree.join('\n'))
|
||||||
|
|
||||||
|
// Show some stats
|
||||||
|
console.log('\n' + '═'.repeat(60))
|
||||||
|
console.log('Stats:')
|
||||||
|
console.log(` Total methods: ${callGraph.size}`)
|
||||||
|
console.log(` Entry point: parse()`)
|
||||||
|
|
||||||
|
// Find methods that are never called (potential dead code or entry points)
|
||||||
|
const allCalled = new Set<string>()
|
||||||
|
for (const info of callGraph.values()) {
|
||||||
|
info.calls.forEach(c => allCalled.add(c))
|
||||||
|
}
|
||||||
|
|
||||||
|
const uncalled = Array.from(callGraph.keys())
|
||||||
|
.filter(m => !allCalled.has(m) && m !== 'parse')
|
||||||
|
.sort()
|
||||||
|
|
||||||
|
if (uncalled.length > 0) {
|
||||||
|
console.log(`\n Uncalled methods: ${uncalled.join(', ')}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find most-called methods
|
||||||
|
const callCount = new Map<string, number>()
|
||||||
|
for (const info of callGraph.values()) {
|
||||||
|
for (const called of info.calls) {
|
||||||
|
callCount.set(called, (callCount.get(called) || 0) + 1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const topCalled = Array.from(callCount.entries())
|
||||||
|
.sort((a, b) => b[1] - a[1])
|
||||||
|
.slice(0, 5)
|
||||||
|
|
||||||
|
console.log(`\n Most-called methods:`)
|
||||||
|
for (const [method, count] of topCalled) {
|
||||||
|
console.log(` ${method}() - called ${count} times`)
|
||||||
|
}
|
||||||
7
bin/repl
7
bin/repl
|
|
@ -7,6 +7,9 @@ import * as readline from 'readline'
|
||||||
import { readFileSync, writeFileSync } from 'fs'
|
import { readFileSync, writeFileSync } from 'fs'
|
||||||
import { basename } from 'path'
|
import { basename } from 'path'
|
||||||
|
|
||||||
|
globals.$.script.name = '(repl)'
|
||||||
|
globals.$.script.path = '(repl)'
|
||||||
|
|
||||||
async function repl() {
|
async function repl() {
|
||||||
const commands = ['/clear', '/reset', '/vars', '/funcs', '/history', '/bytecode', '/exit', '/save', '/quit']
|
const commands = ['/clear', '/reset', '/vars', '/funcs', '/history', '/bytecode', '/exit', '/save', '/quit']
|
||||||
|
|
||||||
|
|
@ -145,7 +148,7 @@ async function repl() {
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const compiler = new Compiler(trimmed, Object.keys(globals))
|
const compiler = new Compiler(trimmed, [...Object.keys(globals), ...vm.vars()])
|
||||||
|
|
||||||
// Save VM state before appending bytecode, in case execution fails
|
// Save VM state before appending bytecode, in case execution fails
|
||||||
const savedInstructions = [...vm.instructions]
|
const savedInstructions = [...vm.instructions]
|
||||||
|
|
@ -235,7 +238,7 @@ async function loadFile(filePath: string): Promise<{ vm: VM; codeHistory: string
|
||||||
if (!trimmed) continue
|
if (!trimmed) continue
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const compiler = new Compiler(trimmed)
|
const compiler = new Compiler(trimmed, [...Object.keys(globals), ...vm.vars()])
|
||||||
vm.appendBytecode(compiler.bytecode)
|
vm.appendBytecode(compiler.bytecode)
|
||||||
await vm.continue()
|
await vm.continue()
|
||||||
codeHistory.push(trimmed)
|
codeHistory.push(trimmed)
|
||||||
|
|
|
||||||
125
bin/shrimp
125
bin/shrimp
|
|
@ -1,57 +1,83 @@
|
||||||
#!/usr/bin/env bun
|
#!/usr/bin/env bun
|
||||||
|
|
||||||
import { Compiler } from '../src/compiler/compiler'
|
import { colors, globals as prelude } from '../src/prelude'
|
||||||
import { colors, globals } from '../src/prelude'
|
import { treeToString2 } from '../src/utils/tree'
|
||||||
import { VM, fromValue, bytecodeToString } from 'reefvm'
|
import { runCode, runFile, compileFile, parseCode } from '../src'
|
||||||
import { readFileSync, writeFileSync, mkdirSync } from 'fs'
|
import { resolve } from 'path'
|
||||||
import { randomUUID } from "crypto"
|
import { bytecodeToString } from 'reefvm'
|
||||||
|
import { readFileSync } from 'fs'
|
||||||
import { spawn } from 'child_process'
|
import { spawn } from 'child_process'
|
||||||
import { join } from 'path'
|
import { join } from 'path'
|
||||||
|
|
||||||
async function runFile(filePath: string) {
|
|
||||||
try {
|
|
||||||
const code = readFileSync(filePath, 'utf-8')
|
|
||||||
const compiler = new Compiler(code, Object.keys(globals))
|
|
||||||
const vm = new VM(compiler.bytecode, globals)
|
|
||||||
await vm.run()
|
|
||||||
return vm.stack.length ? fromValue(vm.stack[vm.stack.length - 1]) : null
|
|
||||||
} catch (error: any) {
|
|
||||||
console.error(`${colors.red}Error:${colors.reset} ${error.message}`)
|
|
||||||
process.exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function compileFile(filePath: string) {
|
|
||||||
try {
|
|
||||||
const code = readFileSync(filePath, 'utf-8')
|
|
||||||
const compiler = new Compiler(code)
|
|
||||||
return bytecodeToString(compiler.bytecode)
|
|
||||||
} catch (error: any) {
|
|
||||||
console.error(`${colors.red}Error:${colors.reset} ${error.message}`)
|
|
||||||
process.exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function showHelp() {
|
function showHelp() {
|
||||||
console.log(`${colors.bright}${colors.magenta}🦐 Shrimp${colors.reset} is a scripting language in a shell.
|
console.log(`${colors.bright}${colors.magenta}🦐 Shrimp${colors.reset} is a scripting language in a shell.
|
||||||
|
|
||||||
${colors.bright}Usage:${colors.reset} shrimp <command> [...args]
|
${colors.bright}Usage:${colors.reset} shrimp <command> [options] [...args]
|
||||||
|
|
||||||
${colors.bright}Commands:${colors.reset}
|
${colors.bright}Commands:${colors.reset}
|
||||||
${colors.cyan}run ${colors.yellow}./my-file.sh${colors.reset} Execute a file with Shrimp
|
${colors.cyan}run ${colors.yellow}./my-file.sh${colors.reset} Execute a file with Shrimp
|
||||||
|
${colors.cyan}parse ${colors.yellow}./my-file.sh${colors.reset} Print parse tree for Shrimp file
|
||||||
${colors.cyan}bytecode ${colors.yellow}./my-file.sh${colors.reset} Print bytecode for Shrimp file
|
${colors.cyan}bytecode ${colors.yellow}./my-file.sh${colors.reset} Print bytecode for Shrimp file
|
||||||
${colors.cyan}eval ${colors.yellow}'some code'${colors.reset} Evaluate a line of Shrimp code
|
${colors.cyan}eval ${colors.yellow}'some code'${colors.reset} Evaluate a line of Shrimp code
|
||||||
|
${colors.cyan}print ${colors.yellow}'some code'${colors.reset} Evaluate a line of Shrimp code and print the result
|
||||||
${colors.cyan}repl${colors.reset} Start REPL
|
${colors.cyan}repl${colors.reset} Start REPL
|
||||||
${colors.cyan}help${colors.reset} Print this help message
|
${colors.cyan}help${colors.reset} Print this help message
|
||||||
${colors.cyan}version${colors.reset} Print version`)
|
${colors.cyan}version${colors.reset} Print version
|
||||||
|
|
||||||
|
${colors.bright}Options:${colors.reset}
|
||||||
|
${colors.cyan}eval -I${colors.reset} ${colors.yellow}<module>${colors.reset} Import module (can be repeated)
|
||||||
|
Example: shrimp -I math -e 'random | echo'
|
||||||
|
Example: shrimp -Imath -Istr -e 'random | echo'`)
|
||||||
}
|
}
|
||||||
|
|
||||||
function showVersion() {
|
function showVersion() {
|
||||||
console.log('🦐 v0.0.1')
|
console.log('🦐 v0.0.1 (non-lezer parser)')
|
||||||
|
}
|
||||||
|
|
||||||
|
async function evalCode(code: string, imports: string[]) {
|
||||||
|
const idx = Bun.argv.indexOf('--')
|
||||||
|
prelude.$.args = idx >= 0 ? Bun.argv.slice(idx + 1) : []
|
||||||
|
|
||||||
|
const importStatement = imports.length > 0 ? `import ${imports.join(' ')}` : ''
|
||||||
|
if (importStatement) code = `${importStatement}; ${code}`
|
||||||
|
return await runCode(code)
|
||||||
}
|
}
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
const args = process.argv.slice(2)
|
let args = process.argv.slice(2)
|
||||||
|
|
||||||
|
if (args.length === 0) {
|
||||||
|
showHelp()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse -I flags for imports (supports both "-I math" and "-Imath")
|
||||||
|
const imports: string[] = []
|
||||||
|
|
||||||
|
while (args.length > 0) {
|
||||||
|
const arg = args[0]
|
||||||
|
|
||||||
|
if (arg === '-I') {
|
||||||
|
// "-I math" format
|
||||||
|
if (args.length < 2) {
|
||||||
|
console.log(`${colors.bright}error: -I requires a module name${colors.reset}`)
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
imports.push(args[1])
|
||||||
|
args = args.slice(2)
|
||||||
|
} else if (arg.startsWith('-I')) {
|
||||||
|
// "-Imath" format
|
||||||
|
const moduleName = arg.slice(2)
|
||||||
|
if (!moduleName) {
|
||||||
|
console.log(`${colors.bright}error: -I requires a module name${colors.reset}`)
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
imports.push(moduleName)
|
||||||
|
args = args.slice(1)
|
||||||
|
} else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (args.length === 0) {
|
if (args.length === 0) {
|
||||||
showHelp()
|
showHelp()
|
||||||
|
|
@ -85,10 +111,18 @@ async function main() {
|
||||||
process.exit(1)
|
process.exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
try { mkdirSync('/tmp/shrimp') } catch { }
|
await evalCode(code, imports)
|
||||||
const path = `/tmp/shrimp/${randomUUID()}.sh`
|
return
|
||||||
writeFileSync(path, code)
|
}
|
||||||
console.log(await runFile(path))
|
|
||||||
|
if (['print', '-print', '--print', '-E'].includes(command)) {
|
||||||
|
const code = args[1]
|
||||||
|
if (!code) {
|
||||||
|
console.log(`${colors.bright}usage: shrimp print <code>${colors.reset}`)
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(await evalCode(code, imports))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -98,7 +132,18 @@ async function main() {
|
||||||
console.log(`${colors.bright}usage: shrimp bytecode <file>${colors.reset}`)
|
console.log(`${colors.bright}usage: shrimp bytecode <file>${colors.reset}`)
|
||||||
process.exit(1)
|
process.exit(1)
|
||||||
}
|
}
|
||||||
console.log(await compileFile(file))
|
console.log(bytecodeToString(compileFile(file)))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (['parse', '-parse', '--parse', '-p'].includes(command)) {
|
||||||
|
const file = args[1]
|
||||||
|
if (!file) {
|
||||||
|
console.log(`${colors.bright}usage: shrimp parse <file>${colors.reset}`)
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
const input = readFileSync(file, 'utf-8')
|
||||||
|
console.log(treeToString2(parseCode(input).topNode, input))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -108,10 +153,12 @@ async function main() {
|
||||||
console.log(`${colors.bright}usage: shrimp run <file>${colors.reset}`)
|
console.log(`${colors.bright}usage: shrimp run <file>${colors.reset}`)
|
||||||
process.exit(1)
|
process.exit(1)
|
||||||
}
|
}
|
||||||
|
prelude.$.script.path = resolve(file)
|
||||||
await runFile(file)
|
await runFile(file)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
prelude.$.script.path = resolve(command)
|
||||||
await runFile(command)
|
await runFile(command)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
42
bun.lock
42
bun.lock
|
|
@ -2,7 +2,7 @@
|
||||||
"lockfileVersion": 1,
|
"lockfileVersion": 1,
|
||||||
"workspaces": {
|
"workspaces": {
|
||||||
"": {
|
"": {
|
||||||
"name": "bun-react-template",
|
"name": "shrimp",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@codemirror/view": "^6.38.3",
|
"@codemirror/view": "^6.38.3",
|
||||||
"@lezer/generator": "^1.8.0",
|
"@lezer/generator": "^1.8.0",
|
||||||
|
|
@ -16,43 +16,45 @@
|
||||||
"@lezer/highlight": "^1.2.1",
|
"@lezer/highlight": "^1.2.1",
|
||||||
"@lezer/lr": "^1.4.2",
|
"@lezer/lr": "^1.4.2",
|
||||||
"@types/bun": "latest",
|
"@types/bun": "latest",
|
||||||
|
"diff": "^8.0.2",
|
||||||
|
"kleur": "^4.1.5",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"packages": {
|
"packages": {
|
||||||
"@codemirror/autocomplete": ["@codemirror/autocomplete@6.19.0", "", { "dependencies": { "@codemirror/language": "^6.0.0", "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.17.0", "@lezer/common": "^1.0.0" } }, "sha512-61Hfv3cF07XvUxNeC3E7jhG8XNi1Yom1G0lRC936oLnlF+jrbrv8rc/J98XlYzcsAoTVupfsf5fLej1aI8kyIg=="],
|
"@codemirror/autocomplete": ["@codemirror/autocomplete@6.19.1", "", { "dependencies": { "@codemirror/language": "^6.0.0", "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.17.0", "@lezer/common": "^1.0.0" } }, "sha512-q6NenYkEy2fn9+JyjIxMWcNjzTL/IhwqfzOut1/G3PrIFkrbl4AL7Wkse5tLrQUUyqGoAKU5+Pi5jnnXxH5HGw=="],
|
||||||
|
|
||||||
"@codemirror/commands": ["@codemirror/commands@6.8.1", "", { "dependencies": { "@codemirror/language": "^6.0.0", "@codemirror/state": "^6.4.0", "@codemirror/view": "^6.27.0", "@lezer/common": "^1.1.0" } }, "sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw=="],
|
"@codemirror/commands": ["@codemirror/commands@6.10.0", "", { "dependencies": { "@codemirror/language": "^6.0.0", "@codemirror/state": "^6.4.0", "@codemirror/view": "^6.27.0", "@lezer/common": "^1.1.0" } }, "sha512-2xUIc5mHXQzT16JnyOFkh8PvfeXuIut3pslWGfsGOhxP/lpgRm9HOl/mpzLErgt5mXDovqA0d11P21gofRLb9w=="],
|
||||||
|
|
||||||
"@codemirror/language": ["@codemirror/language@6.11.3", "", { "dependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.23.0", "@lezer/common": "^1.1.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0", "style-mod": "^4.0.0" } }, "sha512-9HBM2XnwDj7fnu0551HkGdrUrrqmYq/WC5iv6nbY2WdicXdGbhR/gfbZOH73Aqj4351alY1+aoG9rCNfiwS1RA=="],
|
"@codemirror/language": ["@codemirror/language@6.11.3", "", { "dependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.23.0", "@lezer/common": "^1.1.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0", "style-mod": "^4.0.0" } }, "sha512-9HBM2XnwDj7fnu0551HkGdrUrrqmYq/WC5iv6nbY2WdicXdGbhR/gfbZOH73Aqj4351alY1+aoG9rCNfiwS1RA=="],
|
||||||
|
|
||||||
"@codemirror/lint": ["@codemirror/lint@6.8.5", "", { "dependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.35.0", "crelt": "^1.0.5" } }, "sha512-s3n3KisH7dx3vsoeGMxsbRAgKe4O1vbrnKBClm99PU0fWxmxsx5rR2PfqQgIt+2MMJBHbiJ5rfIdLYfB9NNvsA=="],
|
"@codemirror/lint": ["@codemirror/lint@6.9.2", "", { "dependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.35.0", "crelt": "^1.0.5" } }, "sha512-sv3DylBiIyi+xKwRCJAAsBZZZWo82shJ/RTMymLabAdtbkV5cSKwWDeCgtUq3v8flTaXS2y1kKkICuRYtUswyQ=="],
|
||||||
|
|
||||||
"@codemirror/search": ["@codemirror/search@6.5.11", "", { "dependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.0.0", "crelt": "^1.0.5" } }, "sha512-KmWepDE6jUdL6n8cAAqIpRmLPBZ5ZKnicE8oGU/s3QrAVID+0VhLFrzUucVKHG5035/BSykhExDL/Xm7dHthiA=="],
|
"@codemirror/search": ["@codemirror/search@6.5.11", "", { "dependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.0.0", "crelt": "^1.0.5" } }, "sha512-KmWepDE6jUdL6n8cAAqIpRmLPBZ5ZKnicE8oGU/s3QrAVID+0VhLFrzUucVKHG5035/BSykhExDL/Xm7dHthiA=="],
|
||||||
|
|
||||||
"@codemirror/state": ["@codemirror/state@6.5.2", "", { "dependencies": { "@marijn/find-cluster-break": "^1.0.0" } }, "sha512-FVqsPqtPWKVVL3dPSxy8wEF/ymIEuVzF1PK3VbUgrxXpJUSHQWWZz4JMToquRxnkw+36LTamCZG2iua2Ptq0fA=="],
|
"@codemirror/state": ["@codemirror/state@6.5.2", "", { "dependencies": { "@marijn/find-cluster-break": "^1.0.0" } }, "sha512-FVqsPqtPWKVVL3dPSxy8wEF/ymIEuVzF1PK3VbUgrxXpJUSHQWWZz4JMToquRxnkw+36LTamCZG2iua2Ptq0fA=="],
|
||||||
|
|
||||||
"@codemirror/view": ["@codemirror/view@6.38.3", "", { "dependencies": { "@codemirror/state": "^6.5.0", "crelt": "^1.0.6", "style-mod": "^4.1.0", "w3c-keyname": "^2.2.4" } }, "sha512-x2t87+oqwB1mduiQZ6huIghjMt4uZKFEdj66IcXw7+a5iBEvv9lh7EWDRHI7crnD4BMGpnyq/RzmCGbiEZLcvQ=="],
|
"@codemirror/view": ["@codemirror/view@6.38.6", "", { "dependencies": { "@codemirror/state": "^6.5.0", "crelt": "^1.0.6", "style-mod": "^4.1.0", "w3c-keyname": "^2.2.4" } }, "sha512-qiS0z1bKs5WOvHIAC0Cybmv4AJSkAXgX5aD6Mqd2epSLlVJsQl8NG23jCVouIgkh4All/mrbdsf2UOLFnJw0tw=="],
|
||||||
|
|
||||||
"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],
|
"@lezer/common": ["@lezer/common@1.3.0", "", {}, "sha512-L9X8uHCYU310o99L3/MpJKYxPzXPOS7S0NmBaM7UO/x2Kb2WbmMLSkfvdr1KxRIFYOpbY0Jhn7CfLSUDzL8arQ=="],
|
||||||
|
|
||||||
"@lezer/generator": ["@lezer/generator@1.8.0", "", { "dependencies": { "@lezer/common": "^1.1.0", "@lezer/lr": "^1.3.0" }, "bin": { "lezer-generator": "src/lezer-generator.cjs" } }, "sha512-/SF4EDWowPqV1jOgoGSGTIFsE7Ezdr7ZYxyihl5eMKVO5tlnpIhFcDavgm1hHY5GEonoOAEnJ0CU0x+tvuAuUg=="],
|
"@lezer/generator": ["@lezer/generator@1.8.0", "", { "dependencies": { "@lezer/common": "^1.1.0", "@lezer/lr": "^1.3.0" }, "bin": { "lezer-generator": "src/lezer-generator.cjs" } }, "sha512-/SF4EDWowPqV1jOgoGSGTIFsE7Ezdr7ZYxyihl5eMKVO5tlnpIhFcDavgm1hHY5GEonoOAEnJ0CU0x+tvuAuUg=="],
|
||||||
|
|
||||||
"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],
|
"@lezer/highlight": ["@lezer/highlight@1.2.3", "", { "dependencies": { "@lezer/common": "^1.3.0" } }, "sha512-qXdH7UqTvGfdVBINrgKhDsVTJTxactNNxLk7+UMwZhU13lMHaOBlJe9Vqp907ya56Y3+ed2tlqzys7jDkTmW0g=="],
|
||||||
|
|
||||||
"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],
|
"@lezer/lr": ["@lezer/lr@1.4.3", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-yenN5SqAxAPv/qMnpWW0AT7l+SxVrgG+u0tNsRQWqbrz66HIl8DnEbBObvy21J5K7+I1v7gsAnlE2VQ5yYVSeA=="],
|
||||||
|
|
||||||
"@marijn/find-cluster-break": ["@marijn/find-cluster-break@1.0.2", "", {}, "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g=="],
|
"@marijn/find-cluster-break": ["@marijn/find-cluster-break@1.0.2", "", {}, "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g=="],
|
||||||
|
|
||||||
"@types/bun": ["@types/bun@1.2.22", "", { "dependencies": { "bun-types": "1.2.22" } }, "sha512-5A/KrKos2ZcN0c6ljRSOa1fYIyCKhZfIVYeuyb4snnvomnpFqC0tTsEkdqNxbAgExV384OETQ//WAjl3XbYqQA=="],
|
"@types/bun": ["@types/bun@1.3.2", "", { "dependencies": { "bun-types": "1.3.2" } }, "sha512-t15P7k5UIgHKkxwnMNkJbWlh/617rkDGEdSsDbu+qNHTaz9SKf7aC8fiIlUdD5RPpH6GEkP0cK7WlvmrEBRtWg=="],
|
||||||
|
|
||||||
"@types/node": ["@types/node@24.5.2", "", { "dependencies": { "undici-types": "~7.12.0" } }, "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ=="],
|
"@types/node": ["@types/node@24.10.0", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-qzQZRBqkFsYyaSWXuEHc2WR9c0a0CXwiE5FWUvn7ZM+vdy1uZLfCunD38UzhuB7YN/J11ndbDBcTmOdxJo9Q7A=="],
|
||||||
|
|
||||||
"@types/react": ["@types/react@19.1.13", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-hHkbU/eoO3EG5/MZkuFSKmYqPbSVk5byPFa3e7y/8TybHiLMACgI8seVYlicwk7H5K/rI2px9xrQp/C+AUDTiQ=="],
|
"@types/react": ["@types/react@19.2.2", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA=="],
|
||||||
|
|
||||||
"bun-plugin-tailwind": ["bun-plugin-tailwind@0.0.15", "", { "peerDependencies": { "typescript": "^5.0.0" } }, "sha512-qtAXMNGG4R0UGGI8zWrqm2B7BdXqx48vunJXBPzfDOHPA5WkRUZdTSbE7TFwO4jLhYqSE23YMWsM9NhE6ovobw=="],
|
"bun-plugin-tailwind": ["bun-plugin-tailwind@0.0.15", "", { "peerDependencies": { "typescript": "^5.0.0" } }, "sha512-qtAXMNGG4R0UGGI8zWrqm2B7BdXqx48vunJXBPzfDOHPA5WkRUZdTSbE7TFwO4jLhYqSE23YMWsM9NhE6ovobw=="],
|
||||||
|
|
||||||
"bun-types": ["bun-types@1.2.22", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-hwaAu8tct/Zn6Zft4U9BsZcXkYomzpHJX28ofvx7k0Zz2HNz54n1n+tDgxoWFGB4PcFvJXJQloPhaV2eP3Q6EA=="],
|
"bun-types": ["bun-types@1.3.2", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-i/Gln4tbzKNuxP70OWhJRZz1MRfvqExowP7U6JKoI8cntFrtxg7RJK3jvz7wQW54UuvNC8tbKHHri5fy74FVqg=="],
|
||||||
|
|
||||||
"codemirror": ["codemirror@6.0.2", "", { "dependencies": { "@codemirror/autocomplete": "^6.0.0", "@codemirror/commands": "^6.0.0", "@codemirror/language": "^6.0.0", "@codemirror/lint": "^6.0.0", "@codemirror/search": "^6.0.0", "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.0.0" } }, "sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw=="],
|
"codemirror": ["codemirror@6.0.2", "", { "dependencies": { "@codemirror/autocomplete": "^6.0.0", "@codemirror/commands": "^6.0.0", "@codemirror/language": "^6.0.0", "@codemirror/lint": "^6.0.0", "@codemirror/search": "^6.0.0", "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.0.0" } }, "sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw=="],
|
||||||
|
|
||||||
|
|
@ -60,17 +62,21 @@
|
||||||
|
|
||||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||||
|
|
||||||
"hono": ["hono@4.9.8", "", {}, "sha512-JW8Bb4RFWD9iOKxg5PbUarBYGM99IcxFl2FPBo2gSJO11jjUDqlP1Bmfyqt8Z/dGhIQ63PMA9LdcLefXyIasyg=="],
|
"diff": ["diff@8.0.2", "", {}, "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg=="],
|
||||||
|
|
||||||
"reefvm": ["reefvm@git+https://git.nose.space/defunkt/reefvm#c69b172c78853756ec8acba5bc33d93eb6a571c6", { "peerDependencies": { "typescript": "^5" } }, "c69b172c78853756ec8acba5bc33d93eb6a571c6"],
|
"hono": ["hono@4.10.4", "", {}, "sha512-YG/fo7zlU3KwrBL5vDpWKisLYiM+nVstBQqfr7gCPbSYURnNEP9BDxEMz8KfsDR9JX0lJWDRNc6nXX31v7ZEyg=="],
|
||||||
|
|
||||||
"style-mod": ["style-mod@4.1.2", "", {}, "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw=="],
|
"kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
|
||||||
|
|
||||||
"tailwindcss": ["tailwindcss@4.1.13", "", {}, "sha512-i+zidfmTqtwquj4hMEwdjshYYgMbOrPzb9a0M3ZgNa0JMoZeFC6bxZvO8yr8ozS6ix2SDz0+mvryPeBs2TFE+w=="],
|
"reefvm": ["reefvm@git+https://git.nose.space/defunkt/reefvm#3e2e68b31f504347225a4d705c7568a0957d629e", { "peerDependencies": { "typescript": "^5" } }, "3e2e68b31f504347225a4d705c7568a0957d629e"],
|
||||||
|
|
||||||
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
|
"style-mod": ["style-mod@4.1.3", "", {}, "sha512-i/n8VsZydrugj3Iuzll8+x/00GH2vnYsk1eomD8QiRrSAeW6ItbCQDtfXCeJHd0iwiNagqjQkvpvREEPtW3IoQ=="],
|
||||||
|
|
||||||
"undici-types": ["undici-types@7.12.0", "", {}, "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ=="],
|
"tailwindcss": ["tailwindcss@4.1.17", "", {}, "sha512-j9Ee2YjuQqYT9bbRTfTZht9W/ytp5H+jJpZKiYdP/bpnXARAuELt9ofP0lPnmHjbga7SNQIxdTAXCmtKVYjN+Q=="],
|
||||||
|
|
||||||
|
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||||
|
|
||||||
|
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
|
||||||
|
|
||||||
"w3c-keyname": ["w3c-keyname@2.2.8", "", {}, "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ=="],
|
"w3c-keyname": ["w3c-keyname@2.2.8", "", {}, "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ=="],
|
||||||
}
|
}
|
||||||
|
|
|
||||||
18
examples/d20.sh
Normal file
18
examples/d20.sh
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
#!/usr/bin/env shrimp
|
||||||
|
# usage: dice <sides>
|
||||||
|
|
||||||
|
import math only=random
|
||||||
|
import list only=first
|
||||||
|
import str only=[replace starts-with?]
|
||||||
|
|
||||||
|
sides = $.args | first
|
||||||
|
sides ??= 20
|
||||||
|
|
||||||
|
if sides | starts-with? d:
|
||||||
|
sides = replace sides //\D// ''
|
||||||
|
end
|
||||||
|
|
||||||
|
sides = number sides
|
||||||
|
|
||||||
|
echo 'Rolling d$sides...'
|
||||||
|
random 1 sides | echo
|
||||||
1
examples/find.shrimp
Normal file
1
examples/find.shrimp
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
echo
|
||||||
31
examples/license.sh
Normal file
31
examples/license.sh
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
#!/usr/bin/env shrimp
|
||||||
|
|
||||||
|
year = date.now | date.year
|
||||||
|
project = fs.pwd | fs.basename | str.titlecase
|
||||||
|
|
||||||
|
{
|
||||||
|
|
||||||
|
Copyright $year $project Authors
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the “Software”), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||||
|
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||||
|
IN THE SOFTWARE.
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
| str.trim
|
||||||
|
| echo
|
||||||
39
examples/password.sh
Normal file
39
examples/password.sh
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
#!/usr/bin/env shrimp
|
||||||
|
# usage: password <length> [!spaced] [!symbols]
|
||||||
|
|
||||||
|
if ($.args | list.contains? -h):
|
||||||
|
echo 'usage: password <length> [!spaced] [!symbols]'
|
||||||
|
exit
|
||||||
|
end
|
||||||
|
|
||||||
|
password = do n=22 symbols=true spaced=true:
|
||||||
|
chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
|
||||||
|
if symbols: chars += '!@#%^&*-=()[]<>' end
|
||||||
|
|
||||||
|
out = []
|
||||||
|
i = 0
|
||||||
|
max = length chars
|
||||||
|
|
||||||
|
while i < n:
|
||||||
|
idx = math.floor ((math.random) * max)
|
||||||
|
ch = chars | at idx
|
||||||
|
list.push out ch
|
||||||
|
i += 1
|
||||||
|
end
|
||||||
|
|
||||||
|
if spaced:
|
||||||
|
pos1 = math.floor((n - 2) / 3)
|
||||||
|
pos2 = math.floor((n - 2) * 2 / 3)
|
||||||
|
|
||||||
|
list.insert out pos2 ' '
|
||||||
|
list.insert out pos1 ' '
|
||||||
|
end
|
||||||
|
|
||||||
|
str.join out ''
|
||||||
|
end
|
||||||
|
|
||||||
|
missing-arg? = do x: $.args | list.contains? x | not end
|
||||||
|
|
||||||
|
num = $.args | list.reject (do x: x | str.starts-with? ! end) | list.first
|
||||||
|
|
||||||
|
password num symbols=(missing-arg? !symbols) spaced=(missing-arg? !spaced) | echo
|
||||||
9
examples/scripts.sh
Normal file
9
examples/scripts.sh
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
#!/usr/bin/env shrimp
|
||||||
|
|
||||||
|
if not fs.exists? 'package.json':
|
||||||
|
echo '🦐 package.json not found'
|
||||||
|
exit 1
|
||||||
|
end
|
||||||
|
|
||||||
|
package = fs.read 'package.json' | json.decode
|
||||||
|
package.scripts | dict.keys | list.sort | each do x: echo x end
|
||||||
19
package.json
19
package.json
|
|
@ -1,13 +1,16 @@
|
||||||
{
|
{
|
||||||
"name": "bun-react-template",
|
"name": "shrimp",
|
||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
|
"exports": "./src/index.ts",
|
||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "bun generate-parser && bun --hot src/server/server.tsx",
|
"dev": "bun --hot src/server/server.tsx",
|
||||||
"generate-parser": "lezer-generator src/parser/shrimp.grammar --typeScript -o src/parser/shrimp.ts",
|
"repl": "bun bin/repl",
|
||||||
"repl": "bun generate-parser && bun bin/repl",
|
"update-reef": "rm -rf ~/.bun/install/cache/ && rm bun.lock && bun update reefvm",
|
||||||
"update-reef": "rm -rf ~/.bun/install/cache/ && bun update reefvm"
|
"cli:install": "ln -s \"$(pwd)/bin/shrimp\" ~/.bun/bin/shrimp",
|
||||||
|
"cli:remove": "rm ~/.bun/bin/shrimp",
|
||||||
|
"check": "bunx tsc --noEmit"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@codemirror/view": "^6.38.3",
|
"@codemirror/view": "^6.38.3",
|
||||||
|
|
@ -21,11 +24,13 @@
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@lezer/highlight": "^1.2.1",
|
"@lezer/highlight": "^1.2.1",
|
||||||
"@lezer/lr": "^1.4.2",
|
"@lezer/lr": "^1.4.2",
|
||||||
"@types/bun": "latest"
|
"@types/bun": "latest",
|
||||||
|
"diff": "^8.0.2",
|
||||||
|
"kleur": "^4.1.5"
|
||||||
},
|
},
|
||||||
"prettier": {
|
"prettier": {
|
||||||
"semi": false,
|
"semi": false,
|
||||||
"singleQuote": true,
|
"singleQuote": true,
|
||||||
"printWidth": 100
|
"printWidth": 100
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -1,14 +1,14 @@
|
||||||
import { CompilerError } from '#compiler/compilerError.ts'
|
import { CompilerError } from '#compiler/compilerError.ts'
|
||||||
import { parser } from '#parser/shrimp.ts'
|
import { parse, setGlobals } from '#parser/parser2'
|
||||||
import * as terms from '#parser/shrimp.terms'
|
import { SyntaxNode, Tree } from '#parser/node'
|
||||||
import { setGlobals } from '#parser/tokenizer'
|
import { tokenizeCurlyString } from '#parser/curlyTokenizer'
|
||||||
import type { SyntaxNode, Tree } from '@lezer/common'
|
|
||||||
import { assert, errorMessage } from '#utils/utils'
|
import { assert, errorMessage } from '#utils/utils'
|
||||||
import { toBytecode, type Bytecode, type ProgramItem, bytecodeToString } from 'reefvm'
|
import { toBytecode, type Bytecode, type ProgramItem, bytecodeToString } from 'reefvm'
|
||||||
import {
|
import {
|
||||||
checkTreeForErrors,
|
checkTreeForErrors,
|
||||||
getAllChildren,
|
getAllChildren,
|
||||||
getAssignmentParts,
|
getAssignmentParts,
|
||||||
|
getCompoundAssignmentParts,
|
||||||
getBinaryParts,
|
getBinaryParts,
|
||||||
getDotGetParts,
|
getDotGetParts,
|
||||||
getFunctionCallParts,
|
getFunctionCallParts,
|
||||||
|
|
@ -17,6 +17,7 @@ import {
|
||||||
getNamedArgParts,
|
getNamedArgParts,
|
||||||
getPipeExprParts,
|
getPipeExprParts,
|
||||||
getStringParts,
|
getStringParts,
|
||||||
|
getTryExprParts,
|
||||||
} from '#compiler/utils'
|
} from '#compiler/utils'
|
||||||
|
|
||||||
const DEBUG = false
|
const DEBUG = false
|
||||||
|
|
@ -49,15 +50,19 @@ function processEscapeSeq(escapeSeq: string): string {
|
||||||
|
|
||||||
export class Compiler {
|
export class Compiler {
|
||||||
instructions: ProgramItem[] = []
|
instructions: ProgramItem[] = []
|
||||||
|
labelCount = 0
|
||||||
fnLabelCount = 0
|
fnLabelCount = 0
|
||||||
ifLabelCount = 0
|
ifLabelCount = 0
|
||||||
|
tryLabelCount = 0
|
||||||
|
loopLabelCount = 0
|
||||||
bytecode: Bytecode
|
bytecode: Bytecode
|
||||||
pipeCounter = 0
|
pipeCounter = 0
|
||||||
|
|
||||||
constructor(public input: string, globals?: string[]) {
|
constructor(public input: string, globals?: string[] | Record<string, any>) {
|
||||||
try {
|
try {
|
||||||
if (globals) setGlobals(globals)
|
if (globals) setGlobals(Array.isArray(globals) ? globals : Object.keys(globals))
|
||||||
const cst = parser.parse(input)
|
const ast = parse(input)
|
||||||
|
const cst = new Tree(ast)
|
||||||
const errors = checkTreeForErrors(cst)
|
const errors = checkTreeForErrors(cst)
|
||||||
|
|
||||||
const firstError = errors[0]
|
const firstError = errors[0]
|
||||||
|
|
@ -71,6 +76,7 @@ export class Compiler {
|
||||||
if (DEBUG) {
|
if (DEBUG) {
|
||||||
const bytecodeString = bytecodeToString(this.bytecode)
|
const bytecodeString = bytecodeToString(this.bytecode)
|
||||||
console.log(`\n🤖 bytecode:\n----------------\n${bytecodeString}\n\n`)
|
console.log(`\n🤖 bytecode:\n----------------\n${bytecodeString}\n\n`)
|
||||||
|
console.log(`\n🤖 bytecode:\n----------------\n${this.instructions}\n\n`)
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof CompilerError) {
|
if (error instanceof CompilerError) {
|
||||||
|
|
@ -82,7 +88,7 @@ export class Compiler {
|
||||||
}
|
}
|
||||||
|
|
||||||
#compileCst(cst: Tree, input: string) {
|
#compileCst(cst: Tree, input: string) {
|
||||||
const isProgram = cst.topNode.type.id === terms.Program
|
const isProgram = cst.topNode.type.is('Program')
|
||||||
assert(isProgram, `Expected Program node, got ${cst.topNode.type.name}`)
|
assert(isProgram, `Expected Program node, got ${cst.topNode.type.name}`)
|
||||||
|
|
||||||
let child = cst.topNode.firstChild
|
let child = cst.topNode.firstChild
|
||||||
|
|
@ -98,15 +104,28 @@ export class Compiler {
|
||||||
const value = input.slice(node.from, node.to)
|
const value = input.slice(node.from, node.to)
|
||||||
if (DEBUG) console.log(`🫦 ${node.name}: ${value}`)
|
if (DEBUG) console.log(`🫦 ${node.name}: ${value}`)
|
||||||
|
|
||||||
switch (node.type.id) {
|
switch (node.type.name) {
|
||||||
case terms.Number:
|
case 'Number':
|
||||||
const number = Number(value)
|
// Handle sign prefix for hex, binary, and octal literals
|
||||||
if (Number.isNaN(number))
|
// Number() doesn't parse '-0xFF', '+0xFF', '-0o77', etc. correctly
|
||||||
|
let numberValue: number
|
||||||
|
if (value.startsWith('-') && (value.includes('0x') || value.includes('0b') || value.includes('0o'))) {
|
||||||
|
numberValue = -Number(value.slice(1))
|
||||||
|
} else if (value.startsWith('+') && (value.includes('0x') || value.includes('0b') || value.includes('0o'))) {
|
||||||
|
numberValue = Number(value.slice(1))
|
||||||
|
} else {
|
||||||
|
numberValue = Number(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Number.isNaN(numberValue))
|
||||||
throw new CompilerError(`Invalid number literal: ${value}`, node.from, node.to)
|
throw new CompilerError(`Invalid number literal: ${value}`, node.from, node.to)
|
||||||
|
|
||||||
return [[`PUSH`, number]]
|
return [[`PUSH`, numberValue]]
|
||||||
|
|
||||||
|
case 'String': {
|
||||||
|
if (node.firstChild?.type.is('CurlyString'))
|
||||||
|
return this.#compileCurlyString(value, input)
|
||||||
|
|
||||||
case terms.String: {
|
|
||||||
const { parts, hasInterpolation } = getStringParts(node, input)
|
const { parts, hasInterpolation } = getStringParts(node, input)
|
||||||
|
|
||||||
// Simple string without interpolation or escapes - extract text directly
|
// Simple string without interpolation or escapes - extract text directly
|
||||||
|
|
@ -121,19 +140,19 @@ export class Compiler {
|
||||||
parts.forEach((part) => {
|
parts.forEach((part) => {
|
||||||
const partValue = input.slice(part.from, part.to)
|
const partValue = input.slice(part.from, part.to)
|
||||||
|
|
||||||
switch (part.type.id) {
|
switch (part.type.name) {
|
||||||
case terms.StringFragment:
|
case 'StringFragment':
|
||||||
// Plain text fragment - just push as-is
|
// Plain text fragment - just push as-is
|
||||||
instructions.push(['PUSH', partValue])
|
instructions.push(['PUSH', partValue])
|
||||||
break
|
break
|
||||||
|
|
||||||
case terms.EscapeSeq:
|
case 'EscapeSeq':
|
||||||
// Process escape sequence and push the result
|
// Process escape sequence and push the result
|
||||||
const processed = processEscapeSeq(partValue)
|
const processed = processEscapeSeq(partValue)
|
||||||
instructions.push(['PUSH', processed])
|
instructions.push(['PUSH', processed])
|
||||||
break
|
break
|
||||||
|
|
||||||
case terms.Interpolation:
|
case 'Interpolation':
|
||||||
// Interpolation contains either Identifier or ParenExpr (the $ is anonymous)
|
// Interpolation contains either Identifier or ParenExpr (the $ is anonymous)
|
||||||
const child = part.firstChild
|
const child = part.firstChild
|
||||||
if (!child) {
|
if (!child) {
|
||||||
|
|
@ -157,15 +176,15 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.Boolean: {
|
case 'Boolean': {
|
||||||
return [[`PUSH`, value === 'true']]
|
return [[`PUSH`, value === 'true']]
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.Null: {
|
case 'Null': {
|
||||||
return [[`PUSH`, null]]
|
return [[`PUSH`, null]]
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.Regex: {
|
case 'Regex': {
|
||||||
// remove the surrounding slashes and any flags
|
// remove the surrounding slashes and any flags
|
||||||
const [_, pattern, flags] = value.match(/^\/\/(.*)\/\/([gimsuy]*)$/) || []
|
const [_, pattern, flags] = value.match(/^\/\/(.*)\/\/([gimsuy]*)$/) || []
|
||||||
if (!pattern) {
|
if (!pattern) {
|
||||||
|
|
@ -182,29 +201,50 @@ export class Compiler {
|
||||||
return [['PUSH', regex]]
|
return [['PUSH', regex]]
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.Identifier: {
|
case 'Identifier': {
|
||||||
return [[`TRY_LOAD`, value]]
|
return [[`TRY_LOAD`, value]]
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.Word: {
|
case 'Word': {
|
||||||
return [['PUSH', value]]
|
return [['PUSH', value]]
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.DotGet: {
|
case 'DotGet': {
|
||||||
|
// DotGet is parsed into a nested tree because it's hard to parse it into a flat one.
|
||||||
|
// However, we want a flat tree - so we're going to pretend like we are getting one from the parser.
|
||||||
|
//
|
||||||
|
// This: DotGet(config, DotGet(script, name))
|
||||||
|
// Becomes: DotGet(config, script, name)
|
||||||
const { objectName, property } = getDotGetParts(node, input)
|
const { objectName, property } = getDotGetParts(node, input)
|
||||||
const instructions: ProgramItem[] = []
|
const instructions: ProgramItem[] = []
|
||||||
|
|
||||||
instructions.push(['TRY_LOAD', objectName])
|
instructions.push(['TRY_LOAD', objectName])
|
||||||
if (property.type.id === terms.ParenExpr) {
|
|
||||||
instructions.push(...this.#compileNode(property, input))
|
const flattenProperty = (prop: SyntaxNode): void => {
|
||||||
} else {
|
if (prop.type.is('DotGet')) {
|
||||||
const propertyValue = input.slice(property.from, property.to)
|
const nestedParts = getDotGetParts(prop, input)
|
||||||
instructions.push(['PUSH', propertyValue])
|
|
||||||
|
const nestedObjectValue = input.slice(nestedParts.object.from, nestedParts.object.to)
|
||||||
|
instructions.push(['PUSH', nestedObjectValue])
|
||||||
|
instructions.push(['DOT_GET'])
|
||||||
|
|
||||||
|
flattenProperty(nestedParts.property)
|
||||||
|
} else {
|
||||||
|
if (prop.type.is('ParenExpr')) {
|
||||||
|
instructions.push(...this.#compileNode(prop, input))
|
||||||
|
} else {
|
||||||
|
const propertyValue = input.slice(prop.from, prop.to)
|
||||||
|
instructions.push(['PUSH', propertyValue])
|
||||||
|
}
|
||||||
|
instructions.push(['DOT_GET'])
|
||||||
|
}
|
||||||
}
|
}
|
||||||
instructions.push(['DOT_GET'])
|
|
||||||
|
flattenProperty(property)
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.BinOp: {
|
case 'BinOp': {
|
||||||
const { left, op, right } = getBinaryParts(node)
|
const { left, op, right } = getBinaryParts(node)
|
||||||
const instructions: ProgramItem[] = []
|
const instructions: ProgramItem[] = []
|
||||||
instructions.push(...this.#compileNode(left, input))
|
instructions.push(...this.#compileNode(left, input))
|
||||||
|
|
@ -227,6 +267,24 @@ export class Compiler {
|
||||||
case '%':
|
case '%':
|
||||||
instructions.push(['MOD'])
|
instructions.push(['MOD'])
|
||||||
break
|
break
|
||||||
|
case 'band':
|
||||||
|
instructions.push(['BIT_AND'])
|
||||||
|
break
|
||||||
|
case 'bor':
|
||||||
|
instructions.push(['BIT_OR'])
|
||||||
|
break
|
||||||
|
case 'bxor':
|
||||||
|
instructions.push(['BIT_XOR'])
|
||||||
|
break
|
||||||
|
case '<<':
|
||||||
|
instructions.push(['BIT_SHL'])
|
||||||
|
break
|
||||||
|
case '>>':
|
||||||
|
instructions.push(['BIT_SHR'])
|
||||||
|
break
|
||||||
|
case '>>>':
|
||||||
|
instructions.push(['BIT_USHR'])
|
||||||
|
break
|
||||||
default:
|
default:
|
||||||
throw new CompilerError(`Unsupported binary operator: ${opValue}`, op.from, op.to)
|
throw new CompilerError(`Unsupported binary operator: ${opValue}`, op.from, op.to)
|
||||||
}
|
}
|
||||||
|
|
@ -234,7 +292,7 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.Assign: {
|
case 'Assign': {
|
||||||
const assignParts = getAssignmentParts(node)
|
const assignParts = getAssignmentParts(node)
|
||||||
const instructions: ProgramItem[] = []
|
const instructions: ProgramItem[] = []
|
||||||
|
|
||||||
|
|
@ -265,15 +323,78 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.ParenExpr: {
|
case 'CompoundAssign': {
|
||||||
|
const { identifier, operator, right } = getCompoundAssignmentParts(node)
|
||||||
|
const identifierName = input.slice(identifier.from, identifier.to)
|
||||||
|
const instructions: ProgramItem[] = []
|
||||||
|
const opValue = input.slice(operator.from, operator.to)
|
||||||
|
|
||||||
|
// Special handling for ??= since it needs conditional evaluation
|
||||||
|
if (opValue === '??=') {
|
||||||
|
instructions.push(['LOAD', identifierName])
|
||||||
|
|
||||||
|
const skipLabel: Label = `.skip_${this.labelCount++}`
|
||||||
|
const rightInstructions = this.#compileNode(right, input)
|
||||||
|
|
||||||
|
instructions.push(['DUP'])
|
||||||
|
instructions.push(['PUSH', null])
|
||||||
|
instructions.push(['NEQ'])
|
||||||
|
instructions.push(['JUMP_IF_TRUE', skipLabel])
|
||||||
|
instructions.push(['POP'])
|
||||||
|
instructions.push(...rightInstructions)
|
||||||
|
|
||||||
|
instructions.push([`${skipLabel}:`])
|
||||||
|
instructions.push(['DUP'])
|
||||||
|
instructions.push(['STORE', identifierName])
|
||||||
|
|
||||||
|
return instructions
|
||||||
|
}
|
||||||
|
|
||||||
|
// Standard compound assignments: evaluate both sides, then operate
|
||||||
|
instructions.push(['LOAD', identifierName]) // will throw if undefined
|
||||||
|
instructions.push(...this.#compileNode(right, input))
|
||||||
|
|
||||||
|
switch (opValue) {
|
||||||
|
case '+=':
|
||||||
|
instructions.push(['ADD'])
|
||||||
|
break
|
||||||
|
case '-=':
|
||||||
|
instructions.push(['SUB'])
|
||||||
|
break
|
||||||
|
case '*=':
|
||||||
|
instructions.push(['MUL'])
|
||||||
|
break
|
||||||
|
case '/=':
|
||||||
|
instructions.push(['DIV'])
|
||||||
|
break
|
||||||
|
case '%=':
|
||||||
|
instructions.push(['MOD'])
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
throw new CompilerError(
|
||||||
|
`Unknown compound operator: ${opValue}`,
|
||||||
|
operator.from,
|
||||||
|
operator.to
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DUP and store (same as regular assignment)
|
||||||
|
instructions.push(['DUP'])
|
||||||
|
instructions.push(['STORE', identifierName])
|
||||||
|
|
||||||
|
return instructions
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'ParenExpr': {
|
||||||
const child = node.firstChild
|
const child = node.firstChild
|
||||||
if (!child) return [] // I guess it is empty parentheses?
|
if (!child) return [] // I guess it is empty parentheses?
|
||||||
|
|
||||||
return this.#compileNode(child, input)
|
return this.#compileNode(child, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.FunctionDef: {
|
case 'FunctionDef': {
|
||||||
const { paramNames, bodyNodes } = getFunctionDefParts(node, input)
|
const { paramNames, bodyNodes, catchVariable, catchBody, finallyBody } =
|
||||||
|
getFunctionDefParts(node, input)
|
||||||
const instructions: ProgramItem[] = []
|
const instructions: ProgramItem[] = []
|
||||||
const functionLabel: Label = `.func_${this.fnLabelCount++}`
|
const functionLabel: Label = `.func_${this.fnLabelCount++}`
|
||||||
const afterLabel: Label = `.after_${functionLabel}`
|
const afterLabel: Label = `.after_${functionLabel}`
|
||||||
|
|
@ -281,9 +402,33 @@ export class Compiler {
|
||||||
instructions.push(['JUMP', afterLabel])
|
instructions.push(['JUMP', afterLabel])
|
||||||
|
|
||||||
instructions.push([`${functionLabel}:`])
|
instructions.push([`${functionLabel}:`])
|
||||||
bodyNodes.forEach((bodyNode) => {
|
|
||||||
instructions.push(...this.#compileNode(bodyNode, input))
|
const compileFunctionBody = () => {
|
||||||
})
|
const bodyInstructions: ProgramItem[] = []
|
||||||
|
bodyNodes.forEach((bodyNode, index) => {
|
||||||
|
bodyInstructions.push(...this.#compileNode(bodyNode, input))
|
||||||
|
if (index < bodyNodes.length - 1) {
|
||||||
|
bodyInstructions.push(['POP'])
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return bodyInstructions
|
||||||
|
}
|
||||||
|
|
||||||
|
if (catchVariable || finallyBody) {
|
||||||
|
// If function has catch or finally, wrap body in try/catch/finally
|
||||||
|
instructions.push(
|
||||||
|
...this.#compileTryCatchFinally(
|
||||||
|
compileFunctionBody,
|
||||||
|
catchVariable,
|
||||||
|
catchBody,
|
||||||
|
finallyBody,
|
||||||
|
input
|
||||||
|
)
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
instructions.push(...compileFunctionBody())
|
||||||
|
}
|
||||||
|
|
||||||
instructions.push(['RETURN'])
|
instructions.push(['RETURN'])
|
||||||
|
|
||||||
instructions.push([`${afterLabel}:`])
|
instructions.push([`${afterLabel}:`])
|
||||||
|
|
@ -293,9 +438,31 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.FunctionCallOrIdentifier: {
|
case 'FunctionCallOrIdentifier': {
|
||||||
if (node.firstChild?.type.id === terms.DotGet) {
|
if (node.firstChild?.type.is('DotGet')) {
|
||||||
return this.#compileNode(node.firstChild, input)
|
const instructions: ProgramItem[] = []
|
||||||
|
const callLabel: Label = `.call_dotget_${++this.labelCount}`
|
||||||
|
const afterLabel: Label = `.after_dotget_${++this.labelCount}`
|
||||||
|
|
||||||
|
instructions.push(...this.#compileNode(node.firstChild, input))
|
||||||
|
instructions.push(['DUP'])
|
||||||
|
instructions.push(['TYPE'])
|
||||||
|
instructions.push(['PUSH', 'function'])
|
||||||
|
instructions.push(['EQ'])
|
||||||
|
instructions.push(['JUMP_IF_TRUE', callLabel])
|
||||||
|
instructions.push(['DUP'])
|
||||||
|
instructions.push(['TYPE'])
|
||||||
|
instructions.push(['PUSH', 'native'])
|
||||||
|
instructions.push(['EQ'])
|
||||||
|
instructions.push(['JUMP_IF_TRUE', callLabel])
|
||||||
|
instructions.push(['JUMP', afterLabel])
|
||||||
|
instructions.push([`${callLabel}:`])
|
||||||
|
instructions.push(['PUSH', 0])
|
||||||
|
instructions.push(['PUSH', 0])
|
||||||
|
instructions.push(['CALL'])
|
||||||
|
instructions.push([`${afterLabel}:`])
|
||||||
|
|
||||||
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
return [['TRY_CALL', value]]
|
return [['TRY_CALL', value]]
|
||||||
|
|
@ -314,7 +481,8 @@ export class Compiler {
|
||||||
PUSH 1 ; Named count
|
PUSH 1 ; Named count
|
||||||
CALL
|
CALL
|
||||||
*/
|
*/
|
||||||
case terms.FunctionCall: {
|
|
||||||
|
case 'FunctionCall': {
|
||||||
const { identifierNode, namedArgs, positionalArgs } = getFunctionCallParts(node, input)
|
const { identifierNode, namedArgs, positionalArgs } = getFunctionCallParts(node, input)
|
||||||
const instructions: ProgramItem[] = []
|
const instructions: ProgramItem[] = []
|
||||||
instructions.push(...this.#compileNode(identifierNode, input))
|
instructions.push(...this.#compileNode(identifierNode, input))
|
||||||
|
|
@ -336,16 +504,101 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.ThenBlock:
|
case 'Block': {
|
||||||
case terms.SingleLineThenBlock: {
|
const children = getAllChildren(node)
|
||||||
const instructions = getAllChildren(node)
|
const instructions: ProgramItem[] = []
|
||||||
.map((child) => this.#compileNode(child, input))
|
|
||||||
.flat()
|
children.forEach((child, index) => {
|
||||||
|
instructions.push(...this.#compileNode(child, input))
|
||||||
|
// keep only the last expression's value
|
||||||
|
if (index < children.length - 1) {
|
||||||
|
instructions.push(['POP'])
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.IfExpr: {
|
case 'FunctionCallWithBlock': {
|
||||||
|
const [fn, _colon, ...block] = getAllChildren(node)
|
||||||
|
let instructions: ProgramItem[] = []
|
||||||
|
|
||||||
|
const fnLabel: Label = `.func_${this.fnLabelCount++}`
|
||||||
|
const afterLabel: Label = `.after_${fnLabel}`
|
||||||
|
|
||||||
|
instructions.push(['JUMP', afterLabel])
|
||||||
|
instructions.push([`${fnLabel}:`])
|
||||||
|
instructions.push(
|
||||||
|
...block
|
||||||
|
.filter((x) => x.type.name !== 'keyword')
|
||||||
|
.map((x) => this.#compileNode(x!, input))
|
||||||
|
.flat()
|
||||||
|
)
|
||||||
|
instructions.push(['RETURN'])
|
||||||
|
instructions.push([`${afterLabel}:`])
|
||||||
|
|
||||||
|
if (fn?.type.is('FunctionCallOrIdentifier')) {
|
||||||
|
instructions.push(['LOAD', input.slice(fn!.from, fn!.to)])
|
||||||
|
instructions.push(['MAKE_FUNCTION', [], fnLabel])
|
||||||
|
instructions.push(['PUSH', 1])
|
||||||
|
instructions.push(['PUSH', 0])
|
||||||
|
instructions.push(['CALL'])
|
||||||
|
} else if (fn?.type.is('FunctionCall')) {
|
||||||
|
let body = this.#compileNode(fn!, input)
|
||||||
|
const namedArgCount = (body[body.length - 2]![1] as number) * 2
|
||||||
|
const startSlice = body.length - namedArgCount - 3
|
||||||
|
|
||||||
|
body = [
|
||||||
|
...body.slice(0, startSlice),
|
||||||
|
['MAKE_FUNCTION', [], fnLabel],
|
||||||
|
...body.slice(startSlice),
|
||||||
|
]
|
||||||
|
|
||||||
|
// @ts-ignore
|
||||||
|
body[body.length - 3]![1] += 1
|
||||||
|
instructions.push(...body)
|
||||||
|
} else {
|
||||||
|
throw new Error(
|
||||||
|
`FunctionCallWithBlock: Expected FunctionCallOrIdentifier or FunctionCall`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return instructions
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'TryExpr': {
|
||||||
|
const { tryBlock, catchVariable, catchBody, finallyBody } = getTryExprParts(node, input)
|
||||||
|
|
||||||
|
return this.#compileTryCatchFinally(
|
||||||
|
() => this.#compileNode(tryBlock, input),
|
||||||
|
catchVariable,
|
||||||
|
catchBody,
|
||||||
|
finallyBody,
|
||||||
|
input
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'Throw':
|
||||||
|
case 'Not': {
|
||||||
|
const keyword = node.type.is('Throw') ? 'Throw' : 'Not'
|
||||||
|
const children = getAllChildren(node)
|
||||||
|
const [_throwKeyword, expression] = children
|
||||||
|
if (!expression) {
|
||||||
|
throw new CompilerError(
|
||||||
|
`${keyword} expected expression, got ${children.length} children`,
|
||||||
|
node.from,
|
||||||
|
node.to
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const instructions: ProgramItem[] = []
|
||||||
|
instructions.push(...this.#compileNode(expression, input))
|
||||||
|
instructions.push([keyword.toUpperCase()]) // THROW or NOT
|
||||||
|
|
||||||
|
return instructions
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'IfExpr': {
|
||||||
const { conditionNode, thenBlock, elseIfBlocks, elseThenBlock } = getIfExprParts(
|
const { conditionNode, thenBlock, elseIfBlocks, elseThenBlock } = getIfExprParts(
|
||||||
node,
|
node,
|
||||||
input
|
input
|
||||||
|
|
@ -354,19 +607,24 @@ export class Compiler {
|
||||||
instructions.push(...this.#compileNode(conditionNode, input))
|
instructions.push(...this.#compileNode(conditionNode, input))
|
||||||
this.ifLabelCount++
|
this.ifLabelCount++
|
||||||
const endLabel: Label = `.end_${this.ifLabelCount}`
|
const endLabel: Label = `.end_${this.ifLabelCount}`
|
||||||
|
const elseLabel: Label = `.else_${this.ifLabelCount}`
|
||||||
|
|
||||||
const thenBlockInstructions = this.#compileNode(thenBlock, input)
|
const thenBlockInstructions = this.#compileNode(thenBlock, input)
|
||||||
instructions.push(['JUMP_IF_FALSE', thenBlockInstructions.length + 1])
|
instructions.push(['JUMP_IF_FALSE', elseLabel])
|
||||||
instructions.push(...thenBlockInstructions)
|
instructions.push(...thenBlockInstructions)
|
||||||
instructions.push(['JUMP', endLabel])
|
instructions.push(['JUMP', endLabel])
|
||||||
|
|
||||||
|
instructions.push([`${elseLabel}:`])
|
||||||
|
|
||||||
// Else if
|
// Else if
|
||||||
elseIfBlocks.forEach(({ conditional, thenBlock }) => {
|
elseIfBlocks.forEach(({ conditional, thenBlock }, index) => {
|
||||||
instructions.push(...this.#compileNode(conditional, input))
|
instructions.push(...this.#compileNode(conditional, input))
|
||||||
|
const nextLabel: Label = `.elsif_${this.ifLabelCount}_${index}`
|
||||||
const elseIfInstructions = this.#compileNode(thenBlock, input)
|
const elseIfInstructions = this.#compileNode(thenBlock, input)
|
||||||
instructions.push(['JUMP_IF_FALSE', elseIfInstructions.length + 1])
|
instructions.push(['JUMP_IF_FALSE', nextLabel])
|
||||||
instructions.push(...elseIfInstructions)
|
instructions.push(...elseIfInstructions)
|
||||||
instructions.push(['JUMP', endLabel])
|
instructions.push(['JUMP', endLabel])
|
||||||
|
instructions.push([`${nextLabel}:`])
|
||||||
})
|
})
|
||||||
|
|
||||||
// Else
|
// Else
|
||||||
|
|
@ -383,7 +641,7 @@ export class Compiler {
|
||||||
}
|
}
|
||||||
|
|
||||||
// - `EQ`, `NEQ`, `LT`, `GT`, `LTE`, `GTE` - Pop 2, push boolean
|
// - `EQ`, `NEQ`, `LT`, `GT`, `LTE`, `GTE` - Pop 2, push boolean
|
||||||
case terms.ConditionalOp: {
|
case 'ConditionalOp': {
|
||||||
const instructions: ProgramItem[] = []
|
const instructions: ProgramItem[] = []
|
||||||
const { left, op, right } = getBinaryParts(node)
|
const { left, op, right } = getBinaryParts(node)
|
||||||
const leftInstructions: ProgramItem[] = this.#compileNode(left, input)
|
const leftInstructions: ProgramItem[] = this.#compileNode(left, input)
|
||||||
|
|
@ -415,22 +673,41 @@ export class Compiler {
|
||||||
instructions.push(...leftInstructions, ...rightInstructions, ['GTE'])
|
instructions.push(...leftInstructions, ...rightInstructions, ['GTE'])
|
||||||
break
|
break
|
||||||
|
|
||||||
case 'and':
|
case 'and': {
|
||||||
|
const skipLabel: Label = `.skip_${this.labelCount++}`
|
||||||
instructions.push(...leftInstructions)
|
instructions.push(...leftInstructions)
|
||||||
instructions.push(['DUP'])
|
instructions.push(['DUP'])
|
||||||
instructions.push(['JUMP_IF_FALSE', rightInstructions.length + 1])
|
instructions.push(['JUMP_IF_FALSE', skipLabel])
|
||||||
instructions.push(['POP'])
|
instructions.push(['POP'])
|
||||||
instructions.push(...rightInstructions)
|
instructions.push(...rightInstructions)
|
||||||
|
instructions.push([`${skipLabel}:`])
|
||||||
break
|
break
|
||||||
|
}
|
||||||
|
|
||||||
case 'or':
|
case 'or': {
|
||||||
|
const skipLabel: Label = `.skip_${this.labelCount++}`
|
||||||
instructions.push(...leftInstructions)
|
instructions.push(...leftInstructions)
|
||||||
instructions.push(['DUP'])
|
instructions.push(['DUP'])
|
||||||
instructions.push(['JUMP_IF_TRUE', rightInstructions.length + 1])
|
instructions.push(['JUMP_IF_TRUE', skipLabel])
|
||||||
instructions.push(['POP'])
|
instructions.push(['POP'])
|
||||||
instructions.push(...rightInstructions)
|
instructions.push(...rightInstructions)
|
||||||
|
instructions.push([`${skipLabel}:`])
|
||||||
break
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
case '??': {
|
||||||
|
// Nullish coalescing: return left if not null, else right
|
||||||
|
const skipLabel: Label = `.skip_${this.labelCount++}`
|
||||||
|
instructions.push(...leftInstructions)
|
||||||
|
instructions.push(['DUP'])
|
||||||
|
instructions.push(['PUSH', null])
|
||||||
|
instructions.push(['NEQ'])
|
||||||
|
instructions.push(['JUMP_IF_TRUE', skipLabel])
|
||||||
|
instructions.push(['POP'])
|
||||||
|
instructions.push(...rightInstructions)
|
||||||
|
instructions.push([`${skipLabel}:`])
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
throw new CompilerError(`Unsupported conditional operator: ${opValue}`, op.from, op.to)
|
throw new CompilerError(`Unsupported conditional operator: ${opValue}`, op.from, op.to)
|
||||||
|
|
@ -439,7 +716,7 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.PipeExpr: {
|
case 'PipeExpr': {
|
||||||
const { pipedFunctionCall, pipeReceivers } = getPipeExprParts(node)
|
const { pipedFunctionCall, pipeReceivers } = getPipeExprParts(node)
|
||||||
if (!pipedFunctionCall || pipeReceivers.length === 0) {
|
if (!pipedFunctionCall || pipeReceivers.length === 0) {
|
||||||
throw new CompilerError('PipeExpr must have at least two operands', node.from, node.to)
|
throw new CompilerError('PipeExpr must have at least two operands', node.from, node.to)
|
||||||
|
|
@ -461,11 +738,11 @@ export class Compiler {
|
||||||
instructions.push(...this.#compileNode(identifierNode, input))
|
instructions.push(...this.#compileNode(identifierNode, input))
|
||||||
|
|
||||||
const isUnderscoreInPositionalArgs = positionalArgs.some(
|
const isUnderscoreInPositionalArgs = positionalArgs.some(
|
||||||
(arg) => arg.type.id === terms.Underscore
|
(arg) => arg.type.is('Underscore')
|
||||||
)
|
)
|
||||||
const isUnderscoreInNamedArgs = namedArgs.some((arg) => {
|
const isUnderscoreInNamedArgs = namedArgs.some((arg) => {
|
||||||
const { valueNode } = getNamedArgParts(arg, input)
|
const { valueNode } = getNamedArgParts(arg, input)
|
||||||
return valueNode.type.id === terms.Underscore
|
return valueNode.type.is('Underscore')
|
||||||
})
|
})
|
||||||
|
|
||||||
const shouldPushPositionalArg = !isUnderscoreInPositionalArgs && !isUnderscoreInNamedArgs
|
const shouldPushPositionalArg = !isUnderscoreInPositionalArgs && !isUnderscoreInNamedArgs
|
||||||
|
|
@ -476,7 +753,7 @@ export class Compiler {
|
||||||
}
|
}
|
||||||
|
|
||||||
positionalArgs.forEach((arg) => {
|
positionalArgs.forEach((arg) => {
|
||||||
if (arg.type.id === terms.Underscore) {
|
if (arg.type.is('Underscore')) {
|
||||||
instructions.push(['LOAD', pipeValName])
|
instructions.push(['LOAD', pipeValName])
|
||||||
} else {
|
} else {
|
||||||
instructions.push(...this.#compileNode(arg, input))
|
instructions.push(...this.#compileNode(arg, input))
|
||||||
|
|
@ -486,7 +763,7 @@ export class Compiler {
|
||||||
namedArgs.forEach((arg) => {
|
namedArgs.forEach((arg) => {
|
||||||
const { name, valueNode } = getNamedArgParts(arg, input)
|
const { name, valueNode } = getNamedArgParts(arg, input)
|
||||||
instructions.push(['PUSH', name])
|
instructions.push(['PUSH', name])
|
||||||
if (valueNode.type.id === terms.Underscore) {
|
if (valueNode.type.is('Underscore')) {
|
||||||
instructions.push(['LOAD', pipeValName])
|
instructions.push(['LOAD', pipeValName])
|
||||||
} else {
|
} else {
|
||||||
instructions.push(...this.#compileNode(valueNode, input))
|
instructions.push(...this.#compileNode(valueNode, input))
|
||||||
|
|
@ -501,14 +778,14 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.Array: {
|
case 'Array': {
|
||||||
const children = getAllChildren(node)
|
const children = getAllChildren(node)
|
||||||
|
|
||||||
// We can easily parse [=] as an empty dict, but `[ = ]` is tougher.
|
// We can easily parse [=] as an empty dict, but `[ = ]` is tougher.
|
||||||
// = can be a valid word, and is also valid inside words, so for now we cheat
|
// = can be a valid word, and is also valid inside words, so for now we cheat
|
||||||
// and check for arrays that look like `[ = ]` to interpret them as
|
// and check for arrays that look like `[ = ]` to interpret them as
|
||||||
// empty dicts
|
// empty dicts
|
||||||
if (children.length === 1 && children[0]!.name === 'Word') {
|
if (children.length === 1 && children[0]!.type.is('Word')) {
|
||||||
const child = children[0]!
|
const child = children[0]!
|
||||||
if (input.slice(child.from, child.to) === '=') {
|
if (input.slice(child.from, child.to) === '=') {
|
||||||
return [['MAKE_DICT', 0]]
|
return [['MAKE_DICT', 0]]
|
||||||
|
|
@ -520,7 +797,7 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
case terms.Dict: {
|
case 'Dict': {
|
||||||
const children = getAllChildren(node)
|
const children = getAllChildren(node)
|
||||||
const instructions: ProgramItem[] = []
|
const instructions: ProgramItem[] = []
|
||||||
|
|
||||||
|
|
@ -529,7 +806,7 @@ export class Compiler {
|
||||||
const valueNode = node.firstChild!.nextSibling
|
const valueNode = node.firstChild!.nextSibling
|
||||||
|
|
||||||
// name= -> name
|
// name= -> name
|
||||||
const key = input.slice(keyNode!.from, keyNode!.to).slice(0, -1)
|
const key = input.slice(keyNode!.from, keyNode!.to).replace(/\s*=$/, '')
|
||||||
instructions.push(['PUSH', key])
|
instructions.push(['PUSH', key])
|
||||||
|
|
||||||
instructions.push(...this.#compileNode(valueNode!, input))
|
instructions.push(...this.#compileNode(valueNode!, input))
|
||||||
|
|
@ -539,6 +816,53 @@ export class Compiler {
|
||||||
return instructions
|
return instructions
|
||||||
}
|
}
|
||||||
|
|
||||||
|
case 'WhileExpr': {
|
||||||
|
const [_while, test, _colon, block] = getAllChildren(node)
|
||||||
|
const instructions: ProgramItem[] = []
|
||||||
|
|
||||||
|
this.loopLabelCount++
|
||||||
|
const startLoop = `.loop_${this.loopLabelCount}:`
|
||||||
|
const endLoop = `.end_loop_${this.loopLabelCount}:`
|
||||||
|
|
||||||
|
instructions.push([`${startLoop}:`])
|
||||||
|
instructions.push(...this.#compileNode(test!, input))
|
||||||
|
instructions.push(['JUMP_IF_FALSE', endLoop])
|
||||||
|
instructions.push(...this.#compileNode(block!, input))
|
||||||
|
instructions.push(['JUMP', startLoop])
|
||||||
|
instructions.push([`${endLoop}:`])
|
||||||
|
|
||||||
|
return instructions
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'Import': {
|
||||||
|
const instructions: ProgramItem[] = []
|
||||||
|
const [_import, ...nodes] = getAllChildren(node)
|
||||||
|
const args = nodes.filter(node => node.type.is('Identifier'))
|
||||||
|
const namedArgs = nodes.filter(node => node.type.is('NamedArg'))
|
||||||
|
|
||||||
|
instructions.push(['LOAD', 'import'])
|
||||||
|
|
||||||
|
args.forEach((dict) =>
|
||||||
|
instructions.push(['PUSH', input.slice(dict.from, dict.to)])
|
||||||
|
)
|
||||||
|
|
||||||
|
namedArgs.forEach((arg) => {
|
||||||
|
const { name, valueNode } = getNamedArgParts(arg, input)
|
||||||
|
instructions.push(['PUSH', name])
|
||||||
|
instructions.push(...this.#compileNode(valueNode, input))
|
||||||
|
})
|
||||||
|
|
||||||
|
instructions.push(['PUSH', args.length])
|
||||||
|
instructions.push(['PUSH', namedArgs.length])
|
||||||
|
instructions.push(['CALL'])
|
||||||
|
|
||||||
|
return instructions
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'Comment': {
|
||||||
|
return [] // ignore comments
|
||||||
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
throw new CompilerError(
|
throw new CompilerError(
|
||||||
`Compiler doesn't know how to handle a "${node.type.name}" node.`,
|
`Compiler doesn't know how to handle a "${node.type.name}" node.`,
|
||||||
|
|
@ -547,4 +871,74 @@ export class Compiler {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#compileTryCatchFinally(
|
||||||
|
compileTryBody: () => ProgramItem[],
|
||||||
|
catchVariable: string | undefined,
|
||||||
|
catchBody: SyntaxNode | undefined,
|
||||||
|
finallyBody: SyntaxNode | undefined,
|
||||||
|
input: string
|
||||||
|
): ProgramItem[] {
|
||||||
|
const instructions: ProgramItem[] = []
|
||||||
|
this.tryLabelCount++
|
||||||
|
const catchLabel: Label = `.catch_${this.tryLabelCount}`
|
||||||
|
const finallyLabel: Label = finallyBody ? `.finally_${this.tryLabelCount}` : (null as any)
|
||||||
|
const endLabel: Label = `.end_try_${this.tryLabelCount}`
|
||||||
|
|
||||||
|
instructions.push(['PUSH_TRY', catchLabel])
|
||||||
|
instructions.push(...compileTryBody())
|
||||||
|
instructions.push(['POP_TRY'])
|
||||||
|
instructions.push(['JUMP', finallyBody ? finallyLabel : endLabel])
|
||||||
|
|
||||||
|
// catch block
|
||||||
|
instructions.push([`${catchLabel}:`])
|
||||||
|
if (catchBody && catchVariable) {
|
||||||
|
instructions.push(['STORE', catchVariable])
|
||||||
|
const catchInstructions = this.#compileNode(catchBody, input)
|
||||||
|
instructions.push(...catchInstructions)
|
||||||
|
instructions.push(['JUMP', finallyBody ? finallyLabel : endLabel])
|
||||||
|
} else {
|
||||||
|
// no catch block
|
||||||
|
if (finallyBody) {
|
||||||
|
instructions.push(['JUMP', finallyLabel])
|
||||||
|
} else {
|
||||||
|
instructions.push(['THROW'])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// finally block
|
||||||
|
if (finallyBody) {
|
||||||
|
instructions.push([`${finallyLabel}:`])
|
||||||
|
const finallyInstructions = this.#compileNode(finallyBody, input)
|
||||||
|
instructions.push(...finallyInstructions)
|
||||||
|
// finally doesn't return a value
|
||||||
|
instructions.push(['POP'])
|
||||||
|
}
|
||||||
|
|
||||||
|
instructions.push([`${endLabel}:`])
|
||||||
|
|
||||||
|
return instructions
|
||||||
|
}
|
||||||
|
|
||||||
|
#compileCurlyString(value: string, input: string): ProgramItem[] {
|
||||||
|
const instructions: ProgramItem[] = []
|
||||||
|
const nodes = tokenizeCurlyString(value)
|
||||||
|
|
||||||
|
nodes.forEach((node) => {
|
||||||
|
if (typeof node === 'string') {
|
||||||
|
instructions.push(['PUSH', node])
|
||||||
|
} else {
|
||||||
|
const [input, topNode] = node
|
||||||
|
let child = topNode.firstChild
|
||||||
|
while (child) {
|
||||||
|
instructions.push(...this.#compileNode(child, input))
|
||||||
|
child = child.nextSibling
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
instructions.push(['STR_CONCAT', nodes.length])
|
||||||
|
|
||||||
|
return instructions
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
178
src/compiler/tests/bitwise.test.ts
Normal file
178
src/compiler/tests/bitwise.test.ts
Normal file
|
|
@ -0,0 +1,178 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('bitwise operators', () => {
|
||||||
|
describe('band (bitwise AND)', () => {
|
||||||
|
test('basic AND operation', () => {
|
||||||
|
expect('5 band 3').toEvaluateTo(1)
|
||||||
|
// 5 = 0101, 3 = 0011, result = 0001 = 1
|
||||||
|
})
|
||||||
|
|
||||||
|
test('AND with zero', () => {
|
||||||
|
expect('5 band 0').toEvaluateTo(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('AND with all bits set', () => {
|
||||||
|
expect('15 band 7').toEvaluateTo(7)
|
||||||
|
// 15 = 1111, 7 = 0111, result = 0111 = 7
|
||||||
|
})
|
||||||
|
|
||||||
|
test('AND in assignment', () => {
|
||||||
|
expect('x = 12 band 10').toEvaluateTo(8)
|
||||||
|
// 12 = 1100, 10 = 1010, result = 1000 = 8
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('bor (bitwise OR)', () => {
|
||||||
|
test('basic OR operation', () => {
|
||||||
|
expect('5 bor 3').toEvaluateTo(7)
|
||||||
|
// 5 = 0101, 3 = 0011, result = 0111 = 7
|
||||||
|
})
|
||||||
|
|
||||||
|
test('OR with zero', () => {
|
||||||
|
expect('5 bor 0').toEvaluateTo(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('OR with all bits set', () => {
|
||||||
|
expect('8 bor 4').toEvaluateTo(12)
|
||||||
|
// 8 = 1000, 4 = 0100, result = 1100 = 12
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('bxor (bitwise XOR)', () => {
|
||||||
|
test('basic XOR operation', () => {
|
||||||
|
expect('5 bxor 3').toEvaluateTo(6)
|
||||||
|
// 5 = 0101, 3 = 0011, result = 0110 = 6
|
||||||
|
})
|
||||||
|
|
||||||
|
test('XOR with itself returns zero', () => {
|
||||||
|
expect('5 bxor 5').toEvaluateTo(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('XOR with zero returns same value', () => {
|
||||||
|
expect('7 bxor 0').toEvaluateTo(7)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('XOR in assignment', () => {
|
||||||
|
expect('result = 8 bxor 12').toEvaluateTo(4)
|
||||||
|
// 8 = 1000, 12 = 1100, result = 0100 = 4
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('bnot (bitwise NOT)', () => {
|
||||||
|
test('NOT of positive number', () => {
|
||||||
|
expect('bnot 5').toEvaluateTo(-6)
|
||||||
|
// ~5 = -6 (two\'s complement)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('NOT of zero', () => {
|
||||||
|
expect('bnot 0').toEvaluateTo(-1)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('NOT of negative number', () => {
|
||||||
|
expect('bnot -1').toEvaluateTo(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('double NOT returns original', () => {
|
||||||
|
expect('bnot (bnot 5)').toEvaluateTo(5)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('<< (left shift)', () => {
|
||||||
|
test('basic left shift', () => {
|
||||||
|
expect('5 << 2').toEvaluateTo(20)
|
||||||
|
// 5 << 2 = 20
|
||||||
|
})
|
||||||
|
|
||||||
|
test('shift by zero', () => {
|
||||||
|
expect('5 << 0').toEvaluateTo(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('shift by one', () => {
|
||||||
|
expect('3 << 1').toEvaluateTo(6)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('large shift', () => {
|
||||||
|
expect('1 << 10').toEvaluateTo(1024)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('>> (signed right shift)', () => {
|
||||||
|
test('basic right shift', () => {
|
||||||
|
expect('20 >> 2').toEvaluateTo(5)
|
||||||
|
// 20 >> 2 = 5
|
||||||
|
})
|
||||||
|
|
||||||
|
test('shift by zero', () => {
|
||||||
|
expect('20 >> 0').toEvaluateTo(20)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('preserves sign for negative numbers', () => {
|
||||||
|
expect('-20 >> 2').toEvaluateTo(-5)
|
||||||
|
// Sign is preserved
|
||||||
|
})
|
||||||
|
|
||||||
|
test('negative number right shift', () => {
|
||||||
|
expect('-8 >> 1').toEvaluateTo(-4)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('>>> (unsigned right shift)', () => {
|
||||||
|
test('basic unsigned right shift', () => {
|
||||||
|
expect('20 >>> 2').toEvaluateTo(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('unsigned shift of -1', () => {
|
||||||
|
expect('-1 >>> 1').toEvaluateTo(2147483647)
|
||||||
|
// -1 >>> 1 = 2147483647 (unsigned, no sign extension)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('unsigned shift of negative number', () => {
|
||||||
|
expect('-8 >>> 1').toEvaluateTo(2147483644)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('compound expressions', () => {
|
||||||
|
test('multiple bitwise operations', () => {
|
||||||
|
expect('(5 band 3) bor (8 bxor 12)').toEvaluateTo(5)
|
||||||
|
// (5 & 3) | (8 ^ 12) = 1 | 4 = 5
|
||||||
|
})
|
||||||
|
|
||||||
|
test('bitwise with variables', () => {
|
||||||
|
expect(`
|
||||||
|
a = 5
|
||||||
|
b = 3
|
||||||
|
a bor b
|
||||||
|
`).toEvaluateTo(7)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('shift operations with variables', () => {
|
||||||
|
expect(`
|
||||||
|
x = 16
|
||||||
|
y = 2
|
||||||
|
x >> y
|
||||||
|
`).toEvaluateTo(4)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('mixing shifts and bitwise', () => {
|
||||||
|
expect('(8 << 1) band 15').toEvaluateTo(0)
|
||||||
|
// (8 << 1) & 15 = 16 & 15 = 0
|
||||||
|
})
|
||||||
|
|
||||||
|
test('mixing shifts and bitwise 2', () => {
|
||||||
|
expect('(7 << 1) band 15').toEvaluateTo(14)
|
||||||
|
// (7 << 1) & 15 = 14 & 15 = 14
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('precedence', () => {
|
||||||
|
test('bitwise has correct precedence with arithmetic', () => {
|
||||||
|
expect('1 + 2 band 3').toEvaluateTo(3)
|
||||||
|
// (1 + 2) & 3 = 3 & 3 = 3
|
||||||
|
})
|
||||||
|
|
||||||
|
test('shift has correct precedence', () => {
|
||||||
|
expect('4 + 8 << 1').toEvaluateTo(24)
|
||||||
|
// (4 + 8) << 1 = 12 << 1 = 24
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -110,7 +110,10 @@ describe('compiler', () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
test('function call with no args', () => {
|
test('function call with no args', () => {
|
||||||
expect(`bloop = do: 'bloop' end; bloop`).toEvaluateTo('bloop')
|
expect(`bloop = do: 'bleep' end; bloop`).toEvaluateTo('bleep')
|
||||||
|
expect(`bloop = [ go=do: 'bleep' end ]; bloop.go`).toEvaluateTo('bleep')
|
||||||
|
expect(`bloop = [ go=do: 'bleep' end ]; abc = do x: x end; abc (bloop.go)`).toEvaluateTo('bleep')
|
||||||
|
expect(`num = ((math.random) * 10 + 1) | math.floor; num >= 1 and num <= 10 `).toEvaluateTo(true)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('function call with if statement and multiple expressions', () => {
|
test('function call with if statement and multiple expressions', () => {
|
||||||
|
|
@ -154,18 +157,18 @@ describe('compiler', () => {
|
||||||
end`).toEvaluateTo('white')
|
end`).toEvaluateTo('white')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('if elseif', () => {
|
test('if else if', () => {
|
||||||
expect(`if false:
|
expect(`if false:
|
||||||
boromir
|
boromir
|
||||||
elseif true:
|
else if true:
|
||||||
frodo
|
frodo
|
||||||
end`).toEvaluateTo('frodo')
|
end`).toEvaluateTo('frodo')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('if elseif else', () => {
|
test('if else if else', () => {
|
||||||
expect(`if false:
|
expect(`if false:
|
||||||
destroyed
|
destroyed
|
||||||
elseif true:
|
else if true:
|
||||||
fire
|
fire
|
||||||
else:
|
else:
|
||||||
darkness
|
darkness
|
||||||
|
|
@ -173,9 +176,9 @@ describe('compiler', () => {
|
||||||
|
|
||||||
expect(`if false:
|
expect(`if false:
|
||||||
king
|
king
|
||||||
elseif false:
|
else if false:
|
||||||
elf
|
elf
|
||||||
elseif true:
|
else if true:
|
||||||
dwarf
|
dwarf
|
||||||
else:
|
else:
|
||||||
scattered
|
scattered
|
||||||
|
|
@ -185,6 +188,16 @@ describe('compiler', () => {
|
||||||
test('single line if', () => {
|
test('single line if', () => {
|
||||||
expect(`if 3 < 9: shire end`).toEvaluateTo('shire')
|
expect(`if 3 < 9: shire end`).toEvaluateTo('shire')
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('if statement with function definition (bytecode labels)', () => {
|
||||||
|
expect(`
|
||||||
|
if false:
|
||||||
|
abc = do x: x end
|
||||||
|
else:
|
||||||
|
nope
|
||||||
|
end
|
||||||
|
`).toEvaluateTo('nope')
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('errors', () => {
|
describe('errors', () => {
|
||||||
|
|
@ -281,4 +294,232 @@ describe('dot get', () => {
|
||||||
test('use parens expr with dot-get', () => {
|
test('use parens expr with dot-get', () => {
|
||||||
expect(`a = 1; arr = array 'a' 'b' 'c'; arr.(1 + a)`).toEvaluateTo('c', { array })
|
expect(`a = 1; arr = array 'a' 'b' 'c'; arr.(1 + a)`).toEvaluateTo('c', { array })
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('chained dot get: two levels', () => {
|
||||||
|
expect(`obj = [inner=[value=42]]; obj.inner.value`).toEvaluateTo(42)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get: three levels', () => {
|
||||||
|
expect(`obj = [a=[b=[c=123]]]; obj.a.b.c`).toEvaluateTo(123)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get: four levels', () => {
|
||||||
|
expect(`obj = [w=[x=[y=[z='deep']]]]; obj.w.x.y.z`).toEvaluateTo('deep')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get with numeric index', () => {
|
||||||
|
expect(`obj = [items=[1 2 3]]; obj.items.0`).toEvaluateTo(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get in expression', () => {
|
||||||
|
expect(`config = [server=[port=3000]]; config.server.port + 1`).toEvaluateTo(3001)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get as function argument', () => {
|
||||||
|
const double = (x: number) => x * 2
|
||||||
|
expect(`obj = [val=[num=21]]; double obj.val.num`).toEvaluateTo(42, { double })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get in binary operation', () => {
|
||||||
|
expect(`a = [x=[y=10]]; b = [x=[y=20]]; a.x.y + b.x.y`).toEvaluateTo(30)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get with parens at end', () => {
|
||||||
|
expect(`idx = 1; obj = [items=[10 20 30]]; obj.items.(idx)`).toEvaluateTo(20)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('mixed chained and simple dot get', () => {
|
||||||
|
expect(`obj = [a=1 b=[c=2]]; obj.a + obj.b.c`).toEvaluateTo(3)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('default params', () => {
|
||||||
|
test('function with single default parameter', () => {
|
||||||
|
expect('add1 = do x=1: x + 1 end; add1').toEvaluateTo(2)
|
||||||
|
expect('add1 = do x=1: x + 1 end; add1 5').toEvaluateTo(6)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function with multiple default parameters', () => {
|
||||||
|
expect(`weird = do x='something' y=true: [x y] end; weird`).toEvaluateTo(['something', true])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function with mixed parameters', () => {
|
||||||
|
expect('multiply = do x y=5: x * y end; multiply 5').toEvaluateTo(25)
|
||||||
|
expect('multiply = do x y=5: x * y end; multiply 5 2').toEvaluateTo(10)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('null triggers default value', () => {
|
||||||
|
expect('test = do n=true: n end; test').toEvaluateTo(true)
|
||||||
|
expect('test = do n=true: n end; test false').toEvaluateTo(false)
|
||||||
|
expect('test = do n=true: n end; test null').toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('null triggers default for named parameters', () => {
|
||||||
|
expect("greet = do name='World': name end; greet name=null").toEvaluateTo('World')
|
||||||
|
expect("greet = do name='World': name end; greet name='Bob'").toEvaluateTo('Bob')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('null triggers default with multiple parameters', () => {
|
||||||
|
expect('calc = do x=10 y=20: x + y end; calc null 5').toEvaluateTo(15)
|
||||||
|
expect('calc = do x=10 y=20: x + y end; calc 3 null').toEvaluateTo(23)
|
||||||
|
expect('calc = do x=10 y=20: x + y end; calc null null').toEvaluateTo(30)
|
||||||
|
})
|
||||||
|
|
||||||
|
test.skip('array default', () => {
|
||||||
|
expect('abc = do alpha=[a b c]: alpha end; abc').toEvaluateTo(['a', 'b', 'c'])
|
||||||
|
expect('abc = do alpha=[a b c]: alpha end; abc [x y z]').toEvaluateTo(['x', 'y', 'z'])
|
||||||
|
})
|
||||||
|
|
||||||
|
test.skip('dict default', () => {
|
||||||
|
expect('make-person = do person=[name=Bob age=60]: person end; make-person').toEvaluateTo({
|
||||||
|
name: 'Bob',
|
||||||
|
age: 60,
|
||||||
|
})
|
||||||
|
expect(
|
||||||
|
'make-person = do person=[name=Bob age=60]: person end; make-person [name=Jon age=21]'
|
||||||
|
).toEvaluateTo({ name: 'Jon', age: 21 })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('Nullish coalescing operator (??)', () => {
|
||||||
|
test('returns left side when not null', () => {
|
||||||
|
expect('5 ?? 10').toEvaluateTo(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns right side when left is null', () => {
|
||||||
|
expect('null ?? 10').toEvaluateTo(10)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns left side when left is false', () => {
|
||||||
|
expect('false ?? 10').toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns left side when left is 0', () => {
|
||||||
|
expect('0 ?? 10').toEvaluateTo(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns left side when left is empty string', () => {
|
||||||
|
expect(`'' ?? 'default'`).toEvaluateTo('')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chains left to right', () => {
|
||||||
|
expect('null ?? null ?? 42').toEvaluateTo(42)
|
||||||
|
expect('null ?? 10 ?? 20').toEvaluateTo(10)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('short-circuits evaluation', () => {
|
||||||
|
const throwError = () => { throw new Error('Should not evaluate') }
|
||||||
|
expect('5 ?? throw-error').toEvaluateTo(5, { 'throw-error': throwError })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('works with variables', () => {
|
||||||
|
expect('x = null; x ?? 5').toEvaluateTo(5)
|
||||||
|
expect('y = 3; y ?? 5').toEvaluateTo(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('works with function calls', () => {
|
||||||
|
const getValue = () => null
|
||||||
|
const getDefault = () => 42
|
||||||
|
// Note: identifiers without parentheses refer to the function, not call it
|
||||||
|
// Use explicit call syntax to invoke the function
|
||||||
|
expect('(get-value) ?? (get-default)').toEvaluateTo(42, {
|
||||||
|
'get-value': getValue,
|
||||||
|
'get-default': getDefault
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('Nullish coalescing assignment (??=)', () => {
|
||||||
|
test('assigns when variable is null', () => {
|
||||||
|
expect('x = null; x ??= 5; x').toEvaluateTo(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('does not assign when variable is not null', () => {
|
||||||
|
expect('x = 3; x ??= 10; x').toEvaluateTo(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('does not assign when variable is false', () => {
|
||||||
|
expect('x = false; x ??= true; x').toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('does not assign when variable is 0', () => {
|
||||||
|
expect('x = 0; x ??= 100; x').toEvaluateTo(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('does not assign when variable is empty string', () => {
|
||||||
|
expect(`x = ''; x ??= 'default'; x`).toEvaluateTo('')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns the final value', () => {
|
||||||
|
expect('x = null; x ??= 5').toEvaluateTo(5)
|
||||||
|
expect('y = 3; y ??= 10').toEvaluateTo(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('short-circuits evaluation when not null', () => {
|
||||||
|
const throwError = () => { throw new Error('Should not evaluate') }
|
||||||
|
expect('x = 5; x ??= throw-error; x').toEvaluateTo(5, { 'throw-error': throwError })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('works with expressions', () => {
|
||||||
|
expect('x = null; x ??= 2 + 3; x').toEvaluateTo(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('works with function calls', () => {
|
||||||
|
const getDefault = () => 42
|
||||||
|
expect('x = null; x ??= (get-default); x').toEvaluateTo(42, { 'get-default': getDefault })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('throws when variable is undefined', () => {
|
||||||
|
expect(() => expect('undefined-var ??= 5').toEvaluateTo(null)).toThrow()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('Compound assignment operators', () => {
|
||||||
|
test('+=', () => {
|
||||||
|
expect('x = 5; x += 3; x').toEvaluateTo(8)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('-=', () => {
|
||||||
|
expect('x = 10; x -= 4; x').toEvaluateTo(6)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('*=', () => {
|
||||||
|
expect('x = 3; x *= 4; x').toEvaluateTo(12)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('/=', () => {
|
||||||
|
expect('x = 20; x /= 5; x').toEvaluateTo(4)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('%=', () => {
|
||||||
|
expect('x = 10; x %= 3; x').toEvaluateTo(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('import', () => {
|
||||||
|
test('imports single dict', () => {
|
||||||
|
expect(`import str; starts-with? abc a`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('imports multiple dicts', () => {
|
||||||
|
expect(`import str math list; map [1 2 3] do x: x * 2 end`).toEvaluateTo([2, 4, 6])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('imports non-prelude dicts', () => {
|
||||||
|
expect(`
|
||||||
|
abc = [a=true b=yes c=si]
|
||||||
|
import abc
|
||||||
|
abc.b
|
||||||
|
`).toEvaluateTo('yes')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('can specify imports', () => {
|
||||||
|
expect(`import str only=ends-with?; ref ends-with? | function?`).toEvaluateTo(true)
|
||||||
|
expect(`import str only=ends-with?; ref starts-with? | function?`).toEvaluateTo(false)
|
||||||
|
expect(`
|
||||||
|
abc = [a=true b=yes c=si]
|
||||||
|
import abc only=[a c]
|
||||||
|
[a c]
|
||||||
|
`).toEvaluateTo([true, 'si'])
|
||||||
|
})
|
||||||
|
})
|
||||||
311
src/compiler/tests/exceptions.test.ts
Normal file
311
src/compiler/tests/exceptions.test.ts
Normal file
|
|
@ -0,0 +1,311 @@
|
||||||
|
import { describe } from 'bun:test'
|
||||||
|
import { expect, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('exception handling', () => {
|
||||||
|
test('try with catch - no error thrown', () => {
|
||||||
|
expect(`
|
||||||
|
try:
|
||||||
|
42
|
||||||
|
catch err:
|
||||||
|
99
|
||||||
|
end
|
||||||
|
`).toEvaluateTo(42)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('try with catch - error thrown', () => {
|
||||||
|
expect(`
|
||||||
|
try:
|
||||||
|
throw 'something went wrong'
|
||||||
|
99
|
||||||
|
catch err:
|
||||||
|
err
|
||||||
|
end
|
||||||
|
`).toEvaluateTo('something went wrong')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('try with catch - catch variable binding', () => {
|
||||||
|
expect(`
|
||||||
|
try:
|
||||||
|
throw 100
|
||||||
|
catch my-error:
|
||||||
|
my-error + 50
|
||||||
|
end
|
||||||
|
`).toEvaluateTo(150)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('try with finally - no error', () => {
|
||||||
|
expect(`
|
||||||
|
x = 0
|
||||||
|
result = try:
|
||||||
|
x = 10
|
||||||
|
42
|
||||||
|
finally:
|
||||||
|
x = x + 5
|
||||||
|
end
|
||||||
|
x
|
||||||
|
`).toEvaluateTo(15)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('try with finally - return value from try', () => {
|
||||||
|
expect(`
|
||||||
|
x = 0
|
||||||
|
result = try:
|
||||||
|
x = 10
|
||||||
|
42
|
||||||
|
finally:
|
||||||
|
x = x + 5
|
||||||
|
999
|
||||||
|
end
|
||||||
|
result
|
||||||
|
`).toEvaluateTo(42)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('try with catch and finally - no error', () => {
|
||||||
|
expect(`
|
||||||
|
x = 0
|
||||||
|
try:
|
||||||
|
x = 10
|
||||||
|
42
|
||||||
|
catch err:
|
||||||
|
x = 999
|
||||||
|
0
|
||||||
|
finally:
|
||||||
|
x = x + 5
|
||||||
|
end
|
||||||
|
x
|
||||||
|
`).toEvaluateTo(15)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('try with catch and finally - error thrown', () => {
|
||||||
|
expect(`
|
||||||
|
x = 0
|
||||||
|
result = try:
|
||||||
|
x = 10
|
||||||
|
throw 'error'
|
||||||
|
99
|
||||||
|
catch err:
|
||||||
|
x = 20
|
||||||
|
err
|
||||||
|
finally:
|
||||||
|
x = x + 5
|
||||||
|
end
|
||||||
|
x
|
||||||
|
`).toEvaluateTo(25)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('try with catch and finally - return value from catch', () => {
|
||||||
|
expect(`
|
||||||
|
result = try:
|
||||||
|
throw 'oops'
|
||||||
|
catch err:
|
||||||
|
'caught'
|
||||||
|
finally:
|
||||||
|
'finally'
|
||||||
|
end
|
||||||
|
result
|
||||||
|
`).toEvaluateTo('caught')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('throw statement with string', () => {
|
||||||
|
expect(`
|
||||||
|
try:
|
||||||
|
throw 'error message'
|
||||||
|
catch err:
|
||||||
|
err
|
||||||
|
end
|
||||||
|
`).toEvaluateTo('error message')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('throw statement with number', () => {
|
||||||
|
expect(`
|
||||||
|
try:
|
||||||
|
throw 404
|
||||||
|
catch err:
|
||||||
|
err
|
||||||
|
end
|
||||||
|
`).toEvaluateTo(404)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('throw statement with dict', () => {
|
||||||
|
expect(`
|
||||||
|
try:
|
||||||
|
throw [code=500 message=failed]
|
||||||
|
catch e:
|
||||||
|
e
|
||||||
|
end
|
||||||
|
`).toEvaluateTo({ code: 500, message: 'failed' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('uncaught exception fails', () => {
|
||||||
|
expect(`throw 'uncaught error'`).toFailEvaluation()
|
||||||
|
})
|
||||||
|
|
||||||
|
test('single-line try catch', () => {
|
||||||
|
expect(`result = try: throw 'err' catch e: 'handled' end; result`).toEvaluateTo('handled')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('nested try blocks - inner catches', () => {
|
||||||
|
expect(`
|
||||||
|
try:
|
||||||
|
result = try:
|
||||||
|
throw 'inner error'
|
||||||
|
catch err:
|
||||||
|
err
|
||||||
|
end
|
||||||
|
result
|
||||||
|
catch outer:
|
||||||
|
'outer'
|
||||||
|
end
|
||||||
|
`).toEvaluateTo('inner error')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('nested try blocks - outer catches', () => {
|
||||||
|
expect(`
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
throw 'inner error'
|
||||||
|
catch err:
|
||||||
|
throw 'outer error'
|
||||||
|
end
|
||||||
|
catch outer:
|
||||||
|
outer
|
||||||
|
end
|
||||||
|
`).toEvaluateTo('outer error')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('try as expression', () => {
|
||||||
|
expect(`
|
||||||
|
x = try: 10 catch err: 0 end
|
||||||
|
y = try: throw 'err' catch err: 20 end
|
||||||
|
x + y
|
||||||
|
`).toEvaluateTo(30)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('function-level exception handling', () => {
|
||||||
|
test('function with catch - no error', () => {
|
||||||
|
expect(`
|
||||||
|
read-file = do path:
|
||||||
|
path
|
||||||
|
catch e:
|
||||||
|
'default'
|
||||||
|
end
|
||||||
|
|
||||||
|
read-file test.txt
|
||||||
|
`).toEvaluateTo('test.txt')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function with catch - error thrown', () => {
|
||||||
|
expect(`
|
||||||
|
read-file = do path:
|
||||||
|
throw 'file not found'
|
||||||
|
catch e:
|
||||||
|
'default'
|
||||||
|
end
|
||||||
|
|
||||||
|
read-file test.txt
|
||||||
|
`).toEvaluateTo('default')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function with catch - error variable binding', () => {
|
||||||
|
expect(`
|
||||||
|
safe-call = do:
|
||||||
|
throw 'operation failed'
|
||||||
|
catch err:
|
||||||
|
err
|
||||||
|
end
|
||||||
|
|
||||||
|
safe-call
|
||||||
|
`).toEvaluateTo('operation failed')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function with finally - always runs', () => {
|
||||||
|
expect(`
|
||||||
|
counter = 0
|
||||||
|
increment-task = do:
|
||||||
|
result = 42
|
||||||
|
result
|
||||||
|
finally:
|
||||||
|
counter = counter + 1
|
||||||
|
end
|
||||||
|
|
||||||
|
x = increment-task
|
||||||
|
y = increment-task
|
||||||
|
counter
|
||||||
|
`).toEvaluateTo(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function with finally - return value from body', () => {
|
||||||
|
expect(`
|
||||||
|
get-value = do:
|
||||||
|
100
|
||||||
|
finally:
|
||||||
|
999
|
||||||
|
end
|
||||||
|
|
||||||
|
get-value
|
||||||
|
`).toEvaluateTo(100)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function with catch and finally', () => {
|
||||||
|
expect(`
|
||||||
|
cleanup-count = 0
|
||||||
|
safe-op = do should-fail:
|
||||||
|
if should-fail:
|
||||||
|
throw 'failed'
|
||||||
|
end
|
||||||
|
'success'
|
||||||
|
catch e:
|
||||||
|
'caught'
|
||||||
|
finally:
|
||||||
|
cleanup-count = cleanup-count + 1
|
||||||
|
end
|
||||||
|
|
||||||
|
result1 = safe-op false
|
||||||
|
result2 = safe-op true
|
||||||
|
cleanup-count
|
||||||
|
`).toEvaluateTo(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function with catch and finally - catch return value', () => {
|
||||||
|
expect(`
|
||||||
|
safe-fail = do:
|
||||||
|
throw 'always fails'
|
||||||
|
catch e:
|
||||||
|
'error handled'
|
||||||
|
finally:
|
||||||
|
noop = 1
|
||||||
|
end
|
||||||
|
|
||||||
|
safe-fail
|
||||||
|
`).toEvaluateTo('error handled')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function without catch/finally still works', () => {
|
||||||
|
expect(`
|
||||||
|
regular = do x:
|
||||||
|
x + 10
|
||||||
|
end
|
||||||
|
|
||||||
|
regular 5
|
||||||
|
`).toEvaluateTo(15)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('nested functions with catch', () => {
|
||||||
|
expect(`
|
||||||
|
inner = do:
|
||||||
|
throw 'inner error'
|
||||||
|
catch e:
|
||||||
|
'inner caught'
|
||||||
|
end
|
||||||
|
|
||||||
|
outer = do:
|
||||||
|
inner
|
||||||
|
catch e:
|
||||||
|
'outer caught'
|
||||||
|
end
|
||||||
|
|
||||||
|
outer
|
||||||
|
`).toEvaluateTo('inner caught')
|
||||||
|
})
|
||||||
|
})
|
||||||
55
src/compiler/tests/function-blocks.test.ts
Normal file
55
src/compiler/tests/function-blocks.test.ts
Normal file
|
|
@ -0,0 +1,55 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('single line function blocks', () => {
|
||||||
|
test('work with no args', () => {
|
||||||
|
expect(`trap = do x: x end; trap: true end`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work with one arg', () => {
|
||||||
|
expect(`trap = do x y: [ x (y) ] end; trap EXIT: true end`).toEvaluateTo(['EXIT', true])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work with named args', () => {
|
||||||
|
expect(`attach = do signal fn: [ signal (fn) ] end; attach signal='exit': true end`).toEvaluateTo(['exit', true])
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
test('work with dot-get', () => {
|
||||||
|
expect(`signals = [trap=do x y: [x (y)] end]; signals.trap 'EXIT': true end`).toEvaluateTo(['EXIT', true])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('multi line function blocks', () => {
|
||||||
|
test('work with no args', () => {
|
||||||
|
expect(`
|
||||||
|
trap = do x: x end
|
||||||
|
trap:
|
||||||
|
true
|
||||||
|
end`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work with one arg', () => {
|
||||||
|
expect(`
|
||||||
|
trap = do x y: [ x (y) ] end
|
||||||
|
trap EXIT:
|
||||||
|
true
|
||||||
|
end`).toEvaluateTo(['EXIT', true])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work with named args', () => {
|
||||||
|
expect(`
|
||||||
|
attach = do signal fn: [ signal (fn) ] end
|
||||||
|
attach signal='exit':
|
||||||
|
true
|
||||||
|
end`).toEvaluateTo(['exit', true])
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
test('work with dot-get', () => {
|
||||||
|
expect(`
|
||||||
|
signals = [trap=do x y: [x (y)] end]
|
||||||
|
signals.trap 'EXIT':
|
||||||
|
true
|
||||||
|
end`).toEvaluateTo(['EXIT', true])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,6 +1,55 @@
|
||||||
import { describe } from 'bun:test'
|
import { describe } from 'bun:test'
|
||||||
import { expect, test } from 'bun:test'
|
import { expect, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('number literals', () => {
|
||||||
|
test('binary literals', () => {
|
||||||
|
expect('0b110').toEvaluateTo(6)
|
||||||
|
expect('0b1010').toEvaluateTo(10)
|
||||||
|
expect('0b11111111').toEvaluateTo(255)
|
||||||
|
expect('0b0').toEvaluateTo(0)
|
||||||
|
expect('0b1').toEvaluateTo(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('hex literals', () => {
|
||||||
|
expect('0xdeadbeef').toEvaluateTo(0xdeadbeef)
|
||||||
|
expect('0xdeadbeef').toEvaluateTo(3735928559)
|
||||||
|
expect('0xFF').toEvaluateTo(255)
|
||||||
|
expect('0xff').toEvaluateTo(255)
|
||||||
|
expect('0x10').toEvaluateTo(16)
|
||||||
|
expect('0x0').toEvaluateTo(0)
|
||||||
|
expect('0xABCDEF').toEvaluateTo(0xabcdef)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('octal literals', () => {
|
||||||
|
expect('0o644').toEvaluateTo(420)
|
||||||
|
expect('0o755').toEvaluateTo(493)
|
||||||
|
expect('0o777').toEvaluateTo(511)
|
||||||
|
expect('0o10').toEvaluateTo(8)
|
||||||
|
expect('0o0').toEvaluateTo(0)
|
||||||
|
expect('0o123').toEvaluateTo(83)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('decimal literals still work', () => {
|
||||||
|
expect('42').toEvaluateTo(42)
|
||||||
|
expect('3.14').toEvaluateTo(3.14)
|
||||||
|
expect('0').toEvaluateTo(0)
|
||||||
|
expect('999999').toEvaluateTo(999999)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('negative hex, binary, and octal', () => {
|
||||||
|
expect('-0xFF').toEvaluateTo(-255)
|
||||||
|
expect('-0b1010').toEvaluateTo(-10)
|
||||||
|
expect('-0o755').toEvaluateTo(-493)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('positive prefix', () => {
|
||||||
|
expect('+0xFF').toEvaluateTo(255)
|
||||||
|
expect('+0b110').toEvaluateTo(6)
|
||||||
|
expect('+0o644').toEvaluateTo(420)
|
||||||
|
expect('+42').toEvaluateTo(42)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
describe('array literals', () => {
|
describe('array literals', () => {
|
||||||
test('work with numbers', () => {
|
test('work with numbers', () => {
|
||||||
expect('[1 2 3]').toEvaluateTo([1, 2, 3])
|
expect('[1 2 3]').toEvaluateTo([1, 2, 3])
|
||||||
|
|
@ -66,8 +115,8 @@ describe('array literals', () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
test('comments within arrays', () => {
|
test('comments within arrays', () => {
|
||||||
expect(`[1 # first
|
expect(`[1
|
||||||
2 # second
|
2
|
||||||
]`).toEvaluateTo([1, 2])
|
]`).toEvaluateTo([1, 2])
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -102,18 +151,22 @@ describe('array literals', () => {
|
||||||
describe('dict literals', () => {
|
describe('dict literals', () => {
|
||||||
test('work with numbers', () => {
|
test('work with numbers', () => {
|
||||||
expect('[a=1 b=2 c=3]').toEvaluateTo({ a: 1, b: 2, c: 3 })
|
expect('[a=1 b=2 c=3]').toEvaluateTo({ a: 1, b: 2, c: 3 })
|
||||||
|
expect('[a = 1 b = 2 c = 3]').toEvaluateTo({ a: 1, b: 2, c: 3 })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('work with strings', () => {
|
test('work with strings', () => {
|
||||||
expect("[a='one' b='two' c='three']").toEvaluateTo({ a: 'one', b: 'two', c: 'three' })
|
expect("[a='one' b='two' c='three']").toEvaluateTo({ a: 'one', b: 'two', c: 'three' })
|
||||||
|
expect("[a = 'one' b = 'two' c = 'three']").toEvaluateTo({ a: 'one', b: 'two', c: 'three' })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('work with identifiers', () => {
|
test('work with identifiers', () => {
|
||||||
expect('[a=one b=two c=three]').toEvaluateTo({ a: 'one', b: 'two', c: 'three' })
|
expect('[a=one b=two c=three]').toEvaluateTo({ a: 'one', b: 'two', c: 'three' })
|
||||||
|
expect('[a = one b = two c = three]').toEvaluateTo({ a: 'one', b: 'two', c: 'three' })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('can be nested', () => {
|
test('can be nested', () => {
|
||||||
expect('[a=one b=[two [c=three]]]').toEvaluateTo({ a: 'one', b: ['two', { c: 'three' }] })
|
expect('[a=one b=[two [c=three]]]').toEvaluateTo({ a: 'one', b: ['two', { c: 'three' }] })
|
||||||
|
expect('[a = one b = [two [c = three]]]').toEvaluateTo({ a: 'one', b: ['two', { c: 'three' }] })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('can span multiple lines', () => {
|
test('can span multiple lines', () => {
|
||||||
|
|
@ -122,6 +175,12 @@ describe('dict literals', () => {
|
||||||
b=2
|
b=2
|
||||||
c=3
|
c=3
|
||||||
]`).toEvaluateTo({ a: 1, b: 2, c: 3 })
|
]`).toEvaluateTo({ a: 1, b: 2, c: 3 })
|
||||||
|
|
||||||
|
expect(`[
|
||||||
|
a = 1
|
||||||
|
b = 2
|
||||||
|
c = 3
|
||||||
|
]`).toEvaluateTo({ a: 1, b: 2, c: 3 })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('empty dict', () => {
|
test('empty dict', () => {
|
||||||
|
|
@ -141,10 +200,12 @@ describe('dict literals', () => {
|
||||||
|
|
||||||
test('semicolons as separators', () => {
|
test('semicolons as separators', () => {
|
||||||
expect('[a=1; b=2; c=3]').toEvaluateTo({ a: 1, b: 2, c: 3 })
|
expect('[a=1; b=2; c=3]').toEvaluateTo({ a: 1, b: 2, c: 3 })
|
||||||
|
expect('[a = 1; b = 2; c = 3]').toEvaluateTo({ a: 1, b: 2, c: 3 })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('expressions in dicts', () => {
|
test('expressions in dicts', () => {
|
||||||
expect('[a=(1 + 2) b=(3 * 4)]').toEvaluateTo({ a: 3, b: 12 })
|
expect('[a=(1 + 2) b=(3 * 4)]').toEvaluateTo({ a: 3, b: 12 })
|
||||||
|
expect('[a = (1 + 2) b = (3 * 4)]').toEvaluateTo({ a: 3, b: 12 })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('empty lines within dicts', () => {
|
test('empty lines within dicts', () => {
|
||||||
|
|
@ -155,3 +216,69 @@ describe('dict literals', () => {
|
||||||
c=3]`).toEvaluateTo({ a: 1, b: 2, c: 3 })
|
c=3]`).toEvaluateTo({ a: 1, b: 2, c: 3 })
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('curly strings', () => {
|
||||||
|
test('work on one line', () => {
|
||||||
|
expect('{ one two three }').toEvaluateTo(" one two three ")
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work on multiple lines', () => {
|
||||||
|
expect(`{
|
||||||
|
one
|
||||||
|
two
|
||||||
|
three
|
||||||
|
}`).toEvaluateTo("\n one\n two\n three\n ")
|
||||||
|
})
|
||||||
|
|
||||||
|
test('can contain other curlies', () => {
|
||||||
|
expect(`{
|
||||||
|
{ one }
|
||||||
|
two
|
||||||
|
{ three }
|
||||||
|
}`).toEvaluateTo("\n { one }\n two\n { three }\n ")
|
||||||
|
})
|
||||||
|
|
||||||
|
test('interpolates variables', () => {
|
||||||
|
expect(`name = Bob; { Hello $name! }`).toEvaluateTo(` Hello Bob! `)
|
||||||
|
})
|
||||||
|
|
||||||
|
test("doesn't interpolate escaped variables ", () => {
|
||||||
|
expect(`name = Bob; { Hello \\$name }`).toEvaluateTo(` Hello $name `)
|
||||||
|
expect(`a = 1; b = 2; { sum is \\$(a + b)! }`).toEvaluateTo(` sum is $(a + b)! `)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('interpolates expressions', () => {
|
||||||
|
expect(`a = 1; b = 2; { sum is $(a + b)! }`).toEvaluateTo(` sum is 3! `)
|
||||||
|
expect(`a = 1; b = 2; { sum is { $(a + b) }! }`).toEvaluateTo(` sum is { 3 }! `)
|
||||||
|
expect(`a = 1; b = 2; { sum is $(a + (b * b))! }`).toEvaluateTo(` sum is 5! `)
|
||||||
|
expect(`{ This is $({twisted}). }`).toEvaluateTo(` This is twisted. `)
|
||||||
|
expect(`{ This is $({{twisted}}). }`).toEvaluateTo(` This is {twisted}. `)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('interpolation edge cases', () => {
|
||||||
|
expect(`{[a=1 b=2 c={wild}]}`).toEvaluateTo(`[a=1 b=2 c={wild}]`)
|
||||||
|
expect(`a = 1;b = 2;c = 3;{$a $b $c}`).toEvaluateTo(`1 2 3`)
|
||||||
|
expect(`a = 1;b = 2;c = 3;{$(a)$(b)$(c)}`).toEvaluateTo(`123`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('double quoted strings', () => {
|
||||||
|
test("work", () => {
|
||||||
|
expect(`"hello world"`).toEvaluateTo('hello world')
|
||||||
|
})
|
||||||
|
|
||||||
|
test("don't interpolate", () => {
|
||||||
|
expect(`"hello $world"`).toEvaluateTo('hello $world')
|
||||||
|
expect(`"hello $(1 + 2)"`).toEvaluateTo('hello $(1 + 2)')
|
||||||
|
})
|
||||||
|
|
||||||
|
test("equal regular strings", () => {
|
||||||
|
expect(`"hello world" == 'hello world'`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test("can contain newlines", () => {
|
||||||
|
expect(`
|
||||||
|
"hello
|
||||||
|
world"`).toEvaluateTo('hello\n world')
|
||||||
|
})
|
||||||
|
})
|
||||||
292
src/compiler/tests/native-exceptions.test.ts
Normal file
292
src/compiler/tests/native-exceptions.test.ts
Normal file
|
|
@ -0,0 +1,292 @@
|
||||||
|
import { describe, test, expect } from 'bun:test'
|
||||||
|
import { Compiler } from '#compiler/compiler'
|
||||||
|
import { VM } from 'reefvm'
|
||||||
|
|
||||||
|
describe('Native Function Exceptions', () => {
|
||||||
|
test('native function error caught by try/catch', async () => {
|
||||||
|
const code = `
|
||||||
|
result = try:
|
||||||
|
failing-fn
|
||||||
|
catch e:
|
||||||
|
'caught: ' + e
|
||||||
|
end
|
||||||
|
|
||||||
|
result
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('failing-fn', () => {
|
||||||
|
throw new Error('native function failed')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'string', value: 'caught: native function failed' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('async native function error caught by try/catch', async () => {
|
||||||
|
const code = `
|
||||||
|
result = try:
|
||||||
|
async-fail
|
||||||
|
catch e:
|
||||||
|
'async caught: ' + e
|
||||||
|
end
|
||||||
|
|
||||||
|
result
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('async-fail', async () => {
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 1))
|
||||||
|
throw new Error('async error')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'string', value: 'async caught: async error' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('native function with arguments throwing error', async () => {
|
||||||
|
const code = `
|
||||||
|
result = try:
|
||||||
|
read-file missing.txt
|
||||||
|
catch e:
|
||||||
|
'default content'
|
||||||
|
end
|
||||||
|
|
||||||
|
result
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('read-file', (path: string) => {
|
||||||
|
if (path === 'missing.txt') {
|
||||||
|
throw new Error('file not found')
|
||||||
|
}
|
||||||
|
return 'file contents'
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'string', value: 'default content' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('native function error with finally block', async () => {
|
||||||
|
const code = `
|
||||||
|
cleanup-count = 0
|
||||||
|
|
||||||
|
result = try:
|
||||||
|
failing-fn
|
||||||
|
catch e:
|
||||||
|
'error handled'
|
||||||
|
finally:
|
||||||
|
cleanup-count = cleanup-count + 1
|
||||||
|
end
|
||||||
|
|
||||||
|
cleanup-count
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('failing-fn', () => {
|
||||||
|
throw new Error('native error')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'number', value: 1 })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('native function error without catch propagates', async () => {
|
||||||
|
const code = `
|
||||||
|
failing-fn
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('failing-fn', () => {
|
||||||
|
throw new Error('uncaught error')
|
||||||
|
})
|
||||||
|
|
||||||
|
await expect(vm.run()).rejects.toThrow('uncaught error')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('native function in function-level catch', async () => {
|
||||||
|
const code = `
|
||||||
|
safe-read = do path:
|
||||||
|
read-file path
|
||||||
|
catch e:
|
||||||
|
'default: ' + e
|
||||||
|
end
|
||||||
|
|
||||||
|
result = safe-read missing.txt
|
||||||
|
result
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('read-file', (path: string) => {
|
||||||
|
throw new Error('file not found: ' + path)
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'string', value: 'default: file not found: missing.txt' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('nested native function errors', async () => {
|
||||||
|
const code = `
|
||||||
|
result = try:
|
||||||
|
try:
|
||||||
|
inner-fail
|
||||||
|
catch e:
|
||||||
|
throw 'wrapped: ' + e
|
||||||
|
end
|
||||||
|
catch e:
|
||||||
|
'outer caught: ' + e
|
||||||
|
end
|
||||||
|
|
||||||
|
result
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('inner-fail', () => {
|
||||||
|
throw new Error('inner error')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'string', value: 'outer caught: wrapped: inner error' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('native function error with multiple named args', async () => {
|
||||||
|
const code = `
|
||||||
|
result = try:
|
||||||
|
process-file path=missing.txt mode=strict
|
||||||
|
catch e:
|
||||||
|
'error: ' + e
|
||||||
|
end
|
||||||
|
|
||||||
|
result
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('process-file', (path: string, mode: string = 'lenient') => {
|
||||||
|
if (mode === 'strict' && path === 'missing.txt') {
|
||||||
|
throw new Error('strict mode: file required')
|
||||||
|
}
|
||||||
|
return 'processed'
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'string', value: 'error: strict mode: file required' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('native function returning normally after other functions threw', async () => {
|
||||||
|
const code = `
|
||||||
|
result1 = try:
|
||||||
|
failing-fn
|
||||||
|
catch e:
|
||||||
|
'caught'
|
||||||
|
end
|
||||||
|
|
||||||
|
result2 = success-fn
|
||||||
|
|
||||||
|
result1 + ' then ' + result2
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('failing-fn', () => {
|
||||||
|
throw new Error('error')
|
||||||
|
})
|
||||||
|
|
||||||
|
vm.set('success-fn', () => {
|
||||||
|
return 'success'
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'string', value: 'caught then success' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('native function error message preserved', async () => {
|
||||||
|
const code = `
|
||||||
|
result = try:
|
||||||
|
throw-custom-message
|
||||||
|
catch e:
|
||||||
|
e
|
||||||
|
end
|
||||||
|
|
||||||
|
result
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('throw-custom-message', () => {
|
||||||
|
throw new Error('This is a very specific error message with details')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({
|
||||||
|
type: 'string',
|
||||||
|
value: 'This is a very specific error message with details'
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
test('native function throwing non-Error value', async () => {
|
||||||
|
const code = `
|
||||||
|
result = try:
|
||||||
|
throw-string
|
||||||
|
catch e:
|
||||||
|
'caught: ' + e
|
||||||
|
end
|
||||||
|
|
||||||
|
result
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('throw-string', () => {
|
||||||
|
throw 'plain string error'
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result).toEqual({ type: 'string', value: 'caught: plain string error' })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('multiple native function calls with mixed success/failure', async () => {
|
||||||
|
const code = `
|
||||||
|
r1 = try: success-fn catch e: 'error' end
|
||||||
|
r2 = try: failing-fn catch e: 'caught' end
|
||||||
|
r3 = try: success-fn catch e: 'error' end
|
||||||
|
|
||||||
|
results = [r1 r2 r3]
|
||||||
|
results
|
||||||
|
`
|
||||||
|
|
||||||
|
const compiler = new Compiler(code)
|
||||||
|
const vm = new VM(compiler.bytecode)
|
||||||
|
|
||||||
|
vm.set('success-fn', () => 'ok')
|
||||||
|
vm.set('failing-fn', () => {
|
||||||
|
throw new Error('failed')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await vm.run()
|
||||||
|
expect(result.type).toBe('array')
|
||||||
|
const arr = result.value as any[]
|
||||||
|
expect(arr.length).toBe(3)
|
||||||
|
expect(arr[0]).toEqual({ type: 'string', value: 'ok' })
|
||||||
|
expect(arr[1]).toEqual({ type: 'string', value: 'caught' })
|
||||||
|
expect(arr[2]).toEqual({ type: 'string', value: 'ok' })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -78,4 +78,43 @@ describe('pipe expressions', () => {
|
||||||
div = do a b: a / b end
|
div = do a b: a / b end
|
||||||
sub 3 1 | div (sub 110 9 | sub 1) _ | div 5`).toEvaluateTo(10)
|
sub 3 1 | div (sub 110 9 | sub 1) _ | div 5`).toEvaluateTo(10)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('pipe with prelude functions (list.reverse and list.map)', () => {
|
||||||
|
expect(`
|
||||||
|
double = do x: x * 2 end
|
||||||
|
range 1 3 | list.reverse | list.map double
|
||||||
|
`).toEvaluateTo([6, 4, 2])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('pipe with prelude function (echo)', () => {
|
||||||
|
expect(`
|
||||||
|
get-msg = do: 'hello' end
|
||||||
|
get-msg | length
|
||||||
|
`).toEvaluateTo(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('string literals can be piped', () => {
|
||||||
|
expect(`'hey there' | str.to-upper`).toEvaluateTo('HEY THERE')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('number literals can be piped', () => {
|
||||||
|
expect(`42 | str.trim`).toEvaluateTo('42')
|
||||||
|
expect(`4.22 | str.trim`).toEvaluateTo('4.22')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('null literals can be piped', () => {
|
||||||
|
expect(`null | type`).toEvaluateTo('null')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('boolean literals can be piped', () => {
|
||||||
|
expect(`true | str.to-upper`).toEvaluateTo('TRUE')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('array literals can be piped', () => {
|
||||||
|
expect(`[1 2 3] | str.join '-'`).toEvaluateTo('1-2-3')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dict literals can be piped', () => {
|
||||||
|
expect(`[a=1 b=2 c=3] | dict.values | list.sort | str.join '-'`).toEvaluateTo('1-2-3')
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
|
||||||
115
src/compiler/tests/ribbit.test.ts
Normal file
115
src/compiler/tests/ribbit.test.ts
Normal file
|
|
@ -0,0 +1,115 @@
|
||||||
|
import { expect, describe, test, beforeEach } from 'bun:test'
|
||||||
|
|
||||||
|
const buffer: string[] = []
|
||||||
|
|
||||||
|
const ribbitGlobals = {
|
||||||
|
ribbit: async (cb: Function) => {
|
||||||
|
await cb()
|
||||||
|
return buffer.join("\n")
|
||||||
|
},
|
||||||
|
tag: async (tagFn: Function, atDefaults = {}) => {
|
||||||
|
return (atNamed = {}, ...args: any[]) => tagFn(Object.assign({}, atDefaults, atNamed), ...args)
|
||||||
|
},
|
||||||
|
head: (atNamed: {}, ...args: any[]) => tag('head', atNamed, ...args),
|
||||||
|
title: (atNamed: {}, ...args: any[]) => tag('title', atNamed, ...args),
|
||||||
|
meta: (atNamed: {}, ...args: any[]) => tag('meta', atNamed, ...args),
|
||||||
|
p: (atNamed: {}, ...args: any[]) => tag('p', atNamed, ...args),
|
||||||
|
h1: (atNamed: {}, ...args: any[]) => tag('h1', atNamed, ...args),
|
||||||
|
h2: (atNamed: {}, ...args: any[]) => tag('h2', atNamed, ...args),
|
||||||
|
b: (atNamed: {}, ...args: any[]) => tag('b', atNamed, ...args),
|
||||||
|
ul: (atNamed: {}, ...args: any[]) => tag('ul', atNamed, ...args),
|
||||||
|
li: (atNamed: {}, ...args: any[]) => tag('li', atNamed, ...args),
|
||||||
|
nospace: () => NOSPACE_TOKEN,
|
||||||
|
echo: (...args: any[]) => console.log(...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
function raw(fn: Function) { (fn as any).raw = true }
|
||||||
|
|
||||||
|
const tagBlock = async (tagName: string, props = {}, fn: Function) => {
|
||||||
|
const attrs = Object.entries(props).map(([key, value]) => `${key}="${value}"`)
|
||||||
|
const space = attrs.length ? ' ' : ''
|
||||||
|
|
||||||
|
buffer.push(`<${tagName}${space}${attrs.join(' ')}>`)
|
||||||
|
await fn()
|
||||||
|
buffer.push(`</${tagName}>`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const tagCall = (tagName: string, atNamed = {}, ...args: any[]) => {
|
||||||
|
const attrs = Object.entries(atNamed).map(([key, value]) => `${key}="${value}"`)
|
||||||
|
const space = attrs.length ? ' ' : ''
|
||||||
|
const children = args
|
||||||
|
.reverse()
|
||||||
|
.map(a => a === TAG_TOKEN ? buffer.pop() : a)
|
||||||
|
.reverse().join(' ')
|
||||||
|
.replaceAll(` ${NOSPACE_TOKEN} `, '')
|
||||||
|
|
||||||
|
if (SELF_CLOSING.includes(tagName))
|
||||||
|
buffer.push(`<${tagName}${space}${attrs.join(' ')} />`)
|
||||||
|
else
|
||||||
|
buffer.push(`<${tagName}${space}${attrs.join(' ')}>${children}</${tagName}>`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const tag = async (tagName: string, atNamed = {}, ...args: any[]) => {
|
||||||
|
if (typeof args[0] === 'function') {
|
||||||
|
await tagBlock(tagName, atNamed, args[0])
|
||||||
|
} else {
|
||||||
|
tagCall(tagName, atNamed, ...args)
|
||||||
|
return TAG_TOKEN
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const NOSPACE_TOKEN = '!!ribbit-nospace!!'
|
||||||
|
const TAG_TOKEN = '!!ribbit-tag!!'
|
||||||
|
const SELF_CLOSING = ["area", "base", "br", "col", "embed", "hr", "img", "input", "link", "meta", "param", "source", "track", "wbr"]
|
||||||
|
|
||||||
|
describe('ribbit', () => {
|
||||||
|
beforeEach(() => buffer.length = 0)
|
||||||
|
|
||||||
|
test('head tag', () => {
|
||||||
|
expect(`
|
||||||
|
ribbit:
|
||||||
|
head:
|
||||||
|
title What up
|
||||||
|
meta charset=UTF-8
|
||||||
|
meta name=viewport content='width=device-width, initial-scale=1, viewport-fit=cover'
|
||||||
|
end
|
||||||
|
end
|
||||||
|
`).toEvaluateTo(`<head>
|
||||||
|
<title>What up</title>
|
||||||
|
<meta charset="UTF-8" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" />
|
||||||
|
</head>`, ribbitGlobals)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('custom tags', () => {
|
||||||
|
expect(`
|
||||||
|
list = tag ul class='list'
|
||||||
|
ribbit:
|
||||||
|
list:
|
||||||
|
li border-bottom='1px solid black' one
|
||||||
|
li two
|
||||||
|
li three
|
||||||
|
end
|
||||||
|
end`).toEvaluateTo(`<ul class="list">
|
||||||
|
<li border-bottom="1px solid black">one</li>
|
||||||
|
<li>two</li>
|
||||||
|
<li>three</li>
|
||||||
|
</ul>`, ribbitGlobals)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('inline expressions', () => {
|
||||||
|
expect(`
|
||||||
|
ribbit:
|
||||||
|
p class=container:
|
||||||
|
h1 class=bright style='font-family: helvetica' Heya
|
||||||
|
h2 man that is (b wild) (nospace) !
|
||||||
|
p Double the fun.
|
||||||
|
end
|
||||||
|
end`).toEvaluateTo(
|
||||||
|
`<p class="container">
|
||||||
|
<h1 class="bright" style="font-family: helvetica">Heya</h1>
|
||||||
|
<h2>man that is <b>wild</b>!</h2>
|
||||||
|
<p>Double the fun.</p>
|
||||||
|
</p>`, ribbitGlobals)
|
||||||
|
})
|
||||||
|
})
|
||||||
48
src/compiler/tests/while.test.ts
Normal file
48
src/compiler/tests/while.test.ts
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
import { describe } from 'bun:test'
|
||||||
|
import { expect, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('while', () => {
|
||||||
|
test('basic variable', () => {
|
||||||
|
expect(`
|
||||||
|
a = true
|
||||||
|
b = ''
|
||||||
|
while a:
|
||||||
|
a = false
|
||||||
|
b = done
|
||||||
|
end
|
||||||
|
b`)
|
||||||
|
.toEvaluateTo('done')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('basic expression', () => {
|
||||||
|
expect(`
|
||||||
|
a = 0
|
||||||
|
while a < 10:
|
||||||
|
a += 1
|
||||||
|
end
|
||||||
|
a`)
|
||||||
|
.toEvaluateTo(10)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('compound expression', () => {
|
||||||
|
expect(`
|
||||||
|
a = 1
|
||||||
|
b = 0
|
||||||
|
while a > 0 and b < 100:
|
||||||
|
b += 1
|
||||||
|
end
|
||||||
|
b`)
|
||||||
|
.toEvaluateTo(100)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns value', () => {
|
||||||
|
expect(`
|
||||||
|
a = 0
|
||||||
|
ret = while a < 10:
|
||||||
|
a += 1
|
||||||
|
done
|
||||||
|
end
|
||||||
|
ret`)
|
||||||
|
.toEvaluateTo('done')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,9 +1,9 @@
|
||||||
import { CompilerError } from '#compiler/compilerError.ts'
|
import { CompilerError } from '#compiler/compilerError.ts'
|
||||||
import * as terms from '#parser/shrimp.terms'
|
import type { SyntaxNode, Tree } from '#parser/node'
|
||||||
import type { SyntaxNode, Tree } from '@lezer/common'
|
|
||||||
|
|
||||||
export const checkTreeForErrors = (tree: Tree): CompilerError[] => {
|
export const checkTreeForErrors = (tree: Tree): CompilerError[] => {
|
||||||
const errors: CompilerError[] = []
|
const errors: CompilerError[] = []
|
||||||
|
|
||||||
tree.iterate({
|
tree.iterate({
|
||||||
enter: (node) => {
|
enter: (node) => {
|
||||||
if (node.type.isError) {
|
if (node.type.isError) {
|
||||||
|
|
@ -22,7 +22,8 @@ export const getAllChildren = (node: SyntaxNode): SyntaxNode[] => {
|
||||||
children.push(child)
|
children.push(child)
|
||||||
child = child.nextSibling
|
child = child.nextSibling
|
||||||
}
|
}
|
||||||
return children
|
|
||||||
|
return children.filter((n) => !n.type.is('Comment'))
|
||||||
}
|
}
|
||||||
|
|
||||||
export const getBinaryParts = (node: SyntaxNode) => {
|
export const getBinaryParts = (node: SyntaxNode) => {
|
||||||
|
|
@ -49,14 +50,15 @@ export const getAssignmentParts = (node: SyntaxNode) => {
|
||||||
}
|
}
|
||||||
|
|
||||||
// array destructuring
|
// array destructuring
|
||||||
if (left && left.type.id === terms.Array) {
|
if (left && left.type.is('Array')) {
|
||||||
const identifiers = getAllChildren(left).filter(child => child.type.id === terms.Identifier)
|
const identifiers = getAllChildren(left).filter((child) => child.type.is('Identifier'))
|
||||||
return { arrayPattern: identifiers, right }
|
return { arrayPattern: identifiers, right }
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!left || left.type.id !== terms.AssignableIdentifier) {
|
if (!left || !left.type.is('AssignableIdentifier')) {
|
||||||
throw new CompilerError(
|
throw new CompilerError(
|
||||||
`Assign left child must be an AssignableIdentifier or Array, got ${left ? left.type.name : 'none'}`,
|
`Assign left child must be an AssignableIdentifier or Array, got ${left ? left.type.name : 'none'
|
||||||
|
}`,
|
||||||
node.from,
|
node.from,
|
||||||
node.to
|
node.to
|
||||||
)
|
)
|
||||||
|
|
@ -65,22 +67,44 @@ export const getAssignmentParts = (node: SyntaxNode) => {
|
||||||
return { identifier: left, right }
|
return { identifier: left, right }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const getCompoundAssignmentParts = (node: SyntaxNode) => {
|
||||||
|
const children = getAllChildren(node)
|
||||||
|
const [left, operator, right] = children
|
||||||
|
|
||||||
|
if (!left || !left.type.is('AssignableIdentifier')) {
|
||||||
|
throw new CompilerError(
|
||||||
|
`CompoundAssign left child must be an AssignableIdentifier, got ${left ? left.type.name : 'none'
|
||||||
|
}`,
|
||||||
|
node.from,
|
||||||
|
node.to
|
||||||
|
)
|
||||||
|
} else if (!operator || !right) {
|
||||||
|
throw new CompilerError(
|
||||||
|
`CompoundAssign expected 3 children, got ${children.length}`,
|
||||||
|
node.from,
|
||||||
|
node.to
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return { identifier: left, operator, right }
|
||||||
|
}
|
||||||
|
|
||||||
export const getFunctionDefParts = (node: SyntaxNode, input: string) => {
|
export const getFunctionDefParts = (node: SyntaxNode, input: string) => {
|
||||||
const children = getAllChildren(node)
|
const children = getAllChildren(node)
|
||||||
const [fnKeyword, paramsNode, colon, ...bodyNodes] = children
|
const [fnKeyword, paramsNode, colon, ...rest] = children
|
||||||
|
|
||||||
if (!fnKeyword || !paramsNode || !colon || !bodyNodes) {
|
if (!fnKeyword || !paramsNode || !colon || !rest) {
|
||||||
throw new CompilerError(
|
throw new CompilerError(
|
||||||
`FunctionDef expected 5 children, got ${children.length}`,
|
`FunctionDef expected at least 4 children, got ${children.length}`,
|
||||||
node.from,
|
node.from,
|
||||||
node.to
|
node.to
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const paramNames = getAllChildren(paramsNode).map((param) => {
|
const paramNames = getAllChildren(paramsNode).map((param) => {
|
||||||
if (param.type.id !== terms.Identifier) {
|
if (!param.type.is('Identifier') && !param.type.is('NamedParam')) {
|
||||||
throw new CompilerError(
|
throw new CompilerError(
|
||||||
`FunctionDef params must be Identifier, got ${param.type.name}`,
|
`FunctionDef params must be Identifier or NamedParam, got ${param.type.name}`,
|
||||||
param.from,
|
param.from,
|
||||||
param.to
|
param.to
|
||||||
)
|
)
|
||||||
|
|
@ -88,8 +112,48 @@ export const getFunctionDefParts = (node: SyntaxNode, input: string) => {
|
||||||
return input.slice(param.from, param.to)
|
return input.slice(param.from, param.to)
|
||||||
})
|
})
|
||||||
|
|
||||||
const bodyWithoutEnd = bodyNodes.slice(0, -1)
|
// Separate body nodes from catch/finally/end
|
||||||
return { paramNames, bodyNodes: bodyWithoutEnd }
|
const bodyNodes: SyntaxNode[] = []
|
||||||
|
let catchExpr: SyntaxNode | undefined
|
||||||
|
let catchVariable: string | undefined
|
||||||
|
let catchBody: SyntaxNode | undefined
|
||||||
|
let finallyExpr: SyntaxNode | undefined
|
||||||
|
let finallyBody: SyntaxNode | undefined
|
||||||
|
|
||||||
|
for (const child of rest) {
|
||||||
|
if (child.type.is('CatchExpr')) {
|
||||||
|
catchExpr = child
|
||||||
|
const catchChildren = getAllChildren(child)
|
||||||
|
const [_catchKeyword, identifierNode, _colon, body] = catchChildren
|
||||||
|
if (!identifierNode || !body) {
|
||||||
|
throw new CompilerError(
|
||||||
|
`CatchExpr expected identifier and body, got ${catchChildren.length} children`,
|
||||||
|
child.from,
|
||||||
|
child.to
|
||||||
|
)
|
||||||
|
}
|
||||||
|
catchVariable = input.slice(identifierNode.from, identifierNode.to)
|
||||||
|
catchBody = body
|
||||||
|
} else if (child.type.is('FinallyExpr')) {
|
||||||
|
finallyExpr = child
|
||||||
|
const finallyChildren = getAllChildren(child)
|
||||||
|
const [_finallyKeyword, _colon, body] = finallyChildren
|
||||||
|
if (!body) {
|
||||||
|
throw new CompilerError(
|
||||||
|
`FinallyExpr expected body, got ${finallyChildren.length} children`,
|
||||||
|
child.from,
|
||||||
|
child.to
|
||||||
|
)
|
||||||
|
}
|
||||||
|
finallyBody = body
|
||||||
|
} else if (child.type.name === 'keyword' && input.slice(child.from, child.to) === 'end') {
|
||||||
|
// Skip the end keyword
|
||||||
|
} else {
|
||||||
|
bodyNodes.push(child)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { paramNames, bodyNodes, catchVariable, catchBody, finallyBody }
|
||||||
}
|
}
|
||||||
|
|
||||||
export const getFunctionCallParts = (node: SyntaxNode, input: string) => {
|
export const getFunctionCallParts = (node: SyntaxNode, input: string) => {
|
||||||
|
|
@ -99,9 +163,9 @@ export const getFunctionCallParts = (node: SyntaxNode, input: string) => {
|
||||||
throw new CompilerError(`FunctionCall expected at least 1 child, got 0`, node.from, node.to)
|
throw new CompilerError(`FunctionCall expected at least 1 child, got 0`, node.from, node.to)
|
||||||
}
|
}
|
||||||
|
|
||||||
const namedArgs = args.filter((arg) => arg.type.id === terms.NamedArg)
|
const namedArgs = args.filter((arg) => arg.type.is('NamedArg'))
|
||||||
const positionalArgs = args
|
const positionalArgs = args
|
||||||
.filter((arg) => arg.type.id === terms.PositionalArg)
|
.filter((arg) => arg.type.is('PositionalArg'))
|
||||||
.map((arg) => {
|
.map((arg) => {
|
||||||
const child = arg.firstChild
|
const child = arg.firstChild
|
||||||
if (!child) throw new CompilerError(`PositionalArg has no child`, arg.from, arg.to)
|
if (!child) throw new CompilerError(`PositionalArg has no child`, arg.from, arg.to)
|
||||||
|
|
@ -142,14 +206,14 @@ export const getIfExprParts = (node: SyntaxNode, input: string) => {
|
||||||
rest.forEach((child) => {
|
rest.forEach((child) => {
|
||||||
const parts = getAllChildren(child)
|
const parts = getAllChildren(child)
|
||||||
|
|
||||||
if (child.type.id === terms.ElseExpr) {
|
if (child.type.is('ElseExpr')) {
|
||||||
if (parts.length !== 3) {
|
if (parts.length !== 3) {
|
||||||
const message = `ElseExpr expected 1 child, got ${parts.length}`
|
const message = `ElseExpr expected 1 child, got ${parts.length}`
|
||||||
throw new CompilerError(message, child.from, child.to)
|
throw new CompilerError(message, child.from, child.to)
|
||||||
}
|
}
|
||||||
elseThenBlock = parts.at(-1)
|
elseThenBlock = parts.at(-1)
|
||||||
} else if (child.type.id === terms.ElseIfExpr) {
|
} else if (child.type.is('ElseIfExpr')) {
|
||||||
const [_keyword, conditional, _colon, thenBlock] = parts
|
const [_else, _if, conditional, _colon, thenBlock] = parts
|
||||||
if (!conditional || !thenBlock) {
|
if (!conditional || !thenBlock) {
|
||||||
const names = parts.map((p) => p.type.name).join(', ')
|
const names = parts.map((p) => p.type.name).join(', ')
|
||||||
const message = `ElseIfExpr expected conditional and thenBlock, got ${names}`
|
const message = `ElseIfExpr expected conditional and thenBlock, got ${names}`
|
||||||
|
|
@ -183,18 +247,21 @@ export const getStringParts = (node: SyntaxNode, input: string) => {
|
||||||
// The text is just between the quotes
|
// The text is just between the quotes
|
||||||
const parts = children.filter((child) => {
|
const parts = children.filter((child) => {
|
||||||
return (
|
return (
|
||||||
child.type.id === terms.StringFragment ||
|
child.type.is('StringFragment') ||
|
||||||
child.type.id === terms.Interpolation ||
|
child.type.is('Interpolation') ||
|
||||||
child.type.id === terms.EscapeSeq
|
child.type.is('EscapeSeq') ||
|
||||||
|
child.type.is('CurlyString')
|
||||||
|
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
// Validate each part is the expected type
|
// Validate each part is the expected type
|
||||||
parts.forEach((part) => {
|
parts.forEach((part) => {
|
||||||
if (
|
if (
|
||||||
part.type.id !== terms.StringFragment &&
|
part.type.is('StringFragment') &&
|
||||||
part.type.id !== terms.Interpolation &&
|
part.type.is('Interpolation') &&
|
||||||
part.type.id !== terms.EscapeSeq
|
part.type.is('EscapeSeq') &&
|
||||||
|
part.type.is('CurlyString')
|
||||||
) {
|
) {
|
||||||
throw new CompilerError(
|
throw new CompilerError(
|
||||||
`String child must be StringFragment, Interpolation, or EscapeSeq, got ${part.type.name}`,
|
`String child must be StringFragment, Interpolation, or EscapeSeq, got ${part.type.name}`,
|
||||||
|
|
@ -204,7 +271,12 @@ export const getStringParts = (node: SyntaxNode, input: string) => {
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
return { parts, hasInterpolation: parts.length > 0 }
|
// hasInterpolation means the string has interpolation ($var) or escape sequences (\n)
|
||||||
|
// A simple string like 'hello' has one StringFragment but no interpolation
|
||||||
|
const hasInterpolation = parts.some(
|
||||||
|
(p) => p.type.is('Interpolation') || p.type.is('EscapeSeq')
|
||||||
|
)
|
||||||
|
return { parts, hasInterpolation }
|
||||||
}
|
}
|
||||||
|
|
||||||
export const getDotGetParts = (node: SyntaxNode, input: string) => {
|
export const getDotGetParts = (node: SyntaxNode, input: string) => {
|
||||||
|
|
@ -219,7 +291,7 @@ export const getDotGetParts = (node: SyntaxNode, input: string) => {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (object.type.id !== terms.IdentifierBeforeDot) {
|
if (!object.type.is('IdentifierBeforeDot')) {
|
||||||
throw new CompilerError(
|
throw new CompilerError(
|
||||||
`DotGet object must be an IdentifierBeforeDot, got ${object.type.name}`,
|
`DotGet object must be an IdentifierBeforeDot, got ${object.type.name}`,
|
||||||
object.from,
|
object.from,
|
||||||
|
|
@ -227,9 +299,9 @@ export const getDotGetParts = (node: SyntaxNode, input: string) => {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (![terms.Identifier, terms.Number, terms.ParenExpr].includes(property.type.id)) {
|
if (!['Identifier', 'Number', 'ParenExpr', 'DotGet'].includes(property.type.name)) {
|
||||||
throw new CompilerError(
|
throw new CompilerError(
|
||||||
`DotGet property must be an Identifier or Number, got ${property.type.name}`,
|
`DotGet property must be an Identifier, Number, ParenExpr, or DotGet, got ${property.type.name}`,
|
||||||
property.from,
|
property.from,
|
||||||
property.to
|
property.to
|
||||||
)
|
)
|
||||||
|
|
@ -237,5 +309,64 @@ export const getDotGetParts = (node: SyntaxNode, input: string) => {
|
||||||
|
|
||||||
const objectName = input.slice(object.from, object.to)
|
const objectName = input.slice(object.from, object.to)
|
||||||
|
|
||||||
return { objectName, property }
|
return { object, objectName, property }
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getTryExprParts = (node: SyntaxNode, input: string) => {
|
||||||
|
const children = getAllChildren(node)
|
||||||
|
|
||||||
|
// First child is always 'try' keyword, second is colon, third is Block
|
||||||
|
const [tryKeyword, _colon, tryBlock, ...rest] = children
|
||||||
|
|
||||||
|
if (!tryKeyword || !tryBlock) {
|
||||||
|
throw new CompilerError(
|
||||||
|
`TryExpr expected at least 3 children, got ${children.length}`,
|
||||||
|
node.from,
|
||||||
|
node.to
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
let catchExpr: SyntaxNode | undefined
|
||||||
|
let catchVariable: string | undefined
|
||||||
|
let catchBody: SyntaxNode | undefined
|
||||||
|
let finallyExpr: SyntaxNode | undefined
|
||||||
|
let finallyBody: SyntaxNode | undefined
|
||||||
|
|
||||||
|
rest.forEach((child) => {
|
||||||
|
if (child.type.is('CatchExpr')) {
|
||||||
|
catchExpr = child
|
||||||
|
const catchChildren = getAllChildren(child)
|
||||||
|
const [_catchKeyword, identifierNode, _colon, body] = catchChildren
|
||||||
|
if (!identifierNode || !body) {
|
||||||
|
throw new CompilerError(
|
||||||
|
`CatchExpr expected identifier and body, got ${catchChildren.length} children`,
|
||||||
|
child.from,
|
||||||
|
child.to
|
||||||
|
)
|
||||||
|
}
|
||||||
|
catchVariable = input.slice(identifierNode.from, identifierNode.to)
|
||||||
|
catchBody = body
|
||||||
|
} else if (child.type.is('FinallyExpr')) {
|
||||||
|
finallyExpr = child
|
||||||
|
const finallyChildren = getAllChildren(child)
|
||||||
|
const [_finallyKeyword, _colon, body] = finallyChildren
|
||||||
|
if (!body) {
|
||||||
|
throw new CompilerError(
|
||||||
|
`FinallyExpr expected body, got ${finallyChildren.length} children`,
|
||||||
|
child.from,
|
||||||
|
child.to
|
||||||
|
)
|
||||||
|
}
|
||||||
|
finallyBody = body
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return {
|
||||||
|
tryBlock,
|
||||||
|
catchExpr,
|
||||||
|
catchVariable,
|
||||||
|
catchBody,
|
||||||
|
finallyExpr,
|
||||||
|
finallyBody,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -245,7 +245,7 @@ const commandShapes: CommandShape[] = [
|
||||||
] as const
|
] as const
|
||||||
|
|
||||||
let commandSource = () => commandShapes
|
let commandSource = () => commandShapes
|
||||||
export const setCommandSource = (do: () => CommandShape[]) => {
|
export const setCommandSource = (fn: () => CommandShape[]) => {
|
||||||
commandSource = fn
|
commandSource = fn
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
112
src/index.ts
Normal file
112
src/index.ts
Normal file
|
|
@ -0,0 +1,112 @@
|
||||||
|
import { readFileSync } from 'fs'
|
||||||
|
import { VM, fromValue, toValue, isValue, type Bytecode } from 'reefvm'
|
||||||
|
import { Compiler } from '#compiler/compiler'
|
||||||
|
import { parse } from '#parser/parser2'
|
||||||
|
import { Tree } from '#parser/node'
|
||||||
|
import { globals as parserGlobals, setGlobals as setParserGlobals } from '#parser/parser2'
|
||||||
|
import { globals as prelude } from '#prelude'
|
||||||
|
|
||||||
|
export { Compiler } from '#compiler/compiler'
|
||||||
|
export { parse } from '#parser/parser2'
|
||||||
|
export { type SyntaxNode, Tree } from '#parser/node'
|
||||||
|
export { globals as prelude } from '#prelude'
|
||||||
|
export { type Value, type Bytecode } from 'reefvm'
|
||||||
|
export { toValue, fromValue, isValue, Scope, VM, bytecodeToString } from 'reefvm'
|
||||||
|
|
||||||
|
export class Shrimp {
|
||||||
|
vm: VM
|
||||||
|
private globals?: Record<string, any>
|
||||||
|
|
||||||
|
constructor(globals?: Record<string, any>) {
|
||||||
|
const emptyBytecode = { instructions: [], constants: [], labels: new Map() }
|
||||||
|
this.vm = new VM(emptyBytecode, Object.assign({}, prelude, globals ?? {}))
|
||||||
|
this.globals = globals
|
||||||
|
}
|
||||||
|
|
||||||
|
get(name: string): any {
|
||||||
|
const value = this.vm.scope.get(name)
|
||||||
|
return value ? fromValue(value, this.vm) : null
|
||||||
|
}
|
||||||
|
|
||||||
|
set(name: string, value: any) {
|
||||||
|
this.vm.scope.set(name, toValue(value, this.vm))
|
||||||
|
}
|
||||||
|
|
||||||
|
has(name: string): boolean {
|
||||||
|
return this.vm.scope.has(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
async call(name: string, ...args: any[]): Promise<any> {
|
||||||
|
const result = await this.vm.call(name, ...args)
|
||||||
|
return isValue(result) ? fromValue(result, this.vm) : result
|
||||||
|
}
|
||||||
|
|
||||||
|
parse(code: string): Tree {
|
||||||
|
return parseCode(code, this.globals)
|
||||||
|
}
|
||||||
|
|
||||||
|
compile(code: string): Bytecode {
|
||||||
|
return compileCode(code, this.globals)
|
||||||
|
}
|
||||||
|
|
||||||
|
async run(code: string | Bytecode, locals?: Record<string, any>): Promise<any> {
|
||||||
|
let bytecode
|
||||||
|
|
||||||
|
if (typeof code === 'string') {
|
||||||
|
const compiler = new Compiler(code, Object.keys(Object.assign({}, prelude, this.globals ?? {}, locals ?? {})))
|
||||||
|
bytecode = compiler.bytecode
|
||||||
|
} else {
|
||||||
|
bytecode = code
|
||||||
|
}
|
||||||
|
|
||||||
|
if (locals) this.vm.pushScope(locals)
|
||||||
|
this.vm.appendBytecode(bytecode)
|
||||||
|
await this.vm.continue()
|
||||||
|
if (locals) this.vm.popScope()
|
||||||
|
|
||||||
|
return this.vm.stack.length ? fromValue(this.vm.stack.at(-1)!, this.vm) : null
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runFile(path: string, globals?: Record<string, any>): Promise<any> {
|
||||||
|
const code = readFileSync(path, 'utf-8')
|
||||||
|
return await runCode(code, globals)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runCode(code: string, globals?: Record<string, any>): Promise<any> {
|
||||||
|
return await runBytecode(compileCode(code, globals), globals)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runBytecode(bytecode: Bytecode, globals?: Record<string, any>): Promise<any> {
|
||||||
|
const vm = new VM(bytecode, Object.assign({}, prelude, globals))
|
||||||
|
await vm.run()
|
||||||
|
return vm.stack.length ? fromValue(vm.stack[vm.stack.length - 1]!, vm) : null
|
||||||
|
}
|
||||||
|
|
||||||
|
export function compileFile(path: string, globals?: Record<string, any>): Bytecode {
|
||||||
|
const code = readFileSync(path, 'utf-8')
|
||||||
|
return compileCode(code, globals)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function compileCode(code: string, globals?: Record<string, any>): Bytecode {
|
||||||
|
const globalNames = [...Object.keys(prelude), ...(globals ? Object.keys(globals) : [])]
|
||||||
|
const compiler = new Compiler(code, globalNames)
|
||||||
|
return compiler.bytecode
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseFile(path: string, globals?: Record<string, any>): Tree {
|
||||||
|
const code = readFileSync(path, 'utf-8')
|
||||||
|
return parseCode(code, globals)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseCode(code: string, globals?: Record<string, any>): Tree {
|
||||||
|
const oldGlobals = [...parserGlobals]
|
||||||
|
const globalNames = [...Object.keys(prelude), ...(globals ? Object.keys(globals) : [])]
|
||||||
|
|
||||||
|
setParserGlobals(globalNames)
|
||||||
|
const result = parse(code)
|
||||||
|
setParserGlobals(oldGlobals)
|
||||||
|
|
||||||
|
return new Tree(result)
|
||||||
|
}
|
||||||
62
src/parser/curlyTokenizer.ts
Normal file
62
src/parser/curlyTokenizer.ts
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
import { parse } from '#parser/parser2'
|
||||||
|
import type { SyntaxNode } from '#parser/node'
|
||||||
|
import { isIdentStart, isIdentChar } from './tokenizer2'
|
||||||
|
|
||||||
|
// Turns a { curly string } into strings and nodes for interpolation
|
||||||
|
export const tokenizeCurlyString = (value: string): (string | [string, SyntaxNode])[] => {
|
||||||
|
let pos = 1
|
||||||
|
let start = 1
|
||||||
|
let char = value[pos]
|
||||||
|
const tokens: (string | [string, SyntaxNode])[] = []
|
||||||
|
|
||||||
|
while (pos < value.length) {
|
||||||
|
if (char === '$') {
|
||||||
|
// escaped \$
|
||||||
|
if (value[pos - 1] === '\\' && value[pos - 2] !== '\\') {
|
||||||
|
tokens.push(value.slice(start, pos - 1))
|
||||||
|
start = pos
|
||||||
|
char = value[++pos]
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
tokens.push(value.slice(start, pos))
|
||||||
|
start = pos
|
||||||
|
|
||||||
|
if (value[pos + 1] === '(') {
|
||||||
|
pos++ // slip opening '('
|
||||||
|
|
||||||
|
char = value[++pos]
|
||||||
|
if (!char) break
|
||||||
|
|
||||||
|
let depth = 0
|
||||||
|
while (char) {
|
||||||
|
if (char === '(') depth++
|
||||||
|
if (char === ')') depth--
|
||||||
|
if (depth < 0) break
|
||||||
|
char = value[++pos]
|
||||||
|
}
|
||||||
|
|
||||||
|
const input = value.slice(start + 2, pos) // skip '$('
|
||||||
|
tokens.push([input, parse(input)])
|
||||||
|
start = pos + 1 // start after ')'
|
||||||
|
} else {
|
||||||
|
char = value[++pos]
|
||||||
|
if (!char) break
|
||||||
|
if (!isIdentStart(char.charCodeAt(0))) break
|
||||||
|
|
||||||
|
while (char && isIdentChar(char.charCodeAt(0)))
|
||||||
|
char = value[++pos]
|
||||||
|
|
||||||
|
const input = value.slice(start + 1, pos) // skip '$'
|
||||||
|
tokens.push([input, parse(input)])
|
||||||
|
start = pos-- // backtrack and start over
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
char = value[++pos]
|
||||||
|
}
|
||||||
|
|
||||||
|
tokens.push(value.slice(start, pos - 1))
|
||||||
|
|
||||||
|
return tokens
|
||||||
|
}
|
||||||
|
|
@ -5,6 +5,7 @@ export const highlighting = styleTags({
|
||||||
Number: tags.number,
|
Number: tags.number,
|
||||||
String: tags.string,
|
String: tags.string,
|
||||||
Boolean: tags.bool,
|
Boolean: tags.bool,
|
||||||
|
Do: tags.keyword,
|
||||||
keyword: tags.keyword,
|
keyword: tags.keyword,
|
||||||
end: tags.keyword,
|
end: tags.keyword,
|
||||||
':': tags.keyword,
|
':': tags.keyword,
|
||||||
|
|
@ -15,4 +16,5 @@ export const highlighting = styleTags({
|
||||||
Command: tags.function(tags.variableName),
|
Command: tags.function(tags.variableName),
|
||||||
'Params/Identifier': tags.definition(tags.variableName),
|
'Params/Identifier': tags.definition(tags.variableName),
|
||||||
Paren: tags.paren,
|
Paren: tags.paren,
|
||||||
|
Comment: tags.comment,
|
||||||
})
|
})
|
||||||
|
|
|
||||||
270
src/parser/node.ts
Normal file
270
src/parser/node.ts
Normal file
|
|
@ -0,0 +1,270 @@
|
||||||
|
import { type Token, TokenType } from './tokenizer2'
|
||||||
|
|
||||||
|
export type NodeType =
|
||||||
|
| 'Program'
|
||||||
|
| 'Block'
|
||||||
|
|
||||||
|
| 'FunctionCall'
|
||||||
|
| 'FunctionCallOrIdentifier'
|
||||||
|
| 'FunctionCallWithBlock'
|
||||||
|
| 'PositionalArg'
|
||||||
|
| 'NamedArg'
|
||||||
|
| 'NamedArgPrefix'
|
||||||
|
|
||||||
|
| 'FunctionDef'
|
||||||
|
| 'Params'
|
||||||
|
| 'NamedParam'
|
||||||
|
|
||||||
|
| 'Null'
|
||||||
|
| 'Boolean'
|
||||||
|
| 'Number'
|
||||||
|
| 'String'
|
||||||
|
| 'StringFragment'
|
||||||
|
| 'CurlyString'
|
||||||
|
| 'DoubleQuote'
|
||||||
|
| 'EscapeSeq'
|
||||||
|
| 'Interpolation'
|
||||||
|
| 'Regex'
|
||||||
|
| 'Identifier'
|
||||||
|
| 'AssignableIdentifier'
|
||||||
|
| 'IdentifierBeforeDot'
|
||||||
|
| 'Word'
|
||||||
|
| 'Array'
|
||||||
|
| 'Dict'
|
||||||
|
| 'Comment'
|
||||||
|
|
||||||
|
| 'BinOp'
|
||||||
|
| 'ConditionalOp'
|
||||||
|
| 'ParenExpr'
|
||||||
|
| 'Assign'
|
||||||
|
| 'CompoundAssign'
|
||||||
|
| 'DotGet'
|
||||||
|
| 'PipeExpr'
|
||||||
|
|
||||||
|
| 'IfExpr'
|
||||||
|
| 'ElseIfExpr'
|
||||||
|
| 'ElseExpr'
|
||||||
|
| 'WhileExpr'
|
||||||
|
| 'TryExpr'
|
||||||
|
| 'CatchExpr'
|
||||||
|
| 'FinallyExpr'
|
||||||
|
| 'Throw'
|
||||||
|
|
||||||
|
| 'Not'
|
||||||
|
| 'Eq'
|
||||||
|
| 'Modulo'
|
||||||
|
| 'Plus'
|
||||||
|
| 'Star'
|
||||||
|
| 'Slash'
|
||||||
|
|
||||||
|
| 'Import'
|
||||||
|
| 'Do'
|
||||||
|
| 'Underscore'
|
||||||
|
| 'colon'
|
||||||
|
| 'keyword'
|
||||||
|
| 'operator'
|
||||||
|
|
||||||
|
// TODO: remove this when we switch from lezer
|
||||||
|
export const operators: Record<string, any> = {
|
||||||
|
// Logic
|
||||||
|
'and': 'And',
|
||||||
|
'or': 'Or',
|
||||||
|
|
||||||
|
// Bitwise
|
||||||
|
'band': 'Band',
|
||||||
|
'bor': 'Bor',
|
||||||
|
'bxor': 'Bxor',
|
||||||
|
'>>>': 'Ushr',
|
||||||
|
'>>': 'Shr',
|
||||||
|
'<<': 'Shl',
|
||||||
|
|
||||||
|
// Comparison
|
||||||
|
'>=': 'Gte',
|
||||||
|
'<=': 'Lte',
|
||||||
|
'>': 'Gt',
|
||||||
|
'<': 'Lt',
|
||||||
|
'!=': 'Neq',
|
||||||
|
'==': 'EqEq',
|
||||||
|
|
||||||
|
// Compound assignment operators
|
||||||
|
'??=': 'NullishEq',
|
||||||
|
'+=': 'PlusEq',
|
||||||
|
'-=': 'MinusEq',
|
||||||
|
'*=': 'StarEq',
|
||||||
|
'/=': 'SlashEq',
|
||||||
|
'%=': 'ModuloEq',
|
||||||
|
|
||||||
|
// Nullish coalescing
|
||||||
|
'??': 'NullishCoalesce',
|
||||||
|
|
||||||
|
// Math
|
||||||
|
'*': 'Star',
|
||||||
|
'**': 'StarStar',
|
||||||
|
'=': 'Eq',
|
||||||
|
'/': 'Slash',
|
||||||
|
'+': 'Plus',
|
||||||
|
'-': 'Minus',
|
||||||
|
'%': 'Modulo',
|
||||||
|
|
||||||
|
// Dotget
|
||||||
|
'.': 'Dot',
|
||||||
|
|
||||||
|
// Pipe
|
||||||
|
'|': 'operator',
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Tree {
|
||||||
|
constructor(public topNode: SyntaxNode) { }
|
||||||
|
|
||||||
|
get length(): number {
|
||||||
|
return this.topNode.to
|
||||||
|
}
|
||||||
|
|
||||||
|
cursor() {
|
||||||
|
return {
|
||||||
|
type: this.topNode.type,
|
||||||
|
from: this.topNode.from,
|
||||||
|
to: this.topNode.to,
|
||||||
|
node: this.topNode,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
iterate(options: { enter: (node: SyntaxNode) => void }) {
|
||||||
|
const iter = (node: SyntaxNode) => {
|
||||||
|
for (const n of node.children) iter(n)
|
||||||
|
options.enter(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
iter(this.topNode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class SyntaxNode {
|
||||||
|
#type: NodeType
|
||||||
|
#isError = false
|
||||||
|
from: number
|
||||||
|
to: number
|
||||||
|
parent: SyntaxNode | null
|
||||||
|
children: SyntaxNode[] = []
|
||||||
|
|
||||||
|
constructor(type: NodeType, from: number, to: number, parent: SyntaxNode | null = null) {
|
||||||
|
this.#type = type
|
||||||
|
this.from = from
|
||||||
|
this.to = to
|
||||||
|
this.parent = parent
|
||||||
|
}
|
||||||
|
|
||||||
|
static from(token: Token, parent?: SyntaxNode): SyntaxNode {
|
||||||
|
return new SyntaxNode(TokenType[token.type] as NodeType, token.from, token.to, parent ?? null)
|
||||||
|
}
|
||||||
|
|
||||||
|
get type(): { type: NodeType, name: NodeType, isError: boolean, is: (other: NodeType) => boolean } {
|
||||||
|
return {
|
||||||
|
type: this.#type,
|
||||||
|
name: this.#type,
|
||||||
|
isError: this.#isError,
|
||||||
|
is: (other: NodeType) => other === this.#type
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
set type(name: NodeType) {
|
||||||
|
this.#type = name
|
||||||
|
}
|
||||||
|
|
||||||
|
get name(): string {
|
||||||
|
return this.type.name
|
||||||
|
}
|
||||||
|
|
||||||
|
get isError(): boolean {
|
||||||
|
return this.#isError
|
||||||
|
}
|
||||||
|
|
||||||
|
set isError(err: boolean) {
|
||||||
|
this.#isError = err
|
||||||
|
}
|
||||||
|
|
||||||
|
get firstChild(): SyntaxNode | null {
|
||||||
|
return this.children[0] ?? null
|
||||||
|
}
|
||||||
|
|
||||||
|
get lastChild(): SyntaxNode | null {
|
||||||
|
return this.children.at(-1) ?? null
|
||||||
|
}
|
||||||
|
|
||||||
|
get nextSibling(): SyntaxNode | null {
|
||||||
|
if (!this.parent) return null
|
||||||
|
const siblings = this.parent.children
|
||||||
|
const index = siblings.indexOf(this)
|
||||||
|
return index >= 0 && index < siblings.length - 1 ? siblings[index + 1]! : null
|
||||||
|
}
|
||||||
|
|
||||||
|
get prevSibling(): SyntaxNode | null {
|
||||||
|
if (!this.parent) return null
|
||||||
|
const siblings = this.parent.children
|
||||||
|
const index = siblings.indexOf(this)
|
||||||
|
return index > 0 ? siblings[index - 1]! : null
|
||||||
|
}
|
||||||
|
|
||||||
|
add(node: SyntaxNode) {
|
||||||
|
node.parent = this
|
||||||
|
this.children.push(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
push(...nodes: SyntaxNode[]): SyntaxNode {
|
||||||
|
nodes.forEach(child => child.parent = this)
|
||||||
|
this.children.push(...nodes)
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
toString(): string {
|
||||||
|
return this.type.name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Operator precedence (binding power) - higher = tighter binding.
// Consumed by the Pratt parser (exprWithPrecedence); operators missing from
// this table are not treated as infix operators there.
export const precedence: Record<string, number> = {
  // Logical
  'or': 10,
  'and': 20,

  // Comparison
  '==': 30,
  '!=': 30,
  '<': 30,
  '>': 30,
  '<=': 30,
  '>=': 30,

  // Nullish coalescing
  '??': 35,

  // Bitwise shifts (lower precedence than addition)
  '<<': 37,
  '>>': 37,
  '>>>': 37,

  // Addition/Subtraction
  '+': 40,
  '-': 40,

  // Bitwise AND/OR/XOR (higher precedence than addition)
  'band': 45,
  'bor': 45,
  'bxor': 45,

  // Multiplication/Division/Modulo
  '*': 50,
  '/': 50,
  '%': 50,

  // Exponentiation (right-associative)
  '**': 60,
}
|
||||||
|
|
||||||
|
export const conditionals = new Set([
|
||||||
|
'==', '!=', '<', '>', '<=', '>=', '??', 'and', 'or'
|
||||||
|
])
|
||||||
|
|
||||||
|
export const compounds = [
|
||||||
|
'??=', '+=', '-=', '*=', '/=', '%='
|
||||||
|
]
|
||||||
|
|
@ -1,82 +0,0 @@
|
||||||
import { ExternalTokenizer, InputStream } from '@lezer/lr'
|
|
||||||
import * as terms from './shrimp.terms'
|
|
||||||
|
|
||||||
// Maps a source operator string to its token name in the generated grammar.
type Operator = { str: string; tokenName: keyof typeof terms }

// NOTE: multi-char operators are listed before their single-char prefixes
// ('==' before '=', '>=' before '>') so the longest match wins in the
// first-match loop below.
const operators: Array<Operator> = [
  { str: 'and', tokenName: 'And' },
  { str: 'or', tokenName: 'Or' },
  { str: '>=', tokenName: 'Gte' },
  { str: '<=', tokenName: 'Lte' },
  { str: '!=', tokenName: 'Neq' },
  { str: '==', tokenName: 'EqEq' },

  // Single-char operators
  { str: '*', tokenName: 'Star' },
  { str: '=', tokenName: 'Eq' },
  { str: '/', tokenName: 'Slash' },
  { str: '+', tokenName: 'Plus' },
  { str: '-', tokenName: 'Minus' },
  { str: '>', tokenName: 'Gt' },
  { str: '<', tokenName: 'Lt' },
  { str: '%', tokenName: 'Modulo' },
]
|
|
||||||
|
|
||||||
// External tokenizer: emits an operator token only when the operator text is
// followed by whitespace (so `a - b` is an operator but `my-func` is not).
export const operatorTokenizer = new ExternalTokenizer((input: InputStream) => {
  for (let operator of operators) {
    if (!matchesString(input, 0, operator.str)) continue
    const afterOpPos = operator.str.length
    const charAfterOp = input.peek(afterOpPos)
    // NOTE(review): peek() yields -1 at EOF, which is not whitespace, so an
    // operator at end-of-input is never tokenized - confirm this is intended.
    if (!isWhitespace(charAfterOp)) continue

    // Accept the operator token
    const token = terms[operator.tokenName]
    if (token === undefined) {
      throw new Error(`Unknown token name: ${operator.tokenName}`)
    }

    input.advance(afterOpPos)
    input.acceptToken(token)

    return
  }
})
|
|
||||||
|
|
||||||
const isWhitespace = (ch: number): boolean => {
|
|
||||||
return matchesChar(ch, [' ', '\t', '\n'])
|
|
||||||
}
|
|
||||||
|
|
||||||
const matchesChar = (ch: number, chars: (string | number)[]): boolean => {
|
|
||||||
for (const c of chars) {
|
|
||||||
if (typeof c === 'number') {
|
|
||||||
if (ch === c) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
} else if (ch === c.charCodeAt(0)) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
const matchesString = (input: InputStream, pos: number, str: string): boolean => {
|
|
||||||
for (let i = 0; i < str.length; i++) {
|
|
||||||
if (input.peek(pos + i) !== str.charCodeAt(i)) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
const peek = (numChars: number, input: InputStream): string => {
|
|
||||||
let result = ''
|
|
||||||
for (let i = 0; i < numChars; i++) {
|
|
||||||
const ch = input.peek(i)
|
|
||||||
if (ch === -1) {
|
|
||||||
result += 'EOF'
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
result += String.fromCharCode(ch)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
991
src/parser/parser2.ts
Normal file
991
src/parser/parser2.ts
Normal file
|
|
@ -0,0 +1,991 @@
|
||||||
|
import { CompilerError } from '#compiler/compilerError'
|
||||||
|
import { Scanner, type Token, TokenType } from './tokenizer2'
|
||||||
|
import { SyntaxNode, operators, precedence, conditionals, compounds } from './node'
|
||||||
|
import { parseString } from './stringParser'
|
||||||
|
|
||||||
|
const $T = TokenType
|
||||||
|
|
||||||
|
// tell the dotGet searcher about builtin globals
|
||||||
|
export const globals: string[] = []
|
||||||
|
export const setGlobals = (newGlobals: string[] | Record<string, any>) => {
|
||||||
|
globals.length = 0
|
||||||
|
globals.push(...(Array.isArray(newGlobals) ? newGlobals : Object.keys(newGlobals)))
|
||||||
|
}
|
||||||
|
|
||||||
|
export const parse = (input: string): SyntaxNode => {
|
||||||
|
const parser = new Parser()
|
||||||
|
return parser.parse(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
class Scope {
|
||||||
|
parent?: Scope
|
||||||
|
set = new Set<string>()
|
||||||
|
|
||||||
|
constructor(parent?: Scope) {
|
||||||
|
this.parent = parent
|
||||||
|
|
||||||
|
// no parent means this is global scope
|
||||||
|
if (!parent) for (const name of globals) this.add(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
add(key: string) {
|
||||||
|
this.set.add(key)
|
||||||
|
}
|
||||||
|
|
||||||
|
has(key: string): boolean {
|
||||||
|
return this.set.has(key) || this.parent?.has(key) || false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Parser {
  tokens: Token[] = []  // token stream produced by the Scanner
  pos = 0               // cursor into `tokens`
  inParens = 0          // '(' nesting depth; newline handling differs inside parens
  input = ''            // raw source text, used to slice names out of node ranges
  scope = new Scope     // current lexical scope (global scope at the start)
  inTestExpr = false    // true while parsing an if/while condition
|
||||||
|
|
||||||
|
// Tokenize `input` and parse the whole program into a Program node.
// Resets all parser state, so one Parser instance can be reused.
parse(input: string): SyntaxNode {
  const scanner = new Scanner()
  this.tokens = scanner.tokenize(input)
  this.pos = 0
  this.input = input
  this.scope = new Scope()
  this.inTestExpr = false

  const node = new SyntaxNode('Program', 0, input.length)

  while (!this.isEOF()) {
    // skip blank separators between statements
    if (this.is($T.Newline) || this.is($T.Semicolon)) {
      this.next()
      continue
    }

    const prevPos = this.pos
    const stmt = this.statement()
    if (stmt) node.add(stmt)

    // infinite-loop guard: every iteration must consume at least one token.
    // Throw a real Error (not a bare string) so callers get a stack trace.
    if (this.pos === prevPos && !this.isEOF())
      throw new Error(`parser didn't advance - you need to call next()\n\n ${this.input}\n`)
  }

  return node
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// parse foundation nodes - statements, expressions
|
||||||
|
//
|
||||||
|
|
||||||
|
// statement is a line of code. Returns null when only separators or an
// expression-ending keyword remain.
statement(): SyntaxNode | null {
  // a comment is a statement of its own
  if (this.is($T.Comment)) return this.comment()

  // skip blank separators
  while (this.is($T.Newline) || this.is($T.Semicolon)) this.next()

  if (this.isEOF() || this.isExprEndKeyword()) return null

  return this.expression()
}
|
||||||
|
|
||||||
|
// expressions can be found in four places:
// 1. line of code
// 2. right side of assignment
// 3. if/while conditions
// 4. inside (parens)
//
// Dispatch order matters: assignments and keyword forms are recognized
// before function calls, which are recognized before the Pratt parser.
expression(allowPipe = true): SyntaxNode {
  let expr

  // x = value
  if (this.is($T.Identifier) && (
    this.nextIs($T.Operator, '=') || compounds.some(x => this.nextIs($T.Operator, x))
  ))
    expr = this.assign()

  // if, while, do, etc
  else if (this.is($T.Keyword))
    expr = this.keywords()

  // dotget
  else if (this.nextIs($T.Operator, '.'))
    expr = this.dotGetFunctionCall()

  // echo hello world
  else if (this.is($T.Identifier) && !this.nextIs($T.Operator) && !this.nextIsExprEnd())
    expr = this.functionCall()

  // bare-function-call
  else if (this.is($T.Identifier) && this.nextIsExprEnd())
    expr = this.functionCallOrIdentifier()

  // everything else
  else
    expr = this.exprWithPrecedence()

  // check for destructuring
  if (expr.type.is('Array') && this.is($T.Operator, '='))
    return this.destructure(expr)

  // check for parens function call
  // ex: (ref my-func) my-arg
  if (expr.type.is('ParenExpr') && !this.isExprEnd())
    expr = this.functionCall(expr)

  // if dotget is followed by binary operator, continue parsing as binary expression
  if (expr.type.is('DotGet') && this.is($T.Operator) && !this.is($T.Operator, '|'))
    expr = this.dotGetBinOp(expr)

  // one | echo
  if (allowPipe && this.isPipe())
    return this.pipe(expr)

  // regular
  else
    return expr
}
|
||||||
|
|
||||||
|
// piping | stuff | is | cool
// Collects a flat [expr, op, expr, op, expr, ...] list into one PipeExpr.
pipe(left: SyntaxNode): SyntaxNode {
  // inside parens, newlines are significant, so don't look past them
  const canLookPastNewlines = this.inParens === 0
  const parts: SyntaxNode[] = [left]

  while (this.isPipe()) {
    // consume newlines before pipe (only if not in parens)
    if (canLookPastNewlines) {
      while (this.is($T.Newline)) this.next()
    }

    const pipeOp = this.op('|')
    pipeOp.type = 'operator'
    parts.push(pipeOp)

    // consume newlines after pipe (only if not in parens)
    if (canLookPastNewlines) {
      while (this.is($T.Newline)) this.next()
    }

    // parse right side - don't allow nested pipes
    parts.push(this.expression(false))
  }

  const node = new SyntaxNode('PipeExpr', parts[0]!.from, parts.at(-1)!.to)
  return node.push(...parts)
}
|
||||||
|
|
||||||
|
// Pratt parser - parses expressions with precedence climbing
// bp = binding precedence
// `minBp` is the minimum binding power an operator must have to be consumed
// at this level; recursing with bp + 1 makes operators left-associative.
exprWithPrecedence(minBp = 0): SyntaxNode {
  let left = this.value()

  // infix operators with precedence
  while (this.is($T.Operator)) {
    const op = this.current().value!
    const bp = precedence[op]

    // operator has lower precedence than required, stop
    if (bp === undefined || bp < minBp) break

    const opNode = this.op()

    // right-associative operators (like **) use same bp, others use bp + 1
    const nextMinBp = op === '**' ? bp : bp + 1

    // parse right-hand side with higher precedence
    const right = this.exprWithPrecedence(nextMinBp)

    const nodeType = conditionals.has(op) ? 'ConditionalOp' : 'BinOp'
    const node = new SyntaxNode(nodeType, left.from, right.to)

    node.push(left, opNode, right)
    left = node
  }

  return left
}
|
||||||
|
|
||||||
|
// if, while, do, etc
// Dispatch on the current keyword's value; anything unrecognized falls
// through to expect(), which raises the expected-keyword error.
keywords(): SyntaxNode {
  if (this.is($T.Keyword)) {
    switch (this.current().value) {
      case 'if': return this.if()
      case 'while': return this.while()
      case 'do': return this.do()
      case 'try': return this.try()
      case 'throw': return this.throw()
      case 'not': return this.not()
      case 'import': return this.import()
    }
  }

  return this.expect($T.Keyword, 'if/while/do/import') as never
}
|
||||||
|
|
||||||
|
// value can be an atom or a (parens that gets turned into an atom)
// values are used in a few places:
// 1. function arguments
// 2. array/dict members
// 3. binary operations
// 4. anywhere an expression can be used
value(): SyntaxNode {
  if (this.is($T.OpenParen)) return this.parens()
  if (this.is($T.OpenBracket)) return this.arrayOrDict()

  // identifier followed by '.' starts a dotget chain
  if (this.nextIs($T.Operator, '.')) return this.dotGet()

  return this.atom()
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// parse specific nodes
|
||||||
|
//
|
||||||
|
|
||||||
|
// raw determines whether we just want the SyntaxNode or we want to wrap it
// in a PositionalArg node.
arg(raw = false): SyntaxNode {
  // 'do' is a special function arg - it doesn't need to be wrapped in
  // parens. otherwise, args are regular value()s
  const val = this.is($T.Keyword, 'do') ? this.do() : this.value()

  if (raw) return val

  const wrapped = new SyntaxNode('PositionalArg', val.from, val.to)
  if (val.isError) wrapped.isError = true
  wrapped.add(val)
  return wrapped
}
|
||||||
|
|
||||||
|
// [ 1 2 3 ] - bracketed list of values. Newlines/semicolons separate
// entries; comments are kept as child nodes.
array(): SyntaxNode {
  const open = this.expect($T.OpenBracket)
  const values: SyntaxNode[] = []

  while (!this.is($T.CloseBracket) && !this.isEOF()) {
    if (this.is($T.Semicolon) || this.is($T.Newline)) {
      this.next()
    } else if (this.is($T.Comment)) {
      values.push(this.comment())
    } else {
      values.push(this.value())
    }
  }

  const close = this.expect($T.CloseBracket)
  return new SyntaxNode('Array', open.from, close.to).push(...values)
}
|
||||||
|
|
||||||
|
// which are we dealing with? ignores leading newlines and comments
// Pure lookahead: scans tokens after the '[' without consuming anything.
// Decides dict when it sees a named-arg prefix or an '=' operator.
arrayOrDict(): SyntaxNode {
  let peek = 1
  let curr = this.peek(peek++)
  let isDict = false

  while (curr && curr.type !== $T.CloseBracket) {
    // definitely a dict
    if (curr.type === $T.NamedArgPrefix) {
      isDict = true
      break
    }

    // empty dict
    if (curr.type === $T.Operator && curr.value === '=') {
      isDict = true
      break
    }

    // [ a = true ]
    const next = this.peek(peek)
    if (next?.type === $T.Operator && next.value === '=') {
      isDict = true
      break
    }

    // probably an array - the first substantive token wasn't dict-like
    if (curr.type !== $T.Comment && curr.type !== $T.Semicolon && curr.type !== $T.Newline)
      break

    curr = this.peek(peek++)
  }

  return isDict ? this.dict() : this.array()
}
|
||||||
|
|
||||||
|
// x = true / x += 1
assign(): SyntaxNode {
  const ident = this.assignableIdentifier()
  const opToken = this.current()!
  const op = this.op()
  const expr = this.expression()

  // plain '=' is Assign; any compound operator (+=, ??=, ...) is CompoundAssign
  const kind = opToken.value === '=' ? 'Assign' : 'CompoundAssign'
  return new SyntaxNode(kind, ident.from, expr.to).push(ident, op, expr)
}
|
||||||
|
|
||||||
|
// identifier used in assignment (TODO: legacy lezer quirk)
// Registers the name in the current scope, then re-tags the node.
assignableIdentifier(): SyntaxNode {
  const token = this.expect($T.Identifier)
  this.scope.add(token.value!)
  const node = SyntaxNode.from(token)
  node.type = 'AssignableIdentifier'
  return node
}
|
||||||
|
|
||||||
|
// atoms are the basic building blocks: literals, identifiers, words
atom(): SyntaxNode {
  if (this.is($T.String)) return this.string()

  const literal = this.isAny(
    $T.Null, $T.Boolean, $T.Number, $T.Identifier, $T.Word, $T.Regex, $T.Underscore
  )
  if (literal) return SyntaxNode.from(this.next())

  // nothing atom-shaped here: consume the token and report it
  const next = this.next()
  throw new CompilerError(`Unexpected token: ${TokenType[next.type]}`, next.from, next.to)
}
|
||||||
|
|
||||||
|
// blocks in if, do, special calls, etc
// `: something end`
//
// `blockNode` determines whether we return [colon, BlockNode] or
// just a list of statements like [colon, stmt1, stmt2]
block(blockNode = true): SyntaxNode[] {
  const stmts: SyntaxNode[] = []
  const colon = this.colon()

  while (!this.isExprEndKeyword() && !this.isEOF()) {
    const stmt = this.statement()
    if (stmt) stmts.push(stmt)
  }

  const out = [colon]

  if (blockNode) {
    // An empty block (`: end`) has no statements; collapse the Block's range
    // onto the colon instead of crashing on stmts[0].
    const from = stmts[0]?.from ?? colon.to
    const to = stmts.at(-1)?.to ?? colon.to
    const block = new SyntaxNode('Block', from, to)
    block.push(...stmts)
    out.push(block)
  } else {
    out.push(...stmts)
  }

  return out
}
|
||||||
|
|
||||||
|
// catch err: block
|
||||||
|
catch(): SyntaxNode {
|
||||||
|
const keyword = this.keyword('catch')
|
||||||
|
|
||||||
|
let catchVar
|
||||||
|
if (this.is($T.Identifier))
|
||||||
|
catchVar = this.identifier()
|
||||||
|
|
||||||
|
const block = this.block()
|
||||||
|
|
||||||
|
const node = new SyntaxNode('CatchExpr', keyword.from, block.at(-1)!.to)
|
||||||
|
|
||||||
|
node.push(keyword)
|
||||||
|
if (catchVar) node.push(catchVar)
|
||||||
|
return node.push(...block)
|
||||||
|
}
|
||||||
|
|
||||||
|
// colon - consume and return the ':' that introduces a block.
colon(): SyntaxNode {
  const colon = SyntaxNode.from(this.expect($T.Colon))
  colon.type = 'colon' // TODO lezer legacy
  return colon
}
|
||||||
|
|
||||||
|
// # comment - consumed verbatim as its own node.
comment(): SyntaxNode {
  return SyntaxNode.from(this.expect($T.Comment))
}
|
||||||
|
|
||||||
|
// [ a b c ] = [ 1 2 3 ] - array destructuring assignment
destructure(array: SyntaxNode): SyntaxNode {
  const eq = this.op('=')
  const val = this.expression()

  // every target name becomes visible in the current scope
  for (const ident of array.children)
    this.scope.add(this.input.slice(ident.from, ident.to))

  return new SyntaxNode('Assign', array.from, val.to).push(array, eq, val)
}
|
||||||
|
|
||||||
|
// [ a=1 b=true c='three' ]
// Parses a dict literal. Any malformed named arg flags the whole Dict node
// as an error.
dict(): SyntaxNode {
  const open = this.expect($T.OpenBracket)
  let isError = false

  // empty dict [=] or [ = ]
  if (this.is($T.Operator, '=') && this.nextIs($T.CloseBracket)) {
    const _op = this.next()
    const close = this.next()
    return new SyntaxNode('Dict', open.from, close.to)
  }

  const values = []
  while (!this.is($T.CloseBracket) && !this.isEOF()) {
    if (this.is($T.Semicolon) || this.is($T.Newline)) {
      this.next()
      continue
    }

    if (this.is($T.Comment)) {
      values.push(this.comment())
      continue
    }

    // check for named arg with space after it (vs connected)
    if (this.nextIs($T.Operator, '=')) {
      const ident = this.identifier()
      const op = this.op('=')
      const prefix = new SyntaxNode('NamedArgPrefix', ident.from, op.to)

      // `a =` with no value before the closer/separator is an error
      if (this.is($T.CloseBracket) || this.is($T.Semicolon) || this.is($T.Newline)) {
        const node = new SyntaxNode('NamedArg', ident.from, op.to)
        node.isError = true
        isError = true
        values.push(node.push(prefix))
      } else {
        const val = this.arg(true)
        const node = new SyntaxNode('NamedArg', ident.from, val.to)
        values.push(node.push(prefix, val))
      }
    } else {
      const arg = this.is($T.NamedArgPrefix) ? this.namedArg() : this.arg()
      if (arg.isError) isError = true
      values.push(arg)
    }
  }

  const close = this.expect($T.CloseBracket)

  const node = new SyntaxNode('Dict', open.from, close.to)
  node.isError = isError
  return node.push(...values)
}
|
||||||
|
|
||||||
|
// FunctionDef `do x y: something end`
// Parses parameters (plain identifiers or named params with literal
// defaults), the body block, and optional catch/finally clauses - all inside
// a fresh child scope that is popped before the node is assembled.
do(): SyntaxNode {
  const doNode = this.keyword('do')
  doNode.type = 'Do'
  this.scope = new Scope(this.scope)

  const params = []
  while (!this.is($T.Colon) && !this.isExprEnd()) {
    // register the param name; a named param's token ends in '=' - strip it
    let varName = this.current().value!
    if (varName.endsWith('=')) varName = varName.slice(0, varName.length - 1)
    this.scope.add(varName)

    let arg
    if (this.is($T.Identifier))
      arg = this.identifier()
    else if (this.is($T.NamedArgPrefix))
      arg = this.namedParam()
    else
      throw new CompilerError(`Expected Identifier or NamedArgPrefix, got ${TokenType[this.current().type]}`, this.current().from, this.current().to)

    params.push(arg)
  }

  const block = this.block(false)
  let catchNode, finalNode

  if (this.is($T.Keyword, 'catch'))
    catchNode = this.catch()

  if (this.is($T.Keyword, 'finally'))
    finalNode = this.finally()

  const end = this.keyword('end')

  // the node's span ends at the last child of the last clause present
  let last = block.at(-1)
  if (finalNode) last = finalNode.children.at(-1)!
  else if (catchNode) last = catchNode.children.at(-1)!

  const node = new SyntaxNode('FunctionDef', doNode.from, last!.to)

  node.add(doNode)

  const paramsNode = new SyntaxNode(
    'Params',
    params[0]?.from ?? 0,
    params.at(-1)?.to ?? 0
  )

  if (params.length) paramsNode.push(...params)
  node.add(paramsNode)

  // leave the function's scope before attaching the body
  this.scope = this.scope.parent!

  node.push(...block)

  if (catchNode) node.push(catchNode)
  if (finalNode) node.push(finalNode)

  return node.push(end)
}
|
||||||
|
|
||||||
|
// config.path
// The leading identifier must be in scope; otherwise the whole chain is
// treated as a bare Word (shell-style argument).
dotGet(): SyntaxNode {
  const left = this.identifier()
  const ident = this.input.slice(left.from, left.to)

  // not in scope, just return Word
  if (!this.scope.has(ident))
    return this.word(left)

  if (left.type.is('Identifier')) left.type = 'IdentifierBeforeDot'

  // collect one segment per '.'; each segment is a parens expr or an atom
  let parts = []
  while (this.is($T.Operator, '.')) {
    this.next()
    parts.push(this.is($T.OpenParen) ? this.parens() : this.atom())
  }

  // TODO lezer legacy - we can do a flat DotGet if we remove this
  const nodes = parts.length > 1 ? collapseDotGets(parts) : undefined

  const node = new SyntaxNode('DotGet', left.from, parts.at(-1)!.to)
  return nodes ? node.push(left, nodes!) : node.push(left, ...parts)
}
|
||||||
|
|
||||||
|
// continue parsing dotget/word binary operation
// NOTE(review): unlike exprWithPrecedence, this loop always recurses with
// bp + 1, so '**' after a dotget is left-associative here - confirm intended.
dotGetBinOp(left: SyntaxNode): SyntaxNode {
  while (this.is($T.Operator) && !this.is($T.Operator, '|')) {
    const op = this.current().value!
    const bp = precedence[op]
    if (bp === undefined) break

    const opNode = this.op()
    const right = this.exprWithPrecedence(bp + 1)

    const nodeType = conditionals.has(op) ? 'ConditionalOp' : 'BinOp'
    const node = new SyntaxNode(nodeType, left.from, right.to)
    node.push(left, opNode, right)
    left = node
  }
  return left
}
|
||||||
|
|
||||||
|
// dotget in a statement/expression (something.blah) or (something.blah arg1)
dotGetFunctionCall(): SyntaxNode {
  const dotGet = this.dotGet()

  // if followed by a binary operator (not pipe), return dotGet/Word as-is for expression parser
  if (this.is($T.Operator) && !this.is($T.Operator, '|'))
    return dotGet

  // dotget not in scope, regular Word
  if (dotGet.type.is('Word')) return dotGet

  // no arguments -> call-or-identifier wrapper; otherwise a full call
  if (this.isExprEnd())
    return this.functionCallOrIdentifier(dotGet)
  else
    return this.functionCall(dotGet)
}
|
||||||
|
|
||||||
|
// can be used in functions or try block
|
||||||
|
finally(): SyntaxNode {
|
||||||
|
const keyword = this.keyword('finally')
|
||||||
|
const block = this.block()
|
||||||
|
const node = new SyntaxNode('FinallyExpr', keyword.from, block.at(-1)!.to)
|
||||||
|
|
||||||
|
return node.push(keyword, ...block)
|
||||||
|
}
|
||||||
|
|
||||||
|
// you're lookin at it
// Parses `fn arg1 arg2 ...`. A trailing ':' (outside of a test expression)
// attaches a block, producing FunctionCallWithBlock.
functionCall(fn?: SyntaxNode): SyntaxNode {
  const ident = fn ?? this.identifier()
  let isError = false

  const args: SyntaxNode[] = []
  while (!this.isExprEnd()) {
    const arg = this.is($T.NamedArgPrefix) ? this.namedArg() : this.arg()
    if (arg.isError) isError = true
    args.push(arg)
  }

  // zero args is fine: the node's range collapses onto the identifier
  const node = new SyntaxNode('FunctionCall', ident.from, (args.at(-1) || ident).to)
  node.push(ident, ...args)

  if (isError) node.isError = true

  if (!this.inTestExpr && this.is($T.Colon)) {
    const block = this.block()
    const end = this.keyword('end')
    const blockNode = new SyntaxNode('FunctionCallWithBlock', node.from, end.to)
    return blockNode.push(node, ...block, end)
  }

  return node
}
|
||||||
|
|
||||||
|
// bare identifier in an expression
// Wraps the identifier (or dotget) so later stages can decide whether it is
// a zero-arg call or a plain variable reference.
functionCallOrIdentifier(inner?: SyntaxNode) {
  if (!inner && this.nextIs($T.Operator, '.')) {
    inner = this.dotGet()

    // if the dotGet was just a Word, bail
    if (inner.type.is('Word')) return inner
  }

  inner ??= this.identifier()

  const wrapper = new SyntaxNode('FunctionCallOrIdentifier', inner.from, inner.to)
  wrapper.push(inner)

  // a trailing ':' (outside a test expression) attaches a block
  if (!this.inTestExpr && this.is($T.Colon)) {
    const block = this.block()
    const end = this.keyword('end')
    const node = new SyntaxNode('FunctionCallWithBlock', wrapper.from, end.to)
    return node.push(wrapper, ...block, end)
  }

  return wrapper
}
|
||||||
|
|
||||||
|
// function and variable names
identifier(): SyntaxNode {
  return SyntaxNode.from(this.expect($T.Identifier))
}
|
||||||
|
|
||||||
|
// if something: blah end
// if something: blah else: blah end
// if something: blah else if something: blah else: blah end
if(): SyntaxNode {
  const ifNode = this.keyword('if')
  const test = this.testExpr()
  const ifBlock = this.block()

  const node = new SyntaxNode('IfExpr', ifNode.from, ifBlock.at(-1)!.to)
  node.push(ifNode, test)
  node.push(...ifBlock)

  // any number of `else if` clauses
  while (this.is($T.Keyword, 'else') && this.nextIs($T.Keyword, 'if')) {
    const elseWord = this.keyword('else')
    const ifWord = this.keyword('if')
    const elseIfTest = this.testExpr()
    const elseIfBlock = this.block()
    const elseIfNode = new SyntaxNode('ElseIfExpr', elseWord.from, elseIfBlock.at(-1)!.to)
    elseIfNode.push(elseWord, ifWord, elseIfTest)
    elseIfNode.push(...elseIfBlock)
    node.push(elseIfNode)
  }

  // optional final `else:` clause
  if (this.is($T.Keyword, 'else') && this.nextIs($T.Colon)) {
    const elseWord = this.keyword('else')
    const elseBlock = this.block()
    const elseNode = new SyntaxNode('ElseExpr', elseWord.from, elseBlock.at(-1)!.to)
    elseNode.push(elseWord)
    elseNode.push(...elseBlock)
    node.push(elseNode)
  }

  return node.push(this.keyword('end'))
}
|
||||||
|
|
||||||
|
// import a b c / import name= value
// Parses an import statement: identifiers and/or named args until the
// expression ends.
import(): SyntaxNode {
  const keyword = this.keyword('import')

  const args: SyntaxNode[] = []
  while (!this.isExprEnd()) {
    if (this.is($T.NamedArgPrefix)) {
      const prefix = SyntaxNode.from(this.next())
      const val = this.value()
      const arg = new SyntaxNode('NamedArg', prefix.from, val.to)
      arg.push(prefix, val)
      args.push(arg)
    } else {
      args.push(this.identifier())
    }
  }

  // tolerate a bare `import` with no arguments: fall back to the keyword's
  // own range instead of crashing on args.at(-1)!
  const node = new SyntaxNode('Import', keyword.from, (args.at(-1) ?? keyword).to)
  node.add(keyword)
  return node.push(...args)
}
|
||||||
|
|
||||||
|
// if, while, do, etc - consume a specific keyword token by name.
keyword(name: string): SyntaxNode {
  const node = SyntaxNode.from(this.expect($T.Keyword, name))
  node.type = 'keyword' // TODO lezer legacy
  return node
}
|
||||||
|
|
||||||
|
// abc= true (named argument). A dangling prefix with no value becomes an
// error-flagged NamedArg so parsing can continue.
namedArg(): SyntaxNode {
  const prefix = SyntaxNode.from(this.expect($T.NamedArgPrefix))

  if (this.isExprEnd()) {
    const dangling = new SyntaxNode('NamedArg', prefix.from, prefix.to)
    dangling.isError = true
    return dangling.push(prefix)
  }

  const val = this.arg(true)
  return new SyntaxNode('NamedArg', prefix.from, val.to).push(prefix, val)
}
|
||||||
|
|
||||||
|
// abc= null|true|123|'hi' - a parameter with a default value.
namedParam(): SyntaxNode {
  const prefix = SyntaxNode.from(this.expect($T.NamedArgPrefix))
  const val = this.value()

  // only literal defaults are allowed
  const allowed = ['Null', 'Boolean', 'Number', 'String']
  if (!allowed.includes(val.type.name))
    throw new CompilerError(`Default value must be null, boolean, number, or string, got ${val.type.name}`, val.from, val.to)

  return new SyntaxNode('NamedParam', prefix.from, val.to).push(prefix, val)
}
|
||||||
|
|
||||||
|
// not blah
not(): SyntaxNode {
  const kw = this.keyword('not')
  const operand = this.expression()
  return new SyntaxNode('Not', kw.from, operand.to).push(kw, operand)
}
|
||||||
|
|
||||||
|
// operators like + - =
// Looks up the node name registered for the operator token; unknown
// operators are a compiler error, not a silent fallthrough.
op(op?: string): SyntaxNode {
  const token = op ? this.expect($T.Operator, op) : this.expect($T.Operator)
  const name = operators[token.value!]
  if (!name) throw new CompilerError(`Operator not registered: ${token.value!}`, token.from, token.to)
  return new SyntaxNode(name, token.from, token.to)
}
|
||||||
|
|
||||||
|
// ( expressions in parens )
// Tracks paren depth so newline handling can change inside parens.
parens(): SyntaxNode {
  this.inParens++
  const open = this.expect($T.OpenParen)
  const inner = this.expression()
  const close = this.expect($T.CloseParen)
  this.inParens--

  const node = new SyntaxNode('ParenExpr', open.from, close.to)
  node.add(inner)
  return node
}
|
||||||
|
|
||||||
|
// 'hell yes' "hell no" { hell if i know }
|
||||||
|
string(): SyntaxNode {
  // Delegate to the dedicated string parser, which splits the token into
  // fragments, interpolations, and escape sequences.
  const { from, to } = this.expect($T.String)
  return parseString(this.input, from, to, this)
}
|
||||||
|
|
||||||
|
// if TEST: blah end
|
||||||
|
testExpr(): SyntaxNode {
  // Flag that we are parsing a condition (after `if`/`while`) so the
  // expression parser can adjust its behavior.
  this.inTestExpr = true
  try {
    return this.expression()
  } finally {
    // Always clear the flag, even when expression() throws, so a recovering
    // parse does not remain stuck in test-expression mode.
    this.inTestExpr = false
  }
}
|
||||||
|
|
||||||
|
// throw blah
|
||||||
|
throw(): SyntaxNode {
  // `throw` wraps the expression whose value is raised.
  const kw = this.keyword('throw')
  const operand = this.expression()
  return new SyntaxNode('Throw', kw.from, operand.to).push(kw, operand)
}
|
||||||
|
|
||||||
|
// try: blah catch e: blah end
|
||||||
|
try(): SyntaxNode {
  const tryNode = this.keyword('try')
  const tryBlock = this.block()
  let catchNode, finalNode

  if (this.is($T.Keyword, 'catch'))
    catchNode = this.catch()

  if (this.is($T.Keyword, 'finally'))
    finalNode = this.finally()

  const end = this.keyword('end')

  // The node's span ends at the last statement of the last clause present.
  // Fall back through catch/try and ultimately the `try` keyword itself so
  // empty bodies (e.g. `try: end`) no longer crash on a non-null assertion.
  let last = tryBlock.at(-1)
  if (finalNode) last = finalNode.children.at(-1) ?? last
  else if (catchNode) last = catchNode.children.at(-1) ?? last

  const node = new SyntaxNode('TryExpr', tryNode.from, (last ?? tryNode).to)
  node.push(tryNode, ...tryBlock)

  if (catchNode)
    node.push(catchNode)

  if (finalNode)
    node.push(finalNode)

  return node.push(end)
}
|
||||||
|
|
||||||
|
// while test: blah end
|
||||||
|
while(): SyntaxNode {
  // while <test>: <block> end
  const kw = this.keyword('while')
  const condition = this.testExpr()
  const body = this.block()
  const end = this.keyword('end')

  const node = new SyntaxNode('WhileExpr', kw.from, end.to)
  return node.push(kw, condition, ...body, end)
}
|
||||||
|
|
||||||
|
// readme.txt (when `readme` isn't in scope)
|
||||||
|
word(start?: SyntaxNode): SyntaxNode {
  // Collect dotted segments: word (. word|identifier|number)*
  const parts = [start ?? this.expect($T.Word)]

  while (this.is($T.Operator, '.')) {
    this.next() // consume the dot
    if (this.isAny($T.Word, $T.Identifier, $T.Number))
      parts.push(this.next())
  }

  const head = parts[0]!
  const tail = parts.at(-1)!
  return new SyntaxNode('Word', head.from, tail.to)
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// helpers
|
||||||
|
//
|
||||||
|
|
||||||
|
current(): Token {
  // Past the end of input, synthesize a zero-width newline token so callers
  // never have to null-check the current token.
  const token = this.tokens[this.pos]
  return token ?? { type: TokenType.Newline, from: 0, to: 0 }
}
|
||||||
|
|
||||||
|
peek(offset = 1): Token | undefined {
  // Look ahead without consuming; undefined past the end of input.
  const index = this.pos + offset
  return this.tokens[index]
}
|
||||||
|
|
||||||
|
// look past newlines to check for a specific token
|
||||||
|
peekPastNewlines(type: TokenType, value?: string): boolean {
  // Scan forward, skipping any run of newline tokens, then match the first
  // non-newline token against the requested type (and value, if given).
  let offset = 0
  let token
  do {
    token = this.peek(++offset)
  } while (token && token.type === $T.Newline)

  if (token?.type !== type) return false
  return value === undefined || token.value === value
}
|
||||||
|
|
||||||
|
next(): Token {
  // Return the current token (or the synthetic EOF newline) and advance.
  const consumed = this.current()
  this.pos += 1
  return consumed
}
|
||||||
|
|
||||||
|
is(type: TokenType, value?: string): boolean {
  // Match the current token's type, and its value too when one is given.
  const token = this.current()
  if (!token || token.type !== type) return false
  return value === undefined || token.value === value
}
|
||||||
|
|
||||||
|
isAny(...types: TokenType[]): boolean {
  // True when the current token matches any of the given types.
  for (const t of types)
    if (this.is(t)) return true
  return false
}
|
||||||
|
|
||||||
|
nextIs(type: TokenType, value?: string): boolean {
  // Like is(), but for the token after the current one.
  const token = this.peek()
  if (token?.type !== type) return false
  return value === undefined || token.value === value
}
|
||||||
|
|
||||||
|
nextIsAny(...types: TokenType[]): boolean {
  // True when the next token matches any of the given types.
  for (const t of types)
    if (this.nextIs(t)) return true
  return false
}
|
||||||
|
|
||||||
|
isExprEnd(): boolean {
  // A pipe terminates the current expression, as do block-closing keywords,
  // statement separators, closing brackets/parens, and end of input.
  if (this.is($T.Operator, '|')) return true
  if (this.isExprEndKeyword()) return true
  return this.isAny($T.Colon, $T.Semicolon, $T.Newline, $T.CloseParen, $T.CloseBracket) ||
    !this.current()
}
|
||||||
|
|
||||||
|
nextIsExprEnd(): boolean {
  // pipes act like expression end for function arg parsing
  if (this.nextIs($T.Operator, '|'))
    return true

  if (this.nextIsAny($T.Colon, $T.Semicolon, $T.Newline, $T.CloseBracket, $T.CloseParen))
    return true

  // Block-closing keywords also terminate the expression.
  const closers = ['end', 'else', 'catch', 'finally']
  if (closers.some(kw => this.nextIs($T.Keyword, kw)))
    return true

  return !this.peek()
}
|
||||||
|
|
||||||
|
isExprEndKeyword(): boolean {
  // Keywords that close an enclosing block also end the current expression.
  return ['end', 'else', 'catch', 'finally'].some(kw => this.is($T.Keyword, kw))
}
|
||||||
|
|
||||||
|
isPipe(): boolean {
  if (this.is($T.Operator, '|')) return true

  // inside parens, only look for pipes on same line (don't look past newlines)
  if (this.inParens !== 0) return false

  return this.peekPastNewlines($T.Operator, '|')
}
|
||||||
|
|
||||||
|
expect(type: TokenType, value?: string): Token | never {
  // Consume and return the current token when it matches; otherwise raise a
  // CompilerError describing what was wanted versus what was found.
  if (this.is(type, value)) return this.next()

  const token = this.current()
  const wanted = `${TokenType[type]}${value ? ` "${value}"` : ''}`
  const got = `${TokenType[token?.type || 0]}${token?.value ? ` "${token.value}"` : ''}`
  throw new CompilerError(`Expected ${wanted}, got ${got} at position ${this.pos}`, token.from, token.to)
}
|
||||||
|
|
||||||
|
isEOF(): boolean {
  // True once every real token has been consumed.
  return !(this.pos < this.tokens.length)
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO lezer legacy
|
||||||
|
function collapseDotGets(origNodes: SyntaxNode[]): SyntaxNode {
|
||||||
|
const nodes = [...origNodes]
|
||||||
|
let right = nodes.pop()!
|
||||||
|
|
||||||
|
while (nodes.length > 0) {
|
||||||
|
const left = nodes.pop()!
|
||||||
|
|
||||||
|
if (left.type.is('Identifier')) left.type = 'IdentifierBeforeDot'
|
||||||
|
|
||||||
|
const dot = new SyntaxNode("DotGet", left.from, right.to)
|
||||||
|
dot.push(left, right)
|
||||||
|
|
||||||
|
right = dot
|
||||||
|
}
|
||||||
|
|
||||||
|
return right
|
||||||
|
}
|
||||||
|
|
@ -1,100 +0,0 @@
|
||||||
import { ContextTracker, InputStream } from '@lezer/lr'
|
|
||||||
import * as terms from './shrimp.terms'
|
|
||||||
|
|
||||||
export class Scope {
|
|
||||||
constructor(public parent: Scope | null, public vars = new Set<string>()) {}
|
|
||||||
|
|
||||||
has(name: string): boolean {
|
|
||||||
return this.vars.has(name) || (this.parent?.has(name) ?? false)
|
|
||||||
}
|
|
||||||
|
|
||||||
hash(): number {
|
|
||||||
let h = 0
|
|
||||||
for (const name of this.vars) {
|
|
||||||
for (let i = 0; i < name.length; i++) {
|
|
||||||
h = (h << 5) - h + name.charCodeAt(i)
|
|
||||||
h |= 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (this.parent) {
|
|
||||||
h = (h << 5) - h + this.parent.hash()
|
|
||||||
h |= 0
|
|
||||||
}
|
|
||||||
return h
|
|
||||||
}
|
|
||||||
|
|
||||||
// Static methods that return new Scopes (immutable operations)
|
|
||||||
|
|
||||||
static add(scope: Scope, ...names: string[]): Scope {
|
|
||||||
const newVars = new Set(scope.vars)
|
|
||||||
names.forEach((name) => newVars.add(name))
|
|
||||||
return new Scope(scope.parent, newVars)
|
|
||||||
}
|
|
||||||
|
|
||||||
push(): Scope {
|
|
||||||
return new Scope(this, new Set())
|
|
||||||
}
|
|
||||||
|
|
||||||
pop(): Scope {
|
|
||||||
return this.parent ?? this
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tracker context that combines Scope with temporary pending identifiers
|
|
||||||
class TrackerContext {
|
|
||||||
constructor(public scope: Scope, public pendingIds: string[] = []) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract identifier text from input stream
|
|
||||||
const readIdentifierText = (input: InputStream, start: number, end: number): string => {
|
|
||||||
let text = ''
|
|
||||||
for (let i = start; i < end; i++) {
|
|
||||||
const offset = i - input.pos
|
|
||||||
const ch = input.peek(offset)
|
|
||||||
if (ch === -1) break
|
|
||||||
text += String.fromCharCode(ch)
|
|
||||||
}
|
|
||||||
return text
|
|
||||||
}
|
|
||||||
|
|
||||||
// NOTE(review): module-level mutable flag shared by every parse that uses
// this tracker. Presumably fine for a single, synchronous parser instance —
// verify that two interleaved parses cannot corrupt it.
let inParams = false

// Lezer context tracker that maintains the lexical scope (which identifiers
// are currently bound) as the parser shifts tokens and reduces rules.
export const trackScope = new ContextTracker<TrackerContext>({
  // Start with a single empty root scope and no pending parameter names.
  start: new TrackerContext(new Scope(null, new Set())),

  shift(context, term, stack, input) {
    // A `do` token begins a parameter list; buffer identifiers until the
    // Params rule reduces (see reduce below).
    if (term == terms.Do) inParams = true

    // An assignment target becomes visible in the current scope immediately.
    if (term === terms.AssignableIdentifier) {
      const text = readIdentifierText(input, input.pos, stack.pos)
      return new TrackerContext(Scope.add(context.scope, text), context.pendingIds)
    }

    // Inside a parameter list, identifiers are only buffered — they enter
    // scope when Params reduces, not before.
    if (inParams && term === terms.Identifier) {
      const text = readIdentifierText(input, input.pos, stack.pos)
      return new TrackerContext(context.scope, [...context.pendingIds, text])
    }

    return context
  },

  reduce(context, term) {
    // Params finished: open a child scope seeded with the buffered names.
    if (term === terms.Params) {
      inParams = false
      let newScope = context.scope.push()
      if (context.pendingIds.length > 0) {
        newScope = Scope.add(newScope, ...context.pendingIds)
      }
      return new TrackerContext(newScope, [])
    }

    // Pop scope when exiting function
    if (term === terms.FunctionDef) {
      return new TrackerContext(context.scope.pop(), [])
    }

    return context
  },

  // Lezer uses this hash to decide whether two parse contexts are
  // interchangeable when reusing parse fragments.
  hash: (context) => context.scope.hash(),
})
|
|
||||||
|
|
@ -1,222 +0,0 @@
|
||||||
@external propSource highlighting from "./highlight"
|
|
||||||
|
|
||||||
@context trackScope from "./scopeTracker"
|
|
||||||
|
|
||||||
@skip { space | comment }
|
|
||||||
|
|
||||||
@top Program { item* }
|
|
||||||
|
|
||||||
@external tokens operatorTokenizer from "./operatorTokenizer" { Star, Slash, Plus, Minus, And, Or, Eq, EqEq, Neq, Lt, Lte, Gt, Gte, Modulo }
|
|
||||||
|
|
||||||
@tokens {
|
|
||||||
@precedence { Number Regex }
|
|
||||||
|
|
||||||
StringFragment { !['\\$]+ }
|
|
||||||
NamedArgPrefix { $[a-z]+ "=" }
|
|
||||||
Number { ("-" | "+")? $[0-9]+ ('.' $[0-9]+)? }
|
|
||||||
Boolean { "true" | "false" }
|
|
||||||
newlineOrSemicolon { "\n" | ";" }
|
|
||||||
eof { @eof }
|
|
||||||
space { " " | "\t" }
|
|
||||||
comment { "#" ![\n]* }
|
|
||||||
leftParen { "(" }
|
|
||||||
rightParen { ")" }
|
|
||||||
colon[closedBy="end", @name="colon"] { ":" }
|
|
||||||
Underscore { "_" }
|
|
||||||
Regex { "//" (![/\\\n[] | "\\" ![\n] | "[" (![\n\\\]] | "\\" ![\n])* "]")+ ("//" $[gimsuy]*)? } // Stolen from the lezer JavaScript grammar
|
|
||||||
"|"[@name=operator]
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@external tokens tokenizer from "./tokenizer" { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot }
|
|
||||||
@external specialize {Identifier} specializeKeyword from "./tokenizer" { Do }
|
|
||||||
|
|
||||||
@precedence {
|
|
||||||
pipe @left,
|
|
||||||
or @left,
|
|
||||||
and @left,
|
|
||||||
comparison @left,
|
|
||||||
multiplicative @left,
|
|
||||||
additive @left,
|
|
||||||
call
|
|
||||||
}
|
|
||||||
|
|
||||||
item {
|
|
||||||
consumeToTerminator newlineOrSemicolon |
|
|
||||||
consumeToTerminator eof |
|
|
||||||
newlineOrSemicolon // allow blank lines
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
consumeToTerminator {
|
|
||||||
PipeExpr |
|
|
||||||
ambiguousFunctionCall |
|
|
||||||
IfExpr |
|
|
||||||
FunctionDef |
|
|
||||||
Assign |
|
|
||||||
BinOp |
|
|
||||||
ConditionalOp |
|
|
||||||
expressionWithoutIdentifier
|
|
||||||
}
|
|
||||||
|
|
||||||
PipeExpr {
|
|
||||||
pipeOperand (!pipe "|" pipeOperand)+
|
|
||||||
}
|
|
||||||
|
|
||||||
pipeOperand {
|
|
||||||
FunctionCall | FunctionCallOrIdentifier
|
|
||||||
}
|
|
||||||
|
|
||||||
FunctionCallOrIdentifier {
|
|
||||||
DotGet | Identifier
|
|
||||||
}
|
|
||||||
|
|
||||||
ambiguousFunctionCall {
|
|
||||||
FunctionCall | FunctionCallOrIdentifier
|
|
||||||
}
|
|
||||||
|
|
||||||
FunctionCall {
|
|
||||||
(DotGet | Identifier | ParenExpr) arg+
|
|
||||||
}
|
|
||||||
|
|
||||||
arg {
|
|
||||||
PositionalArg | NamedArg
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
PositionalArg {
|
|
||||||
expression | FunctionDef | Underscore
|
|
||||||
}
|
|
||||||
|
|
||||||
NamedArg {
|
|
||||||
NamedArgPrefix (expression | FunctionDef | Underscore)
|
|
||||||
}
|
|
||||||
|
|
||||||
FunctionDef {
|
|
||||||
singleLineFunctionDef | multilineFunctionDef
|
|
||||||
}
|
|
||||||
|
|
||||||
singleLineFunctionDef {
|
|
||||||
Do Params colon consumeToTerminator @specialize[@name=keyword]<Identifier, "end">
|
|
||||||
}
|
|
||||||
|
|
||||||
multilineFunctionDef {
|
|
||||||
Do Params colon newlineOrSemicolon block @specialize[@name=keyword]<Identifier, "end">
|
|
||||||
}
|
|
||||||
|
|
||||||
IfExpr {
|
|
||||||
singleLineIf | multilineIf
|
|
||||||
}
|
|
||||||
|
|
||||||
singleLineIf {
|
|
||||||
@specialize[@name=keyword]<Identifier, "if"> (ConditionalOp | expression) colon SingleLineThenBlock @specialize[@name=keyword]<Identifier, "end">
|
|
||||||
}
|
|
||||||
|
|
||||||
multilineIf {
|
|
||||||
@specialize[@name=keyword]<Identifier, "if"> (ConditionalOp | expression) colon newlineOrSemicolon ThenBlock ElseIfExpr* ElseExpr? @specialize[@name=keyword]<Identifier, "end">
|
|
||||||
}
|
|
||||||
|
|
||||||
ElseIfExpr {
|
|
||||||
@specialize[@name=keyword]<Identifier, "elseif"> (ConditionalOp | expression) colon newlineOrSemicolon ThenBlock
|
|
||||||
}
|
|
||||||
|
|
||||||
ElseExpr {
|
|
||||||
@specialize[@name=keyword]<Identifier, "else"> colon newlineOrSemicolon ThenBlock
|
|
||||||
}
|
|
||||||
|
|
||||||
ThenBlock {
|
|
||||||
block
|
|
||||||
}
|
|
||||||
|
|
||||||
SingleLineThenBlock {
|
|
||||||
consumeToTerminator
|
|
||||||
}
|
|
||||||
|
|
||||||
ConditionalOp {
|
|
||||||
expression !comparison EqEq expression |
|
|
||||||
expression !comparison Neq expression |
|
|
||||||
expression !comparison Lt expression |
|
|
||||||
expression !comparison Lte expression |
|
|
||||||
expression !comparison Gt expression |
|
|
||||||
expression !comparison Gte expression |
|
|
||||||
(expression | ConditionalOp) !and And (expression | ConditionalOp) |
|
|
||||||
(expression | ConditionalOp) !or Or (expression | ConditionalOp)
|
|
||||||
}
|
|
||||||
|
|
||||||
Params {
|
|
||||||
Identifier*
|
|
||||||
}
|
|
||||||
|
|
||||||
Assign {
|
|
||||||
(AssignableIdentifier | Array) Eq consumeToTerminator
|
|
||||||
}
|
|
||||||
|
|
||||||
BinOp {
|
|
||||||
expression !multiplicative Modulo expression |
|
|
||||||
(expression | BinOp) !multiplicative Star (expression | BinOp) |
|
|
||||||
(expression | BinOp) !multiplicative Slash (expression | BinOp) |
|
|
||||||
(expression | BinOp) !additive Plus (expression | BinOp) |
|
|
||||||
(expression | BinOp) !additive Minus (expression | BinOp)
|
|
||||||
}
|
|
||||||
|
|
||||||
ParenExpr {
|
|
||||||
leftParen (ambiguousFunctionCall | BinOp | expressionWithoutIdentifier | ConditionalOp | PipeExpr | FunctionDef) rightParen
|
|
||||||
}
|
|
||||||
|
|
||||||
expression {
|
|
||||||
expressionWithoutIdentifier | DotGet | Identifier
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@local tokens {
|
|
||||||
dot { "." }
|
|
||||||
}
|
|
||||||
|
|
||||||
@skip {} {
|
|
||||||
DotGet {
|
|
||||||
IdentifierBeforeDot dot (Number | Identifier | ParenExpr)
|
|
||||||
}
|
|
||||||
|
|
||||||
String { "'" stringContent* "'" }
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
stringContent {
|
|
||||||
StringFragment |
|
|
||||||
Interpolation |
|
|
||||||
EscapeSeq
|
|
||||||
}
|
|
||||||
|
|
||||||
Interpolation {
|
|
||||||
"$" Identifier |
|
|
||||||
"$" ParenExpr
|
|
||||||
}
|
|
||||||
|
|
||||||
EscapeSeq {
|
|
||||||
"\\" ("$" | "n" | "t" | "r" | "\\" | "'")
|
|
||||||
}
|
|
||||||
|
|
||||||
Dict {
|
|
||||||
"[=]" |
|
|
||||||
"[" newlineOrSemicolon* NamedArg (newlineOrSemicolon | NamedArg)* "]"
|
|
||||||
}
|
|
||||||
|
|
||||||
Array {
|
|
||||||
"[" newlineOrSemicolon* (expression (newlineOrSemicolon | expression)*)? "]"
|
|
||||||
}
|
|
||||||
|
|
||||||
// We need expressionWithoutIdentifier to avoid conflicts in consumeToTerminator.
|
|
||||||
// Without this, when parsing "my-var" at statement level, the parser can't decide:
|
|
||||||
// - ambiguousFunctionCall → FunctionCallOrIdentifier → Identifier
|
|
||||||
// - expression → Identifier
|
|
||||||
// Both want the same Identifier token! So we use expressionWithoutIdentifier
|
|
||||||
// to remove Identifier from the second path, forcing standalone identifiers
|
|
||||||
// to go through ambiguousFunctionCall (which is what we want semantically).
|
|
||||||
// Yes, it is annoying and I gave up trying to use GLR to fix it.
|
|
||||||
expressionWithoutIdentifier {
|
|
||||||
ParenExpr | Word | String | Number | Boolean | Regex | Dict | Array | @specialize[@name=Null]<Identifier, "null">
|
|
||||||
}
|
|
||||||
|
|
||||||
block {
|
|
||||||
(consumeToTerminator? newlineOrSemicolon)*
|
|
||||||
}
|
|
||||||
4
src/parser/shrimp.grammar.d.ts
vendored
4
src/parser/shrimp.grammar.d.ts
vendored
|
|
@ -1,4 +0,0 @@
|
||||||
// Let TypeScript import raw `.grammar` files; they resolve to their text
// content as the default export (the bundler inlines the file).
declare module '*.grammar' {
  const content: string
  export default content
}
|
|
||||||
|
|
@ -1,53 +0,0 @@
|
||||||
// This file was generated by lezer-generator. You probably shouldn't edit it.
|
|
||||||
export const
|
|
||||||
Star = 1,
|
|
||||||
Slash = 2,
|
|
||||||
Plus = 3,
|
|
||||||
Minus = 4,
|
|
||||||
And = 5,
|
|
||||||
Or = 6,
|
|
||||||
Eq = 7,
|
|
||||||
EqEq = 8,
|
|
||||||
Neq = 9,
|
|
||||||
Lt = 10,
|
|
||||||
Lte = 11,
|
|
||||||
Gt = 12,
|
|
||||||
Gte = 13,
|
|
||||||
Modulo = 14,
|
|
||||||
Identifier = 15,
|
|
||||||
AssignableIdentifier = 16,
|
|
||||||
Word = 17,
|
|
||||||
IdentifierBeforeDot = 18,
|
|
||||||
Do = 19,
|
|
||||||
Program = 20,
|
|
||||||
PipeExpr = 21,
|
|
||||||
FunctionCall = 22,
|
|
||||||
DotGet = 23,
|
|
||||||
Number = 24,
|
|
||||||
ParenExpr = 25,
|
|
||||||
FunctionCallOrIdentifier = 26,
|
|
||||||
BinOp = 27,
|
|
||||||
String = 28,
|
|
||||||
StringFragment = 29,
|
|
||||||
Interpolation = 30,
|
|
||||||
EscapeSeq = 31,
|
|
||||||
Boolean = 32,
|
|
||||||
Regex = 33,
|
|
||||||
Dict = 34,
|
|
||||||
NamedArg = 35,
|
|
||||||
NamedArgPrefix = 36,
|
|
||||||
FunctionDef = 37,
|
|
||||||
Params = 38,
|
|
||||||
colon = 39,
|
|
||||||
keyword = 54,
|
|
||||||
Underscore = 41,
|
|
||||||
Array = 42,
|
|
||||||
Null = 43,
|
|
||||||
ConditionalOp = 44,
|
|
||||||
PositionalArg = 45,
|
|
||||||
IfExpr = 47,
|
|
||||||
SingleLineThenBlock = 49,
|
|
||||||
ThenBlock = 50,
|
|
||||||
ElseIfExpr = 51,
|
|
||||||
ElseExpr = 53,
|
|
||||||
Assign = 55
|
|
||||||
|
|
@ -1,27 +0,0 @@
|
||||||
// This file was generated by lezer-generator. You probably shouldn't edit it.
|
|
||||||
import {LRParser, LocalTokenGroup} from "@lezer/lr"
|
|
||||||
import {operatorTokenizer} from "./operatorTokenizer"
|
|
||||||
import {tokenizer, specializeKeyword} from "./tokenizer"
|
|
||||||
import {trackScope} from "./scopeTracker"
|
|
||||||
import {highlighting} from "./highlight"
|
|
||||||
const spec_Identifier = {__proto__:null,end:80, null:86, if:96, elseif:104, else:108}
|
|
||||||
export const parser = LRParser.deserialize({
|
|
||||||
version: 14,
|
|
||||||
states: "3[QYQbOOO#hQcO'#CvO$eOSO'#CxO$sQbO'#EVOOQ`'#DR'#DROOQa'#DO'#DOO%vQbO'#DWO'RQcO'#DzOOQa'#Dz'#DzO(UQcO'#DzO)WQcO'#DyO*PQRO'#CwO*dQcO'#DuO*{QcO'#DuO+^QbO'#CuO,UOpO'#CsOOQ`'#Dv'#DvO,ZQbO'#DuO,iQbO'#E]OOQ`'#D]'#D]O-^QRO'#DeOOQ`'#Du'#DuO-cQQO'#DtOOQ`'#Dt'#DtOOQ`'#Df'#DfQYQbOOO-kQbO'#DPOOQa'#Dy'#DyOOQ`'#DZ'#DZOOQ`'#E['#E[OOQ`'#Dm'#DmO-uQbO,59^O.cQbO'#CzO.kQWO'#C{OOOO'#D|'#D|OOOO'#Dg'#DgO/POSO,59dOOQa,59d,59dOOQ`'#Di'#DiO/_QbO'#DSO/gQQO,5:qOOQ`'#Dh'#DhO/lQbO,59rO/sQQO,59jOOQa,59r,59rO0OQbO,59rO0YQbO,5:PO,iQbO,59cO,iQbO,59cO,iQbO,59cO,iQbO,59tO,iQbO,59tO,iQbO,59tO0dQRO,59aO0kQRO,59aO0|QRO,59aO0wQQO,59aO1XQQO,59aO1aObO,59_O1lQbO'#DnO1wQbO,59]O2YQRO,5:wO2aQRO,5:wOOQ`,5:`,5:`OOQ`-E7d-E7dOOQ`,59k,59kOOQ`-E7k-E7kOOOO,59f,59fOOOO,59g,59gOOOO-E7e-E7eOOQa1G/O1G/OOOQ`-E7g-E7gO2lQbO1G0]OOQ`-E7f-E7fO2yQQO1G/UOOQa1G/^1G/^O3UQbO1G/^OOQO'#Dk'#DkO2yQQO1G/UOOQa1G/U1G/UOOQ`'#Dl'#DlO3UQbO1G/^OOQ`1G/k1G/kOOQa1G.}1G.}O3wQcO1G.}O4RQcO1G.}O4]QcO1G.}OOQa1G/`1G/`O5oQcO1G/`O5vQcO1G/`O5}QcO1G/`OOQa1G.{1G.{OOQa1G.y1G.yO!ZQbO'#CvO6UQbO'#CrOOQ`,5:Y,5:YOOQ`-E7l-E7lO6cQbO1G0cO6pQbO7+%wO6uQbO7+%xO7VQQO7+$pOOQa7+$p7+$pO7bQbO7+$xOOQa7+$x7+$xOOQO-E7i-E7iOOQ`-E7j-E7jOOQ`'#D_'#D_O7lQbO7+%}O7qQbO7+&OOOQ`<<Ic<<IcOOQ`'#Dj'#DjO8XQQO'#DjO8^QbO'#EXO8tQbO<<IdOOQa<<H[<<H[OOQa<<Hd<<HdOOQ`<<Ii<<IiOOQ`'#D`'#D`O8yQbO<<IjOOQ`,5:U,5:UOOQ`-E7h-E7hOOQ`AN?OAN?OO,iQbO'#DaOOQ`'#Do'#DoO9UQbOAN?UO9aQQO'#DcOOQ`AN?UAN?UO9fQbOAN?UO9kQRO,59{O9rQRO,59{OOQ`-E7m-E7mOOQ`G24pG24pO9}QbOG24pO:SQQO,59}O:XQQO1G/gOOQ`LD*[LD*[O6uQbO1G/iO7qQbO7+%ROOQ`7+%T7+%TOOQ`<<Hm<<Hm",
|
|
||||||
stateData: ":a~O!fOS!gOS~O_PO`dOaWOb_OcROhWOpWOqWO{WO!QbO!l^O!oQO!vTO!wUO!xgO~O_kOaWOb_OcROhWOpWOqWOtjOylO{WO!l^O!oQO!vTO!wUO!OjX!xjX#RjX!}jXxjX~OP!mXQ!mXR!mXS!mXT!mXU!mXW!mXX!mXY!mXZ!mX[!mX]!mX^!mX~P!ZOmrO!ouO!qpO!rqO~O_vOwvP~O_kOaWOb_OhWOpWOqWOtjO{WO!l^O!oQO!vTO!wUO!xyO~O!||O~P${OP!nXQ!nXR!nXS!nXT!nXU!nXW!nXX!nXY!nXZ!nX[!nX]!nX^!nX!x!nX#R!nXx!nX~O_kOaWOb_OcROhWOpWOqWOtjOylO{WO!l^O!oQO!vTO!wUO!}!nX~P%}OV!OO~P%}OP!mXQ!mXR!mXS!mXT!mXU!mXW!mXX!mXY!mXZ!mX[!mX]!mX^!mX~O!x!iX#R!iXx!iX~P(]OT!TOU!UOW!SOX!SOY!SOZ!SO[!SO]!SO~OP!QOQ!QOR!ROS!RO^!PO~P)eOP!QOQ!QOR!ROS!RO!x!iX#R!iXx!iX~OT!TOU!UO!x!iX#R!iXx!iX~O_POaWOb_OcROhWOpWOqWO{WO!l^O!oQO!vTO!wUO~O!k![O~O!O!]O!x!iX#R!iXx!iX~O_kOaWOb_OhWOpWOqWO{WO!l^O!oQO!vTO!wUO~OV!OO~O!x!aO#R!aO~OcROy!cO~P,iOcROtjOylO!Ofa!xfa#Rfa!}faxfa~P,iO_!eO!l^O~O!o!fO!q!fO!r!fO!s!fO!t!fO!u!fO~OmrO!o!hO!qpO!rqO~O_vOwvX~Ow!jO~O!|!mO~P${OtjO!x!oO!|!qO~O!x!rO!|!mO~P,iO`dO!QbO~P+^O!}!}O~P(]OP!QOQ!QOR!ROS!RO!}!}O~OT!TOU!UO!}!}O~O!O!]O!}!}O~O_#OOh#OO!l^O~O_#POb_O!l^O~O!O!]O!xea#Rea!}eaxea~Ow#TO~P)eOT!TOU!UOw#TO~O`dO!QbO!x#VO~P+^OtjO!x!oO!|#XO~O!x!rO!|#ZO~P,iO^!PORkiSki!xki#Rki!}kixki~OPkiQki~P3`OP!QOQ!QO~P3`OP!QOQ!QORkiSki!xki#Rki!}kixki~OW!SOX!SOY!SOZ!SO[!SO]!SOT|i!x|i#R|i!}|iw|ix|i~OU!UO~P4wOU!UO~P5ZOU|i~P4wOcROtjOylO~P,iO`dO!QbO!x#`O~P+^Ox#aO~O`dO!QbO!x#bOx!{P~P+^OtjO!x!oO!|#fO~O!x!rO!|#gO~P,iOx#hO~O`dO!QbO!x#bOx!{P!U!{P!W!{P~P+^O!x#kO~O`dO!QbO!x#bOx!{X!U!{X!W!{X~P+^Ox#mO~Ox#rO!U#nO!W#qO~Ox#wO!U#nO!W#qO~Ow#yO~Ox#wO~Ow#zO~P)eOT!TOU!UOw#zO~Ox#{O~O!x#|O~O!x#}O~Ohq~",
|
|
||||||
goto: "/r#RPPPPPPPPPPPPPPPPPPPPP#S#c#qP$i#c%g%|P&o&oPP%|&sP'W'qPPP'tP(i)UP)]P)i)l)uP)yP)]*P*V*]*c*i*r*|+W+a+hPPPP+n+r,WPP,j-wP.nPPPPPPPP.r.r/VPP/_/f/fdeOi!O!j#T#V#`#d#|#}R!Y^i`O^i!O!]!j#T#V#`#d#|#}fPO^i!O!j#T#V#`#d#|#}xkPUVbjoz}!P!Q!R!S!T!U!n!s#P#Q#Y#nR#P!]fVO^i!O!j#T#V#`#d#|#}xWPUVbjoz}!P!Q!R!S!T!U!n!s#P#Q#Y#nQ!epQ#O![R#Q!]d[Oi!O!j#T#V#`#d#|#}Q!W^Q!u!QR!x!R!aWOPUV^bijoz}!O!P!Q!R!S!T!U!j!n!s#P#Q#T#V#Y#`#d#n#|#}TrQtYmPVo#P#QQ{UQ!lzX!o{!l!p#WdeOi!O!j#T#V#`#d#|#}YlPVo#P#QQ!Y^R!cjRxRzWPUV^bjoz}!P!Q!R!S!T!U!n!s#P#Q#Y#neXOi!O!j#T#V#`#d#|#}d]Oi!O!j#T#V#`#d#|#}Q!X^Q!`bQ!y!UQ!{!TR#u#nZmPVo#P#QeeOi!O!j#T#V#`#d#|#}R#_#TQ#j#`Q$O#|R$P#}T#o#j#pQ#s#jR#x#pQiOR!biQtQR!gtQzUR!kzQwRR!iwW#d#V#`#|#}R#l#dQ!p{Q#W!lT#[!p#WQ!s}Q#Y!nT#]!s#YWoPV#P#QR!doS!^a!ZR#S!^Q#p#jR#v#pThOiSfOiQ!t!OQ#U!jQ#^#TZ#c#V#`#d#|#}daOi!O!j#T#V#`#d#|#}Q!Z^R#R!]fZO^i!O!j#T#V#`#d#|#}YlPVo#P#QQ}UQ!_bQ!cjQ!nzW!r}!n!s#YQ!u!PQ!v!QQ!w!RQ!y!SQ!z!TQ!|!UR#t#ndYOi!O!j#T#V#`#d#|#}xkPUVbjoz}!P!Q!R!S!T!U!n!s#P#Q#Y#nR!V^TsQtsSOPV^ijo!O!j#P#Q#T#V#`#d#|#}Q#e#VV#i#`#|#}ZnPVo#P#QecOi!O!j#T#V#`#d#|#}",
|
|
||||||
nodeNames: "⚠ Star Slash Plus Minus And Or Eq EqEq Neq Lt Lte Gt Gte Modulo Identifier AssignableIdentifier Word IdentifierBeforeDot Do Program PipeExpr FunctionCall DotGet Number ParenExpr FunctionCallOrIdentifier BinOp String StringFragment Interpolation EscapeSeq Boolean Regex Dict NamedArg NamedArgPrefix FunctionDef Params colon keyword Underscore Array Null ConditionalOp PositionalArg operator IfExpr keyword SingleLineThenBlock ThenBlock ElseIfExpr keyword ElseExpr keyword Assign",
|
|
||||||
maxTerm: 95,
|
|
||||||
context: trackScope,
|
|
||||||
nodeProps: [
|
|
||||||
["closedBy", 39,"end"]
|
|
||||||
],
|
|
||||||
propSources: [highlighting],
|
|
||||||
skippedNodes: [0],
|
|
||||||
repeatNodeCount: 10,
|
|
||||||
tokenData: "AO~R|OX#{XY$jYZ%TZp#{pq$jqs#{st%ntu'Vuw#{wx'[xy'ayz'zz{#{{|(e|}#{}!O(e!O!P#{!P!Q+X!Q![)S![!]3t!]!^%T!^!}#{!}#O4_#O#P6T#P#Q6Y#Q#R#{#R#S6s#S#T#{#T#Y7^#Y#Z8l#Z#b7^#b#c<z#c#f7^#f#g=q#g#h7^#h#i>h#i#o7^#o#p#{#p#q@`#q;'S#{;'S;=`$d<%l~#{~O#{~~@yS$QUmSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{S$gP;=`<%l#{^$qUmS!fYOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U%[UmS!xQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{^%uZmS!gYOY%nYZ#{Zt%ntu&huw%nwx&hx#O%n#O#P&h#P;'S%n;'S;=`'P<%lO%nY&mS!gYOY&hZ;'S&h;'S;=`&y<%lO&hY&|P;=`<%l&h^'SP;=`<%l%n~'[O!q~~'aO!o~U'hUmS!lQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U(RUmS!}QOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U(jWmSOt#{uw#{x!Q#{!Q![)S![#O#{#P;'S#{;'S;=`$d<%lO#{U)ZYmShQOt#{uw#{x!O#{!O!P)y!P!Q#{!Q![)S![#O#{#P;'S#{;'S;=`$d<%lO#{U*OWmSOt#{uw#{x!Q#{!Q![*h![#O#{#P;'S#{;'S;=`$d<%lO#{U*oWmShQOt#{uw#{x!Q#{!Q![*h![#O#{#P;'S#{;'S;=`$d<%lO#{U+^WmSOt#{uw#{x!P#{!P!Q+v!Q#O#{#P;'S#{;'S;=`$d<%lO#{U+{^mSOY,wYZ#{Zt,wtu-zuw,wwx-zx!P,w!P!Q#{!Q!},w!}#O2m#O#P0Y#P;'S,w;'S;=`3n<%lO,wU-O^mSqQOY,wYZ#{Zt,wtu-zuw,wwx-zx!P,w!P!Q0o!Q!},w!}#O2m#O#P0Y#P;'S,w;'S;=`3n<%lO,wQ.PXqQOY-zZ!P-z!P!Q.l!Q!}-z!}#O/Z#O#P0Y#P;'S-z;'S;=`0i<%lO-zQ.oP!P!Q.rQ.wUqQ#Z#[.r#]#^.r#a#b.r#g#h.r#i#j.r#m#n.rQ/^VOY/ZZ#O/Z#O#P/s#P#Q-z#Q;'S/Z;'S;=`0S<%lO/ZQ/vSOY/ZZ;'S/Z;'S;=`0S<%lO/ZQ0VP;=`<%l/ZQ0]SOY-zZ;'S-z;'S;=`0i<%lO-zQ0lP;=`<%l-zU0tWmSOt#{uw#{x!P#{!P!Q1^!Q#O#{#P;'S#{;'S;=`$d<%lO#{U1ebmSqQOt#{uw#{x#O#{#P#Z#{#Z#[1^#[#]#{#]#^1^#^#a#{#a#b1^#b#g#{#g#h1^#h#i#{#i#j1^#j#m#{#m#n1^#n;'S#{;'S;=`$d<%lO#{U2r[mSOY2mYZ#{Zt2mtu/Zuw2mwx/Zx#O2m#O#P/s#P#Q,w#Q;'S2m;'S;=`3h<%lO2mU3kP;=`<%l2mU3qP;=`<%l,wU3{UmSwQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U4fW!wQmSOt#{uw#{x!_#{!_!`5O!`#O#{#P;'S#{;'S;=`$d<%lO#{U5TVmSOt#{uw#{x#O#{#P#Q5j#Q;'S#{;'S;=`$d<%lO#{U5qU!vQmSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{~6YO!r~U6aU!|QmSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U6zUmSyQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U7cYmSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#o7^#o;'S#{;'S;=`$d<%lO#{U8YUtQmSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U8qZmSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#U9d#U#o7^#o;'S#{;'S;=`$d<%lO#{U9i[m
SOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#`7^#`#a:_#a#o7^#o;'S#{;'S;=`$d<%lO#{U:d[mSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#g7^#g#h;Y#h#o7^#o;'S#{;'S;=`$d<%lO#{U;_[mSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#X7^#X#Y<T#Y#o7^#o;'S#{;'S;=`$d<%lO#{U<[YpQmSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#o7^#o;'S#{;'S;=`$d<%lO#{^=RY!sWmSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#o7^#o;'S#{;'S;=`$d<%lO#{^=xY!uWmSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#o7^#o;'S#{;'S;=`$d<%lO#{^>o[!tWmSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#f7^#f#g?e#g#o7^#o;'S#{;'S;=`$d<%lO#{U?j[mSOt#{uw#{x!_#{!_!`8R!`#O#{#P#T#{#T#i7^#i#j;Y#j#o7^#o;'S#{;'S;=`$d<%lO#{U@gU!OQmSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{~AOO#R~",
|
|
||||||
tokenizers: [operatorTokenizer, 1, 2, 3, tokenizer, new LocalTokenGroup("[~RP!O!PU~ZO!k~~", 11)],
|
|
||||||
topRules: {"Program":[0,20]},
|
|
||||||
specialized: [{term: 15, get: (value: any, stack: any) => (specializeKeyword(value, stack) << 1), external: specializeKeyword},{term: 15, get: (value: keyof typeof spec_Identifier) => spec_Identifier[value] || -1}],
|
|
||||||
tokenPrec: 1164
|
|
||||||
})
|
|
||||||
258
src/parser/stringParser.ts
Normal file
258
src/parser/stringParser.ts
Normal file
|
|
@ -0,0 +1,258 @@
|
||||||
|
import { SyntaxNode } from './node'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse string contents into fragments, interpolations, and escape sequences.
|
||||||
|
*
|
||||||
|
* Input: full string including quotes, e.g. "'hello $name'"
|
||||||
|
* Output: SyntaxNode tree with StringFragment, Interpolation, EscapeSeq children
|
||||||
|
*/
|
||||||
|
export const parseString = (input: string, from: number, to: number, parser: any): SyntaxNode => {
|
||||||
|
const stringNode = new SyntaxNode('String', from, to)
|
||||||
|
const content = input.slice(from, to)
|
||||||
|
|
||||||
|
// Determine string type
|
||||||
|
const firstChar = content[0]
|
||||||
|
|
||||||
|
// Double-quoted strings: no interpolation or escapes
|
||||||
|
if (firstChar === '"') {
|
||||||
|
const fragment = new SyntaxNode('DoubleQuote', from, to)
|
||||||
|
stringNode.add(fragment)
|
||||||
|
return stringNode
|
||||||
|
}
|
||||||
|
|
||||||
|
// Curly strings: interpolation but no escapes
|
||||||
|
if (firstChar === '{') {
|
||||||
|
parseCurlyString(stringNode, input, from, to, parser)
|
||||||
|
return stringNode
|
||||||
|
}
|
||||||
|
|
||||||
|
// Single-quoted strings: interpolation and escapes
|
||||||
|
if (firstChar === "'") {
|
||||||
|
parseSingleQuoteString(stringNode, input, from, to, parser)
|
||||||
|
return stringNode
|
||||||
|
}
|
||||||
|
|
||||||
|
throw `Unknown string type starting with: ${firstChar}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Parse single-quoted string: 'hello $name\n'
 * Supports: interpolation ($var, $(expr)), escape sequences (\n, \$, etc)
 *
 * Appends StringFragment / EscapeSeq / Interpolation children to stringNode
 * in source order; positions are absolute offsets into `input`.
 */
const parseSingleQuoteString = (stringNode: SyntaxNode, input: string, from: number, to: number, parser: any) => {
  let pos = from + 1 // Skip opening '
  let fragmentStart = pos // start of the plain-text run being accumulated

  while (pos < to - 1) { // -1 to skip closing '
    const char = input[pos]

    // Escape sequence: two chars (backslash + escaped char). The bounds
    // check keeps the escaped char inside the quotes.
    if (char === '\\' && pos + 1 < to - 1) {
      // Push accumulated fragment
      if (pos > fragmentStart) {
        const frag = new SyntaxNode('StringFragment', fragmentStart, pos)
        stringNode.add(frag)
      }

      // Add escape sequence node
      const escNode = new SyntaxNode('EscapeSeq', pos, pos + 2)
      stringNode.add(escNode)

      pos += 2
      fragmentStart = pos
      continue
    }

    // Interpolation
    if (char === '$') {
      // Push accumulated fragment
      if (pos > fragmentStart) {
        const frag = new SyntaxNode('StringFragment', fragmentStart, pos)
        stringNode.add(frag)
      }

      pos++ // Skip $

      // Parse interpolation content
      if (input[pos] === '(') {
        // Expression interpolation: $(expr) — handed off to the expression parser.
        const interpStart = pos - 1 // Include the $
        const exprResult = parseInterpolationExpr(input, pos, parser)
        const interpNode = new SyntaxNode('Interpolation', interpStart, exprResult.endPos)
        interpNode.add(exprResult.node)
        stringNode.add(interpNode)
        pos = exprResult.endPos
      } else {
        // Variable interpolation: $name — wrapped as FunctionCallOrIdentifier
        // to match the node shape the rest of the compiler expects.
        const interpStart = pos - 1
        const identEnd = findIdentifierEnd(input, pos, to - 1)
        const identNode = new SyntaxNode('FunctionCallOrIdentifier', pos, identEnd)
        const innerIdent = new SyntaxNode('Identifier', pos, identEnd)
        identNode.add(innerIdent)

        const interpNode = new SyntaxNode('Interpolation', interpStart, identEnd)
        interpNode.add(identNode)
        stringNode.add(interpNode)
        pos = identEnd
      }

      fragmentStart = pos
      continue
    }

    pos++
  }

  // Push final fragment (skipped when empty or already flushed)
  if (pos > fragmentStart && fragmentStart < to - 1) {
    const frag = new SyntaxNode('StringFragment', fragmentStart, pos)
    stringNode.add(frag)
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse curly string: { hello $name }
|
||||||
|
* Supports: interpolation ($var, $(expr)), nested braces
|
||||||
|
* Does NOT support: escape sequences (raw content)
|
||||||
|
*/
|
||||||
|
const parseCurlyString = (stringNode: SyntaxNode, input: string, from: number, to: number, parser: any) => {
|
||||||
|
let pos = from + 1 // Skip opening {
|
||||||
|
let fragmentStart = from // Include the opening { in the fragment
|
||||||
|
let depth = 1
|
||||||
|
|
||||||
|
while (pos < to && depth > 0) {
|
||||||
|
const char = input[pos]
|
||||||
|
|
||||||
|
// Track brace nesting
|
||||||
|
if (char === '{') {
|
||||||
|
depth++
|
||||||
|
pos++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (char === '}') {
|
||||||
|
depth--
|
||||||
|
if (depth === 0) {
|
||||||
|
// Push final fragment including closing }
|
||||||
|
const frag = new SyntaxNode('CurlyString', fragmentStart, pos + 1)
|
||||||
|
stringNode.add(frag)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
pos++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Interpolation
|
||||||
|
if (char === '$') {
|
||||||
|
// Push accumulated fragment
|
||||||
|
if (pos > fragmentStart) {
|
||||||
|
const frag = new SyntaxNode('CurlyString', fragmentStart, pos)
|
||||||
|
stringNode.add(frag)
|
||||||
|
}
|
||||||
|
|
||||||
|
pos++ // Skip $
|
||||||
|
|
||||||
|
// Parse interpolation content
|
||||||
|
if (input[pos] === '(') {
|
||||||
|
// Expression interpolation: $(expr)
|
||||||
|
const interpStart = pos - 1
|
||||||
|
const exprResult = parseInterpolationExpr(input, pos, parser)
|
||||||
|
const interpNode = new SyntaxNode('Interpolation', interpStart, exprResult.endPos)
|
||||||
|
interpNode.add(exprResult.node)
|
||||||
|
stringNode.add(interpNode)
|
||||||
|
pos = exprResult.endPos
|
||||||
|
} else {
|
||||||
|
// Variable interpolation: $name
|
||||||
|
const interpStart = pos - 1
|
||||||
|
const identEnd = findIdentifierEnd(input, pos, to)
|
||||||
|
const identNode = new SyntaxNode('FunctionCallOrIdentifier', pos, identEnd)
|
||||||
|
const innerIdent = new SyntaxNode('Identifier', pos, identEnd)
|
||||||
|
identNode.add(innerIdent)
|
||||||
|
|
||||||
|
const interpNode = new SyntaxNode('Interpolation', interpStart, identEnd)
|
||||||
|
interpNode.add(identNode)
|
||||||
|
stringNode.add(interpNode)
|
||||||
|
pos = identEnd
|
||||||
|
}
|
||||||
|
|
||||||
|
fragmentStart = pos
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
pos++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a parenthesized expression interpolation: $(a + b)
|
||||||
|
* Returns the parsed expression node and the position after the closing )
|
||||||
|
* pos is position of the opening ( in the full input string
|
||||||
|
*/
|
||||||
|
const parseInterpolationExpr = (input: string, pos: number, parser: any): { node: SyntaxNode, endPos: number } => {
|
||||||
|
// Find matching closing paren
|
||||||
|
let depth = 1
|
||||||
|
let start = pos
|
||||||
|
let end = pos + 1 // Start after opening (
|
||||||
|
|
||||||
|
while (end < input.length && depth > 0) {
|
||||||
|
if (input[end] === '(') depth++
|
||||||
|
if (input[end] === ')') {
|
||||||
|
depth--
|
||||||
|
if (depth === 0) break
|
||||||
|
}
|
||||||
|
end++
|
||||||
|
}
|
||||||
|
|
||||||
|
const exprContent = input.slice(start + 1, end) // Content between ( and )
|
||||||
|
const closeParen = end
|
||||||
|
end++ // Move past closing )
|
||||||
|
|
||||||
|
// Use the main parser to parse the expression
|
||||||
|
const exprNode = parser.parse(exprContent)
|
||||||
|
|
||||||
|
// Get the first real node (skip Program wrapper)
|
||||||
|
const innerNode = exprNode.firstChild || exprNode
|
||||||
|
|
||||||
|
// Adjust node positions: they're relative to exprContent, need to offset to full input
|
||||||
|
const offset = start + 1 // Position where exprContent starts in full input
|
||||||
|
adjustNodePositions(innerNode, offset)
|
||||||
|
|
||||||
|
// Wrap in ParenExpr - use positions in the full string
|
||||||
|
const parenNode = new SyntaxNode('ParenExpr', start, closeParen + 1)
|
||||||
|
parenNode.add(innerNode)
|
||||||
|
|
||||||
|
return { node: parenNode, endPos: end }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recursively adjust all node positions by adding an offset
|
||||||
|
*/
|
||||||
|
const adjustNodePositions = (node: SyntaxNode, offset: number) => {
|
||||||
|
node.from += offset
|
||||||
|
node.to += offset
|
||||||
|
|
||||||
|
for (const child of node.children) {
|
||||||
|
adjustNodePositions(child, offset)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the end position of an identifier starting at pos
|
||||||
|
* Identifiers: lowercase letter or emoji, followed by letters/digits/dashes/emoji
|
||||||
|
*/
|
||||||
|
const findIdentifierEnd = (input: string, pos: number, maxPos: number): number => {
|
||||||
|
let end = pos
|
||||||
|
|
||||||
|
while (end < maxPos) {
|
||||||
|
const char = input[end]!
|
||||||
|
|
||||||
|
// Stop at non-identifier characters
|
||||||
|
if (!/[a-z0-9\-?]/.test(char)) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
end++
|
||||||
|
}
|
||||||
|
|
||||||
|
return end
|
||||||
|
}
|
||||||
|
|
@ -1,7 +1,5 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
|
||||||
|
|
||||||
describe('null', () => {
|
describe('null', () => {
|
||||||
test('parses null', () => {
|
test('parses null', () => {
|
||||||
expect('null').toMatchTree(`Null null`)
|
expect('null').toMatchTree(`Null null`)
|
||||||
|
|
@ -48,7 +46,6 @@ describe('Identifier', () => {
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier even?`)
|
Identifier even?`)
|
||||||
})
|
})
|
||||||
|
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('Unicode Symbol Support', () => {
|
describe('Unicode Symbol Support', () => {
|
||||||
|
|
@ -369,6 +366,138 @@ describe('Parentheses', () => {
|
||||||
PositionalArg
|
PositionalArg
|
||||||
Number 3`)
|
Number 3`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('function call with named args on multiple lines in parens', () => {
|
||||||
|
expect(`(tail
|
||||||
|
arg1=true
|
||||||
|
arg2=30
|
||||||
|
)`).toMatchTree(`
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier tail
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix arg1=
|
||||||
|
Boolean true
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix arg2=
|
||||||
|
Number 30
|
||||||
|
`)
|
||||||
|
|
||||||
|
expect(`(
|
||||||
|
tail
|
||||||
|
arg1=true
|
||||||
|
arg2=30
|
||||||
|
)`).toMatchTree(`
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier tail
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix arg1=
|
||||||
|
Boolean true
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix arg2=
|
||||||
|
Number 30
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('binop with newlines in parens', () => {
|
||||||
|
expect(`(
|
||||||
|
1 + 2
|
||||||
|
)`).toMatchTree(`
|
||||||
|
ParenExpr
|
||||||
|
BinOp
|
||||||
|
Number 1
|
||||||
|
Plus +
|
||||||
|
Number 2`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('comparison with newlines in parens', () => {
|
||||||
|
expect(`(
|
||||||
|
1 < 2
|
||||||
|
)`).toMatchTree(`
|
||||||
|
ParenExpr
|
||||||
|
ConditionalOp
|
||||||
|
Number 1
|
||||||
|
Lt <
|
||||||
|
Number 2`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function call with multiple identifiers on separate lines in parens', () => {
|
||||||
|
expect(`(echo
|
||||||
|
arg1
|
||||||
|
arg2
|
||||||
|
arg3
|
||||||
|
)`).toMatchTree(`
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier arg1
|
||||||
|
PositionalArg
|
||||||
|
Identifier arg2
|
||||||
|
PositionalArg
|
||||||
|
Identifier arg3`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function call with mulitline identifiers starting separate lines in parens', () => {
|
||||||
|
expect(`(
|
||||||
|
|
||||||
|
echo
|
||||||
|
arg1
|
||||||
|
arg2
|
||||||
|
arg3
|
||||||
|
|
||||||
|
)`).toMatchTree(`
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier arg1
|
||||||
|
PositionalArg
|
||||||
|
Identifier arg2
|
||||||
|
PositionalArg
|
||||||
|
Identifier arg3`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('Number literals', () => {
|
||||||
|
test('allows underscores in integer literals', () => {
|
||||||
|
expect('10_000').toMatchTree(`Number 10_000`)
|
||||||
|
expect('1_000_000').toMatchTree(`Number 1_000_000`)
|
||||||
|
expect('100_000').toMatchTree(`Number 100_000`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('allows underscores in decimal literals', () => {
|
||||||
|
expect('3.14_159').toMatchTree(`Number 3.14_159`)
|
||||||
|
expect('1_000.50').toMatchTree(`Number 1_000.50`)
|
||||||
|
expect('0.000_001').toMatchTree(`Number 0.000_001`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('allows underscores in negative numbers', () => {
|
||||||
|
expect('-10_000').toMatchTree(`Number -10_000`)
|
||||||
|
expect('-3.14_159').toMatchTree(`Number -3.14_159`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('allows underscores in positive numbers with explicit sign', () => {
|
||||||
|
expect('+10_000').toMatchTree(`Number +10_000`)
|
||||||
|
expect('+3.14_159').toMatchTree(`Number +3.14_159`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('works in expressions', () => {
|
||||||
|
expect('1_000 + 2_000').toMatchTree(`
|
||||||
|
BinOp
|
||||||
|
Number 1_000
|
||||||
|
Plus +
|
||||||
|
Number 2_000`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('works in function calls', () => {
|
||||||
|
expect('echo 10_000').toMatchTree(`
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Number 10_000`)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('BinOp', () => {
|
describe('BinOp', () => {
|
||||||
|
|
@ -532,103 +661,199 @@ describe('Assign', () => {
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('DotGet whitespace sensitivity', () => {
|
describe('CompoundAssign', () => {
|
||||||
test('no whitespace - DotGet works when identifier in scope', () => {
|
test('parses += operator', () => {
|
||||||
expect('basename = 5; basename.prop').toMatchTree(`
|
expect('x += 5').toMatchTree(`
|
||||||
Assign
|
CompoundAssign
|
||||||
AssignableIdentifier basename
|
AssignableIdentifier x
|
||||||
Eq =
|
PlusEq +=
|
||||||
Number 5
|
Number 5`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses -= operator', () => {
|
||||||
|
expect('count -= 1').toMatchTree(`
|
||||||
|
CompoundAssign
|
||||||
|
AssignableIdentifier count
|
||||||
|
MinusEq -=
|
||||||
|
Number 1`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses *= operator', () => {
|
||||||
|
expect('total *= 2').toMatchTree(`
|
||||||
|
CompoundAssign
|
||||||
|
AssignableIdentifier total
|
||||||
|
StarEq *=
|
||||||
|
Number 2`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses /= operator', () => {
|
||||||
|
expect('value /= 10').toMatchTree(`
|
||||||
|
CompoundAssign
|
||||||
|
AssignableIdentifier value
|
||||||
|
SlashEq /=
|
||||||
|
Number 10`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses %= operator', () => {
|
||||||
|
expect('remainder %= 3').toMatchTree(`
|
||||||
|
CompoundAssign
|
||||||
|
AssignableIdentifier remainder
|
||||||
|
ModuloEq %=
|
||||||
|
Number 3`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses compound assignment with expression', () => {
|
||||||
|
expect('x += 1 + 2').toMatchTree(`
|
||||||
|
CompoundAssign
|
||||||
|
AssignableIdentifier x
|
||||||
|
PlusEq +=
|
||||||
|
BinOp
|
||||||
|
Number 1
|
||||||
|
Plus +
|
||||||
|
Number 2`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses compound assignment with function call', () => {
|
||||||
|
expect('total += add 5 3').toMatchTree(`
|
||||||
|
CompoundAssign
|
||||||
|
AssignableIdentifier total
|
||||||
|
PlusEq +=
|
||||||
|
FunctionCall
|
||||||
|
Identifier add
|
||||||
|
PositionalArg
|
||||||
|
Number 5
|
||||||
|
PositionalArg
|
||||||
|
Number 3`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses ??= operator', () => {
|
||||||
|
expect('x ??= 5').toMatchTree(`
|
||||||
|
CompoundAssign
|
||||||
|
AssignableIdentifier x
|
||||||
|
NullishEq ??=
|
||||||
|
Number 5`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses ??= with expression', () => {
|
||||||
|
expect('config ??= get-default').toMatchTree(`
|
||||||
|
CompoundAssign
|
||||||
|
AssignableIdentifier config
|
||||||
|
NullishEq ??=
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier get-default`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('Nullish coalescing operator', () => {
|
||||||
|
test('? can still end an identifier', () => {
|
||||||
|
expect('what?').toMatchTree(`
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
DotGet
|
Identifier what?`)
|
||||||
IdentifierBeforeDot basename
|
|
||||||
Identifier prop`)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test('space before dot - NOT DotGet, parses as division', () => {
|
test('?? can still end an identifier', () => {
|
||||||
expect('basename = 5; basename / prop').toMatchTree(`
|
expect('what??').toMatchTree(`
|
||||||
Assign
|
FunctionCallOrIdentifier
|
||||||
AssignableIdentifier basename
|
Identifier what??`)
|
||||||
Eq =
|
|
||||||
Number 5
|
|
||||||
BinOp
|
|
||||||
Identifier basename
|
|
||||||
Slash /
|
|
||||||
Identifier prop`)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dot followed by slash is Word, not DotGet', () => {
|
test('?? can still be in a word', () => {
|
||||||
expect('basename ./cool').toMatchTree(`
|
expect('what??the').toMatchTree(`
|
||||||
FunctionCall
|
FunctionCallOrIdentifier
|
||||||
Identifier basename
|
Identifier what??the`)
|
||||||
PositionalArg
|
|
||||||
Word ./cool`)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test('identifier not in scope with dot becomes Word', () => {
|
test('?? can still start a word', () => {
|
||||||
expect('readme.txt').toMatchTree(`Word readme.txt`)
|
expect('??what??the').toMatchTree(`
|
||||||
|
Word ??what??the`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses ?? operator', () => {
|
||||||
|
expect('x ?? 5').toMatchTree(`
|
||||||
|
ConditionalOp
|
||||||
|
Identifier x
|
||||||
|
NullishCoalesce ??
|
||||||
|
Number 5`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses chained ?? operators', () => {
|
||||||
|
expect('a ?? b ?? c').toMatchTree(`
|
||||||
|
ConditionalOp
|
||||||
|
ConditionalOp
|
||||||
|
Identifier a
|
||||||
|
NullishCoalesce ??
|
||||||
|
Identifier b
|
||||||
|
NullishCoalesce ??
|
||||||
|
Identifier c`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses ?? with expressions', () => {
|
||||||
|
expect('get-value ?? default-value').toMatchTree(`
|
||||||
|
ConditionalOp
|
||||||
|
Identifier get-value
|
||||||
|
NullishCoalesce ??
|
||||||
|
Identifier default-value`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses ?? with parenthesized function call', () => {
|
||||||
|
expect('get-value ?? (default 10)').toMatchTree(`
|
||||||
|
ConditionalOp
|
||||||
|
Identifier get-value
|
||||||
|
NullishCoalesce ??
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier default
|
||||||
|
PositionalArg
|
||||||
|
Number 10`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('Comments', () => {
|
describe('Comments', () => {
|
||||||
test('are barely there', () => {
|
test('are greedy', () => {
|
||||||
expect(`x = 5 # one banana\ny = 2 # two bananas`).toMatchTree(`
|
expect(`
|
||||||
|
x = 5 # one banana
|
||||||
|
y = 2 #two bananas`).toMatchTree(`
|
||||||
Assign
|
Assign
|
||||||
AssignableIdentifier x
|
AssignableIdentifier x
|
||||||
Eq =
|
Eq =
|
||||||
Number 5
|
Number 5
|
||||||
|
Comment # one banana
|
||||||
Assign
|
Assign
|
||||||
AssignableIdentifier y
|
AssignableIdentifier y
|
||||||
Eq =
|
Eq =
|
||||||
Number 2`)
|
Number 2
|
||||||
|
Comment #two bananas`)
|
||||||
|
|
||||||
expect('# some comment\nbasename = 5 # very astute\n basename / prop\n# good info').toMatchTree(`
|
expect(`
|
||||||
Assign
|
# some comment
|
||||||
AssignableIdentifier basename
|
basename = 5 # very astute
|
||||||
Eq =
|
basename / prop
|
||||||
Number 5
|
# good info`).toMatchTree(`
|
||||||
BinOp
|
Comment # some comment
|
||||||
Identifier basename
|
Assign
|
||||||
Slash /
|
AssignableIdentifier basename
|
||||||
Identifier prop`)
|
Eq =
|
||||||
})
|
Number 5
|
||||||
})
|
Comment # very astute
|
||||||
|
BinOp
|
||||||
describe('Array destructuring', () => {
|
Identifier basename
|
||||||
test('parses array pattern with two variables', () => {
|
Slash /
|
||||||
expect('[ a b ] = [ 1 2 3 4]').toMatchTree(`
|
Identifier prop
|
||||||
Assign
|
Comment # good info`)
|
||||||
Array
|
|
||||||
Identifier a
|
|
||||||
Identifier b
|
|
||||||
Eq =
|
|
||||||
Array
|
|
||||||
Number 1
|
|
||||||
Number 2
|
|
||||||
Number 3
|
|
||||||
Number 4`)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test('parses array pattern with one variable', () => {
|
test('words with # are not considered comments', () => {
|
||||||
expect('[ x ] = [ 42 ]').toMatchTree(`
|
expect('find my#hashtag-file.txt').toMatchTree(`
|
||||||
Assign
|
FunctionCall
|
||||||
Array
|
Identifier find
|
||||||
Identifier x
|
PositionalArg
|
||||||
Eq =
|
Word my#hashtag-file.txt`)
|
||||||
Array
|
|
||||||
Number 42`)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test('parses array pattern with emoji identifiers', () => {
|
test('hastags in strings are not comments', () => {
|
||||||
expect('[ 🚀 💎 ] = [ 1 2 ]').toMatchTree(`
|
expect("'this is not a #comment'").toMatchTree(`
|
||||||
Assign
|
String
|
||||||
Array
|
StringFragment this is not a #comment`)
|
||||||
Identifier 🚀
|
|
||||||
Identifier 💎
|
|
||||||
Eq =
|
|
||||||
Array
|
|
||||||
Number 1
|
|
||||||
Number 2`)
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -668,7 +893,7 @@ Assign
|
||||||
EqEq ==
|
EqEq ==
|
||||||
Number 5
|
Number 5
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
Boolean true
|
Boolean true
|
||||||
keyword end
|
keyword end
|
||||||
keyword end
|
keyword end
|
||||||
|
|
@ -710,10 +935,10 @@ Assign
|
||||||
EqEq ==
|
EqEq ==
|
||||||
Number 5
|
Number 5
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
Boolean true
|
Boolean true
|
||||||
keyword end
|
keyword end
|
||||||
keyword end
|
keyword end
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
|
||||||
70
src/parser/tests/bitwise.test.ts
Normal file
70
src/parser/tests/bitwise.test.ts
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('bitwise operators - grammar', () => {
|
||||||
|
test('parses band (bitwise AND)', () => {
|
||||||
|
expect('5 band 3').toMatchTree(`
|
||||||
|
BinOp
|
||||||
|
Number 5
|
||||||
|
Band band
|
||||||
|
Number 3`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses bor (bitwise OR)', () => {
|
||||||
|
expect('5 bor 3').toMatchTree(`
|
||||||
|
BinOp
|
||||||
|
Number 5
|
||||||
|
Bor bor
|
||||||
|
Number 3`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses bxor (bitwise XOR)', () => {
|
||||||
|
expect('5 bxor 3').toMatchTree(`
|
||||||
|
BinOp
|
||||||
|
Number 5
|
||||||
|
Bxor bxor
|
||||||
|
Number 3`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses << (left shift)', () => {
|
||||||
|
expect('5 << 2').toMatchTree(`
|
||||||
|
BinOp
|
||||||
|
Number 5
|
||||||
|
Shl <<
|
||||||
|
Number 2`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses >> (signed right shift)', () => {
|
||||||
|
expect('20 >> 2').toMatchTree(`
|
||||||
|
BinOp
|
||||||
|
Number 20
|
||||||
|
Shr >>
|
||||||
|
Number 2`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses >>> (unsigned right shift)', () => {
|
||||||
|
expect('-1 >>> 1').toMatchTree(`
|
||||||
|
BinOp
|
||||||
|
Number -1
|
||||||
|
Ushr >>>
|
||||||
|
Number 1`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses bnot (bitwise NOT) as function call', () => {
|
||||||
|
expect('bnot 5').toMatchTree(`
|
||||||
|
FunctionCall
|
||||||
|
Identifier bnot
|
||||||
|
PositionalArg
|
||||||
|
Number 5`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('bitwise operators work in expressions', () => {
|
||||||
|
expect('x = 5 band 3').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier x
|
||||||
|
Eq =
|
||||||
|
BinOp
|
||||||
|
Number 5
|
||||||
|
Band band
|
||||||
|
Number 3`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,8 +1,6 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
describe('if/else if/else', () => {
|
||||||
|
|
||||||
describe('if/elseif/else', () => {
|
|
||||||
test('parses single line if', () => {
|
test('parses single line if', () => {
|
||||||
expect(`if y == 1: 'cool' end`).toMatchTree(`
|
expect(`if y == 1: 'cool' end`).toMatchTree(`
|
||||||
IfExpr
|
IfExpr
|
||||||
|
|
@ -12,7 +10,7 @@ describe('if/elseif/else', () => {
|
||||||
EqEq ==
|
EqEq ==
|
||||||
Number 1
|
Number 1
|
||||||
colon :
|
colon :
|
||||||
SingleLineThenBlock
|
Block
|
||||||
String
|
String
|
||||||
StringFragment cool
|
StringFragment cool
|
||||||
keyword end
|
keyword end
|
||||||
|
|
@ -24,9 +22,10 @@ describe('if/elseif/else', () => {
|
||||||
Eq =
|
Eq =
|
||||||
IfExpr
|
IfExpr
|
||||||
keyword if
|
keyword if
|
||||||
Identifier x
|
FunctionCallOrIdentifier
|
||||||
|
Identifier x
|
||||||
colon :
|
colon :
|
||||||
SingleLineThenBlock
|
Block
|
||||||
Number 2
|
Number 2
|
||||||
keyword end
|
keyword end
|
||||||
`)
|
`)
|
||||||
|
|
@ -44,7 +43,7 @@ describe('if/elseif/else', () => {
|
||||||
Lt <
|
Lt <
|
||||||
Number 9
|
Number 9
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier yes
|
Identifier yes
|
||||||
keyword end
|
keyword end
|
||||||
|
|
@ -59,80 +58,89 @@ describe('if/elseif/else', () => {
|
||||||
end`).toMatchTree(`
|
end`).toMatchTree(`
|
||||||
IfExpr
|
IfExpr
|
||||||
keyword if
|
keyword if
|
||||||
Identifier with-else
|
FunctionCallOrIdentifier
|
||||||
|
Identifier with-else
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier x
|
Identifier x
|
||||||
ElseExpr
|
ElseExpr
|
||||||
keyword else
|
keyword else
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier y
|
Identifier y
|
||||||
keyword end
|
keyword end
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('parses multiline if with elseif', () => {
|
test('parses multiline if with else if', () => {
|
||||||
expect(`if with-elseif:
|
expect(`if with-else-if:
|
||||||
x
|
x
|
||||||
elseif another-condition:
|
else if another-condition:
|
||||||
y
|
y
|
||||||
end`).toMatchTree(`
|
end`).toMatchTree(`
|
||||||
IfExpr
|
IfExpr
|
||||||
keyword if
|
keyword if
|
||||||
Identifier with-elseif
|
FunctionCallOrIdentifier
|
||||||
|
Identifier with-else-if
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier x
|
Identifier x
|
||||||
ElseIfExpr
|
ElseIfExpr
|
||||||
keyword elseif
|
keyword else
|
||||||
Identifier another-condition
|
keyword if
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier another-condition
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier y
|
Identifier y
|
||||||
keyword end
|
keyword end
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('parses multiline if with multiple elseif and else', () => {
|
test('parses multiline if with multiple else if and else', () => {
|
||||||
expect(`if with-elseif-else:
|
expect(`if with-else-if-else:
|
||||||
x
|
x
|
||||||
elseif another-condition:
|
else if another-condition:
|
||||||
y
|
y
|
||||||
elseif yet-another-condition:
|
else if yet-another-condition:
|
||||||
z
|
z
|
||||||
else:
|
else:
|
||||||
oh-no
|
oh-no
|
||||||
end`).toMatchTree(`
|
end`).toMatchTree(`
|
||||||
IfExpr
|
IfExpr
|
||||||
keyword if
|
keyword if
|
||||||
Identifier with-elseif-else
|
FunctionCallOrIdentifier
|
||||||
|
Identifier with-else-if-else
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier x
|
Identifier x
|
||||||
ElseIfExpr
|
ElseIfExpr
|
||||||
keyword elseif
|
keyword else
|
||||||
Identifier another-condition
|
keyword if
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier another-condition
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier y
|
Identifier y
|
||||||
ElseIfExpr
|
ElseIfExpr
|
||||||
keyword elseif
|
keyword else
|
||||||
Identifier yet-another-condition
|
keyword if
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier yet-another-condition
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier z
|
Identifier z
|
||||||
ElseExpr
|
ElseExpr
|
||||||
keyword else
|
keyword else
|
||||||
colon :
|
colon :
|
||||||
ThenBlock
|
Block
|
||||||
FunctionCallOrIdentifier
|
FunctionCallOrIdentifier
|
||||||
Identifier oh-no
|
Identifier oh-no
|
||||||
keyword end
|
keyword end
|
||||||
|
|
@ -148,9 +156,221 @@ describe('if/elseif/else', () => {
|
||||||
keyword if
|
keyword if
|
||||||
Boolean true
|
Boolean true
|
||||||
colon :
|
colon :
|
||||||
SingleLineThenBlock
|
Block
|
||||||
Number 2
|
Number 2
|
||||||
keyword end
|
keyword end
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('parses function calls in if tests', () => {
|
||||||
|
expect(`if var? 'abc': true end`).toMatchTree(`
|
||||||
|
IfExpr
|
||||||
|
keyword if
|
||||||
|
FunctionCall
|
||||||
|
Identifier var?
|
||||||
|
PositionalArg
|
||||||
|
String
|
||||||
|
StringFragment abc
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test("parses paren'd function calls in if tests", () => {
|
||||||
|
expect(`if (var? 'abc'): true end`).toMatchTree(`
|
||||||
|
IfExpr
|
||||||
|
keyword if
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier var?
|
||||||
|
PositionalArg
|
||||||
|
String
|
||||||
|
StringFragment abc
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
test('parses function calls in else-if tests', () => {
|
||||||
|
expect(`if false: true else if var? 'abc': true end`).toMatchTree(`
|
||||||
|
IfExpr
|
||||||
|
keyword if
|
||||||
|
Boolean false
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
ElseIfExpr
|
||||||
|
keyword else
|
||||||
|
keyword if
|
||||||
|
FunctionCall
|
||||||
|
Identifier var?
|
||||||
|
PositionalArg
|
||||||
|
String
|
||||||
|
StringFragment abc
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test("parses paren'd function calls in else-if tests", () => {
|
||||||
|
expect(`if false: true else if (var? 'abc'): true end`).toMatchTree(`
|
||||||
|
IfExpr
|
||||||
|
keyword if
|
||||||
|
Boolean false
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
ElseIfExpr
|
||||||
|
keyword else
|
||||||
|
keyword if
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier var?
|
||||||
|
PositionalArg
|
||||||
|
String
|
||||||
|
StringFragment abc
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('allows if/else in parens', () => {
|
||||||
|
expect(`eh? = (if true: true end)`).toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier eh?
|
||||||
|
Eq =
|
||||||
|
ParenExpr
|
||||||
|
IfExpr
|
||||||
|
keyword if
|
||||||
|
Boolean true
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('while', () => {
|
||||||
|
test('infinite loop', () => {
|
||||||
|
expect(`while true: true end`).toMatchTree(`
|
||||||
|
WhileExpr
|
||||||
|
keyword while
|
||||||
|
Boolean true
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('basic expression', () => {
|
||||||
|
expect(`while a > 0: true end`).toMatchTree(`
|
||||||
|
WhileExpr
|
||||||
|
keyword while
|
||||||
|
ConditionalOp
|
||||||
|
Identifier a
|
||||||
|
Gt >
|
||||||
|
Number 0
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
test('compound expression', () => {
|
||||||
|
expect(`while a > 0 and b < 100 and c < 1000: true end`).toMatchTree(`
|
||||||
|
WhileExpr
|
||||||
|
keyword while
|
||||||
|
ConditionalOp
|
||||||
|
ConditionalOp
|
||||||
|
ConditionalOp
|
||||||
|
Identifier a
|
||||||
|
Gt >
|
||||||
|
Number 0
|
||||||
|
And and
|
||||||
|
ConditionalOp
|
||||||
|
Identifier b
|
||||||
|
Lt <
|
||||||
|
Number 100
|
||||||
|
And and
|
||||||
|
ConditionalOp
|
||||||
|
Identifier c
|
||||||
|
Lt <
|
||||||
|
Number 1000
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('multiline infinite loop', () => {
|
||||||
|
expect(`
|
||||||
|
while true:
|
||||||
|
true
|
||||||
|
end`).toMatchTree(`
|
||||||
|
WhileExpr
|
||||||
|
keyword while
|
||||||
|
Boolean true
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('multiline basic expression', () => {
|
||||||
|
expect(`
|
||||||
|
while a > 0:
|
||||||
|
true
|
||||||
|
end`).toMatchTree(`
|
||||||
|
WhileExpr
|
||||||
|
keyword while
|
||||||
|
ConditionalOp
|
||||||
|
Identifier a
|
||||||
|
Gt >
|
||||||
|
Number 0
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
test('multiline compound expression', () => {
|
||||||
|
expect(`
|
||||||
|
while a > 0 and b < 100 and c < 1000:
|
||||||
|
true
|
||||||
|
end`).toMatchTree(`
|
||||||
|
WhileExpr
|
||||||
|
keyword while
|
||||||
|
ConditionalOp
|
||||||
|
ConditionalOp
|
||||||
|
ConditionalOp
|
||||||
|
Identifier a
|
||||||
|
Gt >
|
||||||
|
Number 0
|
||||||
|
And and
|
||||||
|
ConditionalOp
|
||||||
|
Identifier b
|
||||||
|
Lt <
|
||||||
|
Number 100
|
||||||
|
And and
|
||||||
|
ConditionalOp
|
||||||
|
Identifier c
|
||||||
|
Lt <
|
||||||
|
Number 1000
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Boolean true
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
})
|
||||||
56
src/parser/tests/destructuring.test.ts
Normal file
56
src/parser/tests/destructuring.test.ts
Normal file
|
|
@ -0,0 +1,56 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('Array destructuring', () => {
|
||||||
|
test('parses array pattern with two variables', () => {
|
||||||
|
expect('[ a b ] = [ 1 2 3 4]').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
Array
|
||||||
|
Identifier a
|
||||||
|
Identifier b
|
||||||
|
Eq =
|
||||||
|
Array
|
||||||
|
Number 1
|
||||||
|
Number 2
|
||||||
|
Number 3
|
||||||
|
Number 4`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses array pattern with one variable', () => {
|
||||||
|
expect('[ x ] = [ 42 ]').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
Array
|
||||||
|
Identifier x
|
||||||
|
Eq =
|
||||||
|
Array
|
||||||
|
Number 42`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses array pattern with emoji identifiers', () => {
|
||||||
|
expect('[ 🚀 💎 ] = [ 1 2 ]').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
Array
|
||||||
|
Identifier 🚀
|
||||||
|
Identifier 💎
|
||||||
|
Eq =
|
||||||
|
Array
|
||||||
|
Number 1
|
||||||
|
Number 2`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('works with dotget', () => {
|
||||||
|
expect('[ a ] = [ [1 2 3] ]; a.1').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
Array
|
||||||
|
Identifier a
|
||||||
|
Eq =
|
||||||
|
Array
|
||||||
|
Array
|
||||||
|
Number 1
|
||||||
|
Number 2
|
||||||
|
Number 3
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot a
|
||||||
|
Number 1`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,6 +1,44 @@
|
||||||
import { describe, test, expect } from 'bun:test'
|
import { describe, test, expect } from 'bun:test'
|
||||||
import '../../testSetup'
|
import '../../testSetup'
|
||||||
|
|
||||||
|
describe('DotGet whitespace sensitivity', () => {
|
||||||
|
test('no whitespace - DotGet works when identifier in scope', () => {
|
||||||
|
expect('basename = 5; basename.prop').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier basename
|
||||||
|
Eq =
|
||||||
|
Number 5
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot basename
|
||||||
|
Identifier prop`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('space before dot - NOT DotGet, parses as division', () => {
|
||||||
|
expect('basename = 5; basename / prop').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier basename
|
||||||
|
Eq =
|
||||||
|
Number 5
|
||||||
|
BinOp
|
||||||
|
Identifier basename
|
||||||
|
Slash /
|
||||||
|
Identifier prop`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dot followed by slash is Word, not DotGet', () => {
|
||||||
|
expect('basename ./cool').toMatchTree(`
|
||||||
|
FunctionCall
|
||||||
|
Identifier basename
|
||||||
|
PositionalArg
|
||||||
|
Word ./cool`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('identifier not in scope with dot becomes Word', () => {
|
||||||
|
expect('readme.txt').toMatchTree(`Word readme.txt`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
describe('DotGet', () => {
|
describe('DotGet', () => {
|
||||||
test('readme.txt is Word when readme not in scope', () => {
|
test('readme.txt is Word when readme not in scope', () => {
|
||||||
expect('readme.txt').toMatchTree(`Word readme.txt`)
|
expect('readme.txt').toMatchTree(`Word readme.txt`)
|
||||||
|
|
@ -199,7 +237,7 @@ end`).toMatchTree(`
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
test("dot get doesn't work with spaces", () => {
|
test.skip("dot get doesn't work with spaces", () => {
|
||||||
expect('obj . prop').toMatchTree(`
|
expect('obj . prop').toMatchTree(`
|
||||||
FunctionCall
|
FunctionCall
|
||||||
Identifier obj
|
Identifier obj
|
||||||
|
|
@ -298,4 +336,163 @@ end`).toMatchTree(`
|
||||||
Number 2
|
Number 2
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// NOTE: these are parsed as DotGet(meta, DotGet(script, name)) because that's easiest,
|
||||||
|
// but the compiler flattens them
|
||||||
|
test('chained dot get: meta.script.name', () => {
|
||||||
|
expect('meta = 42; meta.script.name').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier meta
|
||||||
|
Eq =
|
||||||
|
Number 42
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot meta
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot script
|
||||||
|
Identifier name
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get: a.b.c.d', () => {
|
||||||
|
expect('a = 1; a.b.c.d').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier a
|
||||||
|
Eq =
|
||||||
|
Number 1
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot a
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot b
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot c
|
||||||
|
Identifier d
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get in function call', () => {
|
||||||
|
expect('config = 1; echo config.db.host').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier config
|
||||||
|
Eq =
|
||||||
|
Number 1
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot config
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot db
|
||||||
|
Identifier host
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get with numeric index at end', () => {
|
||||||
|
expect('obj = 1; obj.items.0').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier obj
|
||||||
|
Eq =
|
||||||
|
Number 1
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot obj
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot items
|
||||||
|
Number 0
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get with ParenExpr at end', () => {
|
||||||
|
expect('obj = 1; obj.items.(i)').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier obj
|
||||||
|
Eq =
|
||||||
|
Number 1
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot obj
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot items
|
||||||
|
ParenExpr
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier i
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('not in scope remains Word with chained dots', () => {
|
||||||
|
expect('readme.md.bak').toMatchTree(`Word readme.md.bak`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot get in nested functions', () => {
|
||||||
|
expect(`do cfg:
|
||||||
|
do inner:
|
||||||
|
cfg.db.host
|
||||||
|
end
|
||||||
|
end`).toMatchTree(`
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier cfg
|
||||||
|
colon :
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier inner
|
||||||
|
colon :
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot cfg
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot db
|
||||||
|
Identifier host
|
||||||
|
keyword end
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('mixed simple and chained dot get', () => {
|
||||||
|
expect('obj = 1; obj.a; obj.b.c').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier obj
|
||||||
|
Eq =
|
||||||
|
Number 1
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot obj
|
||||||
|
Identifier a
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot obj
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot b
|
||||||
|
Identifier c
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test.skip('chained numeric dot get: row.2.1.b', () => {
|
||||||
|
expect('row = []; row.2.1').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier row
|
||||||
|
Eq =
|
||||||
|
Array []
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot row
|
||||||
|
DotGet
|
||||||
|
Number 2
|
||||||
|
DotGet
|
||||||
|
Number 1
|
||||||
|
Identifier b
|
||||||
|
`)
|
||||||
|
|
||||||
|
test('parses $.pid just fine', () => {
|
||||||
|
expect(`$.pid`).toMatchTree(`
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
Dollar $
|
||||||
|
Identifier pid
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
|
||||||
296
src/parser/tests/exceptions.test.ts
Normal file
296
src/parser/tests/exceptions.test.ts
Normal file
|
|
@ -0,0 +1,296 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('try/catch/finally/throw', () => {
|
||||||
|
test('parses try with catch', () => {
|
||||||
|
expect(`try:
|
||||||
|
risky-operation
|
||||||
|
catch err:
|
||||||
|
handle-error err
|
||||||
|
end`).toMatchTree(`
|
||||||
|
TryExpr
|
||||||
|
keyword try
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier risky-operation
|
||||||
|
CatchExpr
|
||||||
|
keyword catch
|
||||||
|
Identifier err
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier handle-error
|
||||||
|
PositionalArg
|
||||||
|
Identifier err
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses try with finally', () => {
|
||||||
|
expect(`try:
|
||||||
|
do-work
|
||||||
|
finally:
|
||||||
|
cleanup
|
||||||
|
end`).toMatchTree(`
|
||||||
|
TryExpr
|
||||||
|
keyword try
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier do-work
|
||||||
|
FinallyExpr
|
||||||
|
keyword finally
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier cleanup
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses try with catch and finally', () => {
|
||||||
|
expect(`try:
|
||||||
|
risky-operation
|
||||||
|
catch err:
|
||||||
|
handle-error err
|
||||||
|
finally:
|
||||||
|
cleanup
|
||||||
|
end`).toMatchTree(`
|
||||||
|
TryExpr
|
||||||
|
keyword try
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier risky-operation
|
||||||
|
CatchExpr
|
||||||
|
keyword catch
|
||||||
|
Identifier err
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier handle-error
|
||||||
|
PositionalArg
|
||||||
|
Identifier err
|
||||||
|
FinallyExpr
|
||||||
|
keyword finally
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier cleanup
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses single-line try with catch', () => {
|
||||||
|
expect('result = try: parse-number input catch err: 0 end').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier result
|
||||||
|
Eq =
|
||||||
|
TryExpr
|
||||||
|
keyword try
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier parse-number
|
||||||
|
PositionalArg
|
||||||
|
Identifier input
|
||||||
|
CatchExpr
|
||||||
|
keyword catch
|
||||||
|
Identifier err
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Number 0
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses single-line try with finally', () => {
|
||||||
|
expect('try: work catch err: 0 finally: cleanup end').toMatchTree(`
|
||||||
|
TryExpr
|
||||||
|
keyword try
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier work
|
||||||
|
CatchExpr
|
||||||
|
keyword catch
|
||||||
|
Identifier err
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Number 0
|
||||||
|
FinallyExpr
|
||||||
|
keyword finally
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier cleanup
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses throw statement with string', () => {
|
||||||
|
expect("throw 'error message'").toMatchTree(`
|
||||||
|
Throw
|
||||||
|
keyword throw
|
||||||
|
String
|
||||||
|
StringFragment error message
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses throw statement with BinOp', () => {
|
||||||
|
expect("throw 'error message:' + msg").toMatchTree(`
|
||||||
|
Throw
|
||||||
|
keyword throw
|
||||||
|
BinOp
|
||||||
|
String
|
||||||
|
StringFragment error message:
|
||||||
|
Plus +
|
||||||
|
Identifier msg
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses throw statement with identifier', () => {
|
||||||
|
expect('throw error-object').toMatchTree(`
|
||||||
|
Throw
|
||||||
|
keyword throw
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier error-object
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses throw statement with dict', () => {
|
||||||
|
expect('throw [type=validation-error message=failed]').toMatchTree(`
|
||||||
|
Throw
|
||||||
|
keyword throw
|
||||||
|
Dict
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix type=
|
||||||
|
Identifier validation-error
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix message=
|
||||||
|
Identifier failed
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('does not parse identifiers that start with try', () => {
|
||||||
|
expect('trying = try: work catch err: 0 end').toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier trying
|
||||||
|
Eq =
|
||||||
|
TryExpr
|
||||||
|
keyword try
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier work
|
||||||
|
CatchExpr
|
||||||
|
keyword catch
|
||||||
|
Identifier err
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
Number 0
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('function-level exception handling', () => {
|
||||||
|
test('parses function with catch', () => {
|
||||||
|
expect(`read-file = do path:
|
||||||
|
read-data path
|
||||||
|
catch e:
|
||||||
|
empty-string
|
||||||
|
end`).toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier read-file
|
||||||
|
Eq =
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier path
|
||||||
|
colon :
|
||||||
|
FunctionCall
|
||||||
|
Identifier read-data
|
||||||
|
PositionalArg
|
||||||
|
Identifier path
|
||||||
|
CatchExpr
|
||||||
|
keyword catch
|
||||||
|
Identifier e
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier empty-string
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses function with finally', () => {
|
||||||
|
expect(`cleanup-task = do x:
|
||||||
|
do-work x
|
||||||
|
finally:
|
||||||
|
close-resources
|
||||||
|
end`).toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier cleanup-task
|
||||||
|
Eq =
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier x
|
||||||
|
colon :
|
||||||
|
FunctionCall
|
||||||
|
Identifier do-work
|
||||||
|
PositionalArg
|
||||||
|
Identifier x
|
||||||
|
FinallyExpr
|
||||||
|
keyword finally
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier close-resources
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses function with catch and finally', () => {
|
||||||
|
expect(`safe-operation = do x:
|
||||||
|
risky-work x
|
||||||
|
catch err:
|
||||||
|
log err
|
||||||
|
default-value
|
||||||
|
finally:
|
||||||
|
cleanup
|
||||||
|
end`).toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier safe-operation
|
||||||
|
Eq =
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier x
|
||||||
|
colon :
|
||||||
|
FunctionCall
|
||||||
|
Identifier risky-work
|
||||||
|
PositionalArg
|
||||||
|
Identifier x
|
||||||
|
CatchExpr
|
||||||
|
keyword catch
|
||||||
|
Identifier err
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier log
|
||||||
|
PositionalArg
|
||||||
|
Identifier err
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier default-value
|
||||||
|
FinallyExpr
|
||||||
|
keyword finally
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier cleanup
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
})
|
||||||
301
src/parser/tests/function-blocks.test.ts
Normal file
301
src/parser/tests/function-blocks.test.ts
Normal file
|
|
@ -0,0 +1,301 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('single line function blocks', () => {
|
||||||
|
test('work with no args', () => {
|
||||||
|
expect(`trap: echo bye bye end`).toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier trap
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
keyword end`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work with one arg', () => {
|
||||||
|
expect(`trap EXIT: echo bye bye end`).toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCall
|
||||||
|
Identifier trap
|
||||||
|
PositionalArg
|
||||||
|
Word EXIT
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
keyword end`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work with named args', () => {
|
||||||
|
expect(`attach signal='exit': echo bye bye end`).toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCall
|
||||||
|
Identifier attach
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix signal=
|
||||||
|
String
|
||||||
|
StringFragment exit
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
keyword end`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
test('work with dot-get', () => {
|
||||||
|
expect(`signals = [=]; signals.trap 'EXIT': echo bye bye end`).toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier signals
|
||||||
|
Eq =
|
||||||
|
Dict [=]
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCall
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot signals
|
||||||
|
Identifier trap
|
||||||
|
PositionalArg
|
||||||
|
String
|
||||||
|
StringFragment EXIT
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
keyword end`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('multi line function blocks', () => {
|
||||||
|
test('work with no args', () => {
|
||||||
|
expect(`
|
||||||
|
trap:
|
||||||
|
echo bye bye
|
||||||
|
end
|
||||||
|
`).toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier trap
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
keyword end`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work with one arg', () => {
|
||||||
|
expect(`
|
||||||
|
trap EXIT:
|
||||||
|
echo bye bye
|
||||||
|
end`).toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCall
|
||||||
|
Identifier trap
|
||||||
|
PositionalArg
|
||||||
|
Word EXIT
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
keyword end`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work with named args', () => {
|
||||||
|
expect(`
|
||||||
|
attach signal='exit' code=1:
|
||||||
|
echo bye bye
|
||||||
|
end`).toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCall
|
||||||
|
Identifier attach
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix signal=
|
||||||
|
String
|
||||||
|
StringFragment exit
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix code=
|
||||||
|
Number 1
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
keyword end`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
test('work with dot-get', () => {
|
||||||
|
expect(`
|
||||||
|
signals = [=]
|
||||||
|
signals.trap 'EXIT':
|
||||||
|
echo bye bye
|
||||||
|
end`).toMatchTree(`
|
||||||
|
Assign
|
||||||
|
AssignableIdentifier signals
|
||||||
|
Eq =
|
||||||
|
Dict [=]
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCall
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot signals
|
||||||
|
Identifier trap
|
||||||
|
PositionalArg
|
||||||
|
String
|
||||||
|
StringFragment EXIT
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
PositionalArg
|
||||||
|
Identifier bye
|
||||||
|
keyword end`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('ribbit', () => {
|
||||||
|
test('head tag', () => {
|
||||||
|
expect(`
|
||||||
|
head:
|
||||||
|
title What up
|
||||||
|
meta charSet=UTF-8
|
||||||
|
meta name='viewport' content='width=device-width, initial-scale=1, viewport-fit=cover'
|
||||||
|
end`).toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier head
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier title
|
||||||
|
PositionalArg
|
||||||
|
Word What
|
||||||
|
PositionalArg
|
||||||
|
Identifier up
|
||||||
|
FunctionCall
|
||||||
|
Identifier meta
|
||||||
|
PositionalArg
|
||||||
|
Word charSet=UTF-8
|
||||||
|
FunctionCall
|
||||||
|
Identifier meta
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix name=
|
||||||
|
String
|
||||||
|
StringFragment viewport
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix content=
|
||||||
|
String
|
||||||
|
StringFragment width=device-width, initial-scale=1, viewport-fit=cover
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('li', () => {
|
||||||
|
expect(`
|
||||||
|
list:
|
||||||
|
li border-bottom='1px solid black' one
|
||||||
|
li two
|
||||||
|
li three
|
||||||
|
end`).toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier list
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier li
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix border-bottom=
|
||||||
|
String
|
||||||
|
StringFragment 1px solid black
|
||||||
|
PositionalArg
|
||||||
|
Identifier one
|
||||||
|
FunctionCall
|
||||||
|
Identifier li
|
||||||
|
PositionalArg
|
||||||
|
Identifier two
|
||||||
|
FunctionCall
|
||||||
|
Identifier li
|
||||||
|
PositionalArg
|
||||||
|
Identifier three
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('inline expressions', () => {
|
||||||
|
expect(`
|
||||||
|
p:
|
||||||
|
h1 class=bright style='font-family: helvetica' Heya
|
||||||
|
h2 man that is (b wild)!
|
||||||
|
end`)
|
||||||
|
.toMatchTree(`
|
||||||
|
FunctionCallWithBlock
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier p
|
||||||
|
colon :
|
||||||
|
Block
|
||||||
|
FunctionCall
|
||||||
|
Identifier h1
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix class=
|
||||||
|
Identifier bright
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix style=
|
||||||
|
String
|
||||||
|
StringFragment font-family: helvetica
|
||||||
|
PositionalArg
|
||||||
|
Word Heya
|
||||||
|
FunctionCall
|
||||||
|
Identifier h2
|
||||||
|
PositionalArg
|
||||||
|
Identifier man
|
||||||
|
PositionalArg
|
||||||
|
Identifier that
|
||||||
|
PositionalArg
|
||||||
|
Identifier is
|
||||||
|
PositionalArg
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier b
|
||||||
|
PositionalArg
|
||||||
|
Identifier wild
|
||||||
|
PositionalArg
|
||||||
|
Word !
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,7 +1,5 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
|
||||||
|
|
||||||
describe('calling functions', () => {
|
describe('calling functions', () => {
|
||||||
test('call with no args', () => {
|
test('call with no args', () => {
|
||||||
expect('tail').toMatchTree(`
|
expect('tail').toMatchTree(`
|
||||||
|
|
@ -31,6 +29,70 @@ describe('calling functions', () => {
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('call with dashed named arg', () => {
|
||||||
|
expect('tail pre-lines=30 path').toMatchTree(`
|
||||||
|
FunctionCall
|
||||||
|
Identifier tail
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix pre-lines=
|
||||||
|
Number 30
|
||||||
|
PositionalArg
|
||||||
|
Identifier path
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('call with function', () => {
|
||||||
|
expect(`tail do x: x end`).toMatchTree(`
|
||||||
|
FunctionCall
|
||||||
|
Identifier tail
|
||||||
|
PositionalArg
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier x
|
||||||
|
colon :
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier x
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('call with arg and function', () => {
|
||||||
|
expect(`tail true do x: x end`).toMatchTree(`
|
||||||
|
FunctionCall
|
||||||
|
Identifier tail
|
||||||
|
PositionalArg
|
||||||
|
Boolean true
|
||||||
|
PositionalArg
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier x
|
||||||
|
colon :
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier x
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('call with function in named arg', () => {
|
||||||
|
expect(`tail callback=do x: x end`).toMatchTree(`
|
||||||
|
FunctionCall
|
||||||
|
Identifier tail
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix callback=
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier x
|
||||||
|
colon :
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier x
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
test('command with arg that is also a command', () => {
|
test('command with arg that is also a command', () => {
|
||||||
expect('tail tail').toMatchTree(`
|
expect('tail tail').toMatchTree(`
|
||||||
FunctionCall
|
FunctionCall
|
||||||
|
|
@ -51,8 +113,8 @@ describe('calling functions', () => {
|
||||||
Identifier tail
|
Identifier tail
|
||||||
NamedArg
|
NamedArg
|
||||||
NamedArgPrefix lines=
|
NamedArgPrefix lines=
|
||||||
⚠
|
⚠
|
||||||
⚠ `)
|
⚠`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -61,7 +123,7 @@ describe('Do', () => {
|
||||||
expect('do: 1 end').toMatchTree(`
|
expect('do: 1 end').toMatchTree(`
|
||||||
FunctionDef
|
FunctionDef
|
||||||
Do do
|
Do do
|
||||||
Params
|
Params
|
||||||
colon :
|
colon :
|
||||||
Number 1
|
Number 1
|
||||||
keyword end`)
|
keyword end`)
|
||||||
|
|
@ -165,3 +227,58 @@ end`).toMatchTree(`
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('default params', () => {
|
||||||
|
test('parses function with single default parameter', () => {
|
||||||
|
expect('do x=1: x + 1 end').toMatchTree(`
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
NamedParam
|
||||||
|
NamedArgPrefix x=
|
||||||
|
Number 1
|
||||||
|
colon :
|
||||||
|
BinOp
|
||||||
|
Identifier x
|
||||||
|
Plus +
|
||||||
|
Number 1
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses function with multiple default parameters', () => {
|
||||||
|
expect(`do x='something' y=true: x * y end`).toMatchTree(`
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
NamedParam
|
||||||
|
NamedArgPrefix x=
|
||||||
|
String
|
||||||
|
StringFragment something
|
||||||
|
NamedParam
|
||||||
|
NamedArgPrefix y=
|
||||||
|
Boolean true
|
||||||
|
colon :
|
||||||
|
BinOp
|
||||||
|
Identifier x
|
||||||
|
Star *
|
||||||
|
Identifier y
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses function with mixed parameters', () => {
|
||||||
|
expect('do x y=true: x * y end').toMatchTree(`
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier x
|
||||||
|
NamedParam
|
||||||
|
NamedArgPrefix y=
|
||||||
|
Boolean true
|
||||||
|
colon :
|
||||||
|
BinOp
|
||||||
|
Identifier x
|
||||||
|
Star *
|
||||||
|
Identifier y
|
||||||
|
keyword end`)
|
||||||
|
})
|
||||||
|
})
|
||||||
32
src/parser/tests/import.test.ts
Normal file
32
src/parser/tests/import.test.ts
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('import', () => {
|
||||||
|
test('parses single import', () => {
|
||||||
|
expect(`import str`).toMatchTree(`
|
||||||
|
Import
|
||||||
|
keyword import
|
||||||
|
Identifier str
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses multiple imports', () => {
|
||||||
|
expect(`import str math list`).toMatchTree(`
|
||||||
|
Import
|
||||||
|
keyword import
|
||||||
|
Identifier str
|
||||||
|
Identifier math
|
||||||
|
Identifier list
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses named args', () => {
|
||||||
|
expect(`import str only=ends-with?`).toMatchTree(`
|
||||||
|
Import
|
||||||
|
keyword import
|
||||||
|
Identifier str
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix only=
|
||||||
|
Identifier ends-with?
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,6 +1,86 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
describe('number literals', () => {
|
||||||
|
test('binary numbers', () => {
|
||||||
|
expect('0b110').toMatchTree(`
|
||||||
|
Number 0b110
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('hex numbers', () => {
|
||||||
|
expect('0xdeadbeef').toMatchTree(`
|
||||||
|
Number 0xdeadbeef
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('hex numbers uppercase', () => {
|
||||||
|
expect('0xFF').toMatchTree(`
|
||||||
|
Number 0xFF
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('octal numbers', () => {
|
||||||
|
expect('0o644').toMatchTree(`
|
||||||
|
Number 0o644
|
||||||
|
`)
|
||||||
|
|
||||||
|
expect('0o055').toMatchTree(`
|
||||||
|
Number 0o055
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('decimal numbers still work', () => {
|
||||||
|
expect('42').toMatchTree(`
|
||||||
|
Number 42
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('negative binary', () => {
|
||||||
|
expect('-0b110').toMatchTree(`
|
||||||
|
Number -0b110
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('negative hex', () => {
|
||||||
|
expect('-0xFF').toMatchTree(`
|
||||||
|
Number -0xFF
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('negative octal', () => {
|
||||||
|
expect('-0o755').toMatchTree(`
|
||||||
|
Number -0o755
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('positive prefix binary', () => {
|
||||||
|
expect('+0b110').toMatchTree(`
|
||||||
|
Number +0b110
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('positive prefix hex', () => {
|
||||||
|
expect('+0xFF').toMatchTree(`
|
||||||
|
Number +0xFF
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('positive prefix octal', () => {
|
||||||
|
expect('+0o644').toMatchTree(`
|
||||||
|
Number +0o644
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('hex, binary, and octal in arrays', () => {
|
||||||
|
expect('[0xFF 0b110 0o644 42]').toMatchTree(`
|
||||||
|
Array
|
||||||
|
Number 0xFF
|
||||||
|
Number 0b110
|
||||||
|
Number 0o644
|
||||||
|
Number 42
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
describe('array literals', () => {
|
describe('array literals', () => {
|
||||||
test('work with numbers', () => {
|
test('work with numbers', () => {
|
||||||
|
|
@ -150,8 +230,11 @@ describe('array literals', () => {
|
||||||
2 # second
|
2 # second
|
||||||
]`).toMatchTree(`
|
]`).toMatchTree(`
|
||||||
Array
|
Array
|
||||||
|
Comment # something...
|
||||||
Number 1
|
Number 1
|
||||||
|
Comment # first
|
||||||
Number 2
|
Number 2
|
||||||
|
Comment # second
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -251,6 +334,22 @@ describe('dict literals', () => {
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('work with functions', () => {
|
||||||
|
expect(`[trap=do x: x end]`).toMatchTree(`
|
||||||
|
Dict
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix trap=
|
||||||
|
FunctionDef
|
||||||
|
Do do
|
||||||
|
Params
|
||||||
|
Identifier x
|
||||||
|
colon :
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier x
|
||||||
|
keyword end
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
test('can be nested', () => {
|
test('can be nested', () => {
|
||||||
expect('[a=one b=[two [c=three]]]').toMatchTree(`
|
expect('[a=one b=[two [c=three]]]').toMatchTree(`
|
||||||
Dict
|
Dict
|
||||||
|
|
@ -286,14 +385,35 @@ describe('dict literals', () => {
|
||||||
Number 3
|
Number 3
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('can have spaces between equals', () => {
|
||||||
|
expect(`[
|
||||||
|
a = 1
|
||||||
|
b = 2
|
||||||
|
c = 3
|
||||||
|
]`).toMatchTree(`
|
||||||
|
Dict
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix a =
|
||||||
|
Number 1
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix b =
|
||||||
|
Number 2
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix c =
|
||||||
|
Number 3
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
test('empty dict', () => {
|
test('empty dict', () => {
|
||||||
expect('[=]').toMatchTree(`
|
expect('[=]').toMatchTree(`
|
||||||
Dict [=]
|
Dict [=]
|
||||||
`)
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('empty dict w whitespace', () => {
|
||||||
expect('[ = ]').toMatchTree(`
|
expect('[ = ]').toMatchTree(`
|
||||||
Array
|
Dict [ = ]
|
||||||
Word =
|
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -397,12 +517,15 @@ c=3]`).toMatchTree(`
|
||||||
c=3
|
c=3
|
||||||
]`).toMatchTree(`
|
]`).toMatchTree(`
|
||||||
Dict
|
Dict
|
||||||
|
Comment # something...
|
||||||
NamedArg
|
NamedArg
|
||||||
NamedArgPrefix a=
|
NamedArgPrefix a=
|
||||||
Number 1
|
Number 1
|
||||||
|
Comment # first
|
||||||
NamedArg
|
NamedArg
|
||||||
NamedArgPrefix b=
|
NamedArgPrefix b=
|
||||||
Number 2
|
Number 2
|
||||||
|
Comment # second
|
||||||
NamedArg
|
NamedArg
|
||||||
NamedArgPrefix c=
|
NamedArgPrefix c=
|
||||||
Number 3
|
Number 3
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,5 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
|
||||||
|
|
||||||
describe('multiline', () => {
|
describe('multiline', () => {
|
||||||
test('parses multiline strings', () => {
|
test('parses multiline strings', () => {
|
||||||
expect(`'first'\n'second'`).toMatchTree(`
|
expect(`'first'\n'second'`).toMatchTree(`
|
||||||
|
|
@ -76,12 +74,12 @@ end
|
||||||
expect(`
|
expect(`
|
||||||
do:
|
do:
|
||||||
2
|
2
|
||||||
|
|
||||||
end
|
end
|
||||||
`).toMatchTree(`
|
`).toMatchTree(`
|
||||||
FunctionDef
|
FunctionDef
|
||||||
Do do
|
Do do
|
||||||
Params
|
Params
|
||||||
colon :
|
colon :
|
||||||
Number 2
|
Number 2
|
||||||
keyword end
|
keyword end
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,5 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
|
||||||
|
|
||||||
describe('pipe expressions', () => {
|
describe('pipe expressions', () => {
|
||||||
test('simple pipe expression', () => {
|
test('simple pipe expression', () => {
|
||||||
expect('echo hello | grep h').toMatchTree(`
|
expect('echo hello | grep h').toMatchTree(`
|
||||||
|
|
@ -98,4 +96,312 @@ describe('pipe expressions', () => {
|
||||||
Identifier double
|
Identifier double
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test('string literals can be piped', () => {
|
||||||
|
expect(`'hey there' | echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
String
|
||||||
|
StringFragment hey there
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('number literals can be piped', () => {
|
||||||
|
expect(`42 | echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
Number 42
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo`)
|
||||||
|
|
||||||
|
expect(`4.22 | echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
Number 4.22
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('null literals can be piped', () => {
|
||||||
|
expect(`null | echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
Null null
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('boolean literals can be piped', () => {
|
||||||
|
expect(`true | echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
Boolean true
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('array literals can be piped', () => {
|
||||||
|
expect(`[1 2 3] | echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
Array
|
||||||
|
Number 1
|
||||||
|
Number 2
|
||||||
|
Number 3
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dict literals can be piped', () => {
|
||||||
|
expect(`[a=1 b=2 c=3] | echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
Dict
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix a=
|
||||||
|
Number 1
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix b=
|
||||||
|
Number 2
|
||||||
|
NamedArg
|
||||||
|
NamedArgPrefix c=
|
||||||
|
Number 3
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parenthesized expressions can be piped', () => {
|
||||||
|
expect(`(1 + 2) | echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
ParenExpr
|
||||||
|
BinOp
|
||||||
|
Number 1
|
||||||
|
Plus +
|
||||||
|
Number 2
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('complex parenthesized expressions with pipes', () => {
|
||||||
|
expect(`((math.random) * 10 + 1) | math.floor`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
ParenExpr
|
||||||
|
BinOp
|
||||||
|
BinOp
|
||||||
|
ParenExpr
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot math
|
||||||
|
Identifier random
|
||||||
|
Star *
|
||||||
|
Number 10
|
||||||
|
Plus +
|
||||||
|
Number 1
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot math
|
||||||
|
Identifier floor
|
||||||
|
`)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('pipe continuation', () => {
|
||||||
|
test('pipe on next line', () => {
|
||||||
|
expect(`hello
|
||||||
|
| echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier hello
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo
|
||||||
|
`)
|
||||||
|
|
||||||
|
expect(`echo hello
|
||||||
|
| grep h`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier hello
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier grep
|
||||||
|
PositionalArg
|
||||||
|
Identifier h
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('pipe on next non-empty line', () => {
|
||||||
|
expect(`hello
|
||||||
|
|
||||||
|
|
||||||
|
| echo`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier hello
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('multi-line pipe chain', () => {
|
||||||
|
expect(`echo hello
|
||||||
|
| grep h
|
||||||
|
| sort`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier hello
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier grep
|
||||||
|
PositionalArg
|
||||||
|
Identifier h
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier sort
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('pipe with indentation', () => {
|
||||||
|
expect(`echo hello
|
||||||
|
| grep h
|
||||||
|
| sort`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier hello
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier grep
|
||||||
|
PositionalArg
|
||||||
|
Identifier h
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier sort
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('pipe after operand on next line (trailing pipe style)', () => {
|
||||||
|
expect(`echo hello |
|
||||||
|
grep h`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier hello
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier grep
|
||||||
|
PositionalArg
|
||||||
|
Identifier h
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('same-line pipes still work', () => {
|
||||||
|
expect('echo hello | grep h | sort').toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier echo
|
||||||
|
PositionalArg
|
||||||
|
Identifier hello
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier grep
|
||||||
|
PositionalArg
|
||||||
|
Identifier h
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier sort
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('lots of pipes', () => {
|
||||||
|
expect(`
|
||||||
|
'this should help readability in long chains'
|
||||||
|
| split ' '
|
||||||
|
| map (ref str.to-upper)
|
||||||
|
| join '-'
|
||||||
|
| echo
|
||||||
|
`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
String
|
||||||
|
StringFragment this should help readability in long chains
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier split
|
||||||
|
PositionalArg
|
||||||
|
String
|
||||||
|
StringFragment (space)
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier map
|
||||||
|
PositionalArg
|
||||||
|
ParenExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier ref
|
||||||
|
PositionalArg
|
||||||
|
DotGet
|
||||||
|
IdentifierBeforeDot str
|
||||||
|
Identifier to-upper
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier join
|
||||||
|
PositionalArg
|
||||||
|
String
|
||||||
|
StringFragment -
|
||||||
|
operator |
|
||||||
|
FunctionCallOrIdentifier
|
||||||
|
Identifier echo
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('Underscore', () => {
|
||||||
|
test('works in pipes', () => {
|
||||||
|
expect(`sub 3 1 | div (sub 110 9 | sub 1) _ | div 5`).toMatchTree(`
|
||||||
|
PipeExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier sub
|
||||||
|
PositionalArg
|
||||||
|
Number 3
|
||||||
|
PositionalArg
|
||||||
|
Number 1
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier div
|
||||||
|
PositionalArg
|
||||||
|
ParenExpr
|
||||||
|
PipeExpr
|
||||||
|
FunctionCall
|
||||||
|
Identifier sub
|
||||||
|
PositionalArg
|
||||||
|
Number 110
|
||||||
|
PositionalArg
|
||||||
|
Number 9
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier sub
|
||||||
|
PositionalArg
|
||||||
|
Number 1
|
||||||
|
PositionalArg
|
||||||
|
Underscore _
|
||||||
|
operator |
|
||||||
|
FunctionCall
|
||||||
|
Identifier div
|
||||||
|
PositionalArg
|
||||||
|
Number 5
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,14 +1,13 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
import '../shrimp.grammar' // Importing this so changes cause it to retest!
|
|
||||||
|
|
||||||
describe('string interpolation', () => {
|
describe('string interpolation', () => {
|
||||||
test('string with variable interpolation', () => {
|
test('string with variable interpolation', () => {
|
||||||
expect("'hello $name'").toMatchTree(`
|
expect("'hello $name'").toMatchTree(`
|
||||||
String
|
String
|
||||||
StringFragment ${'hello '}
|
StringFragment ${'hello '}
|
||||||
Interpolation
|
Interpolation
|
||||||
Identifier name
|
FunctionCallOrIdentifier
|
||||||
|
Identifier name
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -44,7 +43,8 @@ describe('string interpolation', () => {
|
||||||
String
|
String
|
||||||
StringFragment x/
|
StringFragment x/
|
||||||
Interpolation
|
Interpolation
|
||||||
Identifier y
|
FunctionCallOrIdentifier
|
||||||
|
Identifier y
|
||||||
StringFragment /z
|
StringFragment /z
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
|
|
@ -122,8 +122,58 @@ describe('string escape sequences', () => {
|
||||||
String
|
String
|
||||||
StringFragment value:
|
StringFragment value:
|
||||||
Interpolation
|
Interpolation
|
||||||
Identifier x
|
FunctionCallOrIdentifier
|
||||||
|
Identifier x
|
||||||
EscapeSeq \\n
|
EscapeSeq \\n
|
||||||
`)
|
`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('curly strings', () => {
|
||||||
|
test('work on one line', () => {
|
||||||
|
expect('{ one two three }').toMatchTree(`
|
||||||
|
String
|
||||||
|
CurlyString { one two three }
|
||||||
|
`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work on multiple lines', () => {
|
||||||
|
expect(`{
|
||||||
|
one
|
||||||
|
two
|
||||||
|
three }`).toMatchTree(`
|
||||||
|
String
|
||||||
|
CurlyString {
|
||||||
|
one
|
||||||
|
two
|
||||||
|
three }`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('can contain other curlies', () => {
|
||||||
|
expect(`{ { one }
|
||||||
|
two
|
||||||
|
{ three } }`).toMatchTree(`
|
||||||
|
String
|
||||||
|
CurlyString { { one }
|
||||||
|
two
|
||||||
|
{ three } }`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('double quoted strings', () => {
|
||||||
|
test("work", () => {
|
||||||
|
expect(`"hello world"`).toMatchTree(`
|
||||||
|
String
|
||||||
|
DoubleQuote "hello world"`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test("don't interpolate", () => {
|
||||||
|
expect(`"hello $world"`).toMatchTree(`
|
||||||
|
String
|
||||||
|
DoubleQuote "hello $world"`)
|
||||||
|
|
||||||
|
expect(`"hello $(1 + 2)"`).toMatchTree(`
|
||||||
|
String
|
||||||
|
DoubleQuote "hello $(1 + 2)"`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
|
||||||
750
src/parser/tests/tokens.test.ts
Normal file
750
src/parser/tests/tokens.test.ts
Normal file
|
|
@ -0,0 +1,750 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('constant types', () => {
|
||||||
|
test('null', () => {
|
||||||
|
expect(`null`).toBeToken('Null')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('boolean', () => {
|
||||||
|
expect(`true`).toMatchToken('Boolean', 'true')
|
||||||
|
expect(`false`).toMatchToken('Boolean', 'false')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('numbers', () => {
|
||||||
|
test('non-numbers', () => {
|
||||||
|
expect(`1st`).toMatchToken('Word', '1st')
|
||||||
|
expect(`1_`).toMatchToken('Word', '1_')
|
||||||
|
expect(`100.`).toMatchTokens(
|
||||||
|
{ type: 'Number', value: '100' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('simple numbers', () => {
|
||||||
|
expect(`1`).toMatchToken('Number', '1')
|
||||||
|
expect(`200`).toMatchToken('Number', '200')
|
||||||
|
expect(`5.20`).toMatchToken('Number', '5.20')
|
||||||
|
expect(`0.20`).toMatchToken('Number', '0.20')
|
||||||
|
expect(`-20`).toMatchToken('Number', '-20')
|
||||||
|
expect(`+20`).toMatchToken('Number', '+20')
|
||||||
|
expect(`-2134.34`).toMatchToken('Number', '-2134.34')
|
||||||
|
expect(`+20.5325`).toMatchToken('Number', '+20.5325')
|
||||||
|
expect(`1_000`).toMatchToken('Number', '1_000')
|
||||||
|
expect(`53_232_220`).toMatchToken('Number', '53_232_220')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('binary numbers', () => {
|
||||||
|
expect('0b110').toMatchToken('Number', '0b110')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('hex numbers', () => {
|
||||||
|
expect('0xdeadbeef').toMatchToken('Number', '0xdeadbeef')
|
||||||
|
expect('0x02d3f4').toMatchToken('Number', '0x02d3f4')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('hex numbers uppercase', () => {
|
||||||
|
expect('0xFF').toMatchToken('Number', '0xFF')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('octal numbers', () => {
|
||||||
|
expect('0o644').toMatchToken('Number', '0o644')
|
||||||
|
expect('0o055').toMatchToken('Number', '0o055')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('negative binary', () => {
|
||||||
|
expect('-0b110').toMatchToken('Number', '-0b110')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('negative hex', () => {
|
||||||
|
expect('-0xFF').toMatchToken('Number', '-0xFF')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('negative octal', () => {
|
||||||
|
expect('-0o755').toMatchToken('Number', '-0o755')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('positive prefix binary', () => {
|
||||||
|
expect('+0b110').toMatchToken('Number', '+0b110')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('positive prefix hex', () => {
|
||||||
|
expect('+0xFF').toMatchToken('Number', '+0xFF')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('positive prefix octal', () => {
|
||||||
|
expect('+0o644').toMatchToken('Number', '+0o644')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('underscores in number', () => {
|
||||||
|
expect(`1_000`).toMatchToken('Number', '1_000')
|
||||||
|
expect(`1_0`).toMatchToken('Number', '1_0')
|
||||||
|
expect('0b11_0').toMatchToken('Number', '0b11_0')
|
||||||
|
expect('0xdead_beef').toMatchToken('Number', '0xdead_beef')
|
||||||
|
expect('0o64_4').toMatchToken('Number', '0o64_4')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('identifiers', () => {
|
||||||
|
test('regular', () => {
|
||||||
|
expect('name').toBeToken('Identifier')
|
||||||
|
expect('bobby-mcgee').toBeToken('Identifier')
|
||||||
|
expect('starts-with?').toBeToken('Identifier')
|
||||||
|
expect('📢').toMatchToken('Identifier', '📢')
|
||||||
|
expect(' 📢 ').toMatchToken('Identifier', '📢')
|
||||||
|
expect(' oink-🐷-oink').toMatchToken('Identifier', 'oink-🐷-oink')
|
||||||
|
expect('$').toMatchToken('Identifier', '$')
|
||||||
|
expect('$cool').toMatchToken('Identifier', '$cool')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('one character identifiers', () => {
|
||||||
|
expect('a').toMatchToken('Identifier', 'a')
|
||||||
|
expect('z').toMatchToken('Identifier', 'z')
|
||||||
|
expect('$').toMatchToken('Identifier', '$')
|
||||||
|
expect('📢').toMatchToken('Identifier', '📢')
|
||||||
|
expect('?').toBeToken('Word') // ? alone is not valid identifier start
|
||||||
|
})
|
||||||
|
|
||||||
|
test('two character identifiers', () => {
|
||||||
|
expect('ab').toMatchToken('Identifier', 'ab')
|
||||||
|
expect('a1').toMatchToken('Identifier', 'a1')
|
||||||
|
expect('a-').toMatchToken('Identifier', 'a-')
|
||||||
|
expect('a?').toMatchToken('Identifier', 'a?') // ? valid at end
|
||||||
|
expect('ab?').toMatchToken('Identifier', 'ab?')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('three+ character identifiers', () => {
|
||||||
|
expect('abc').toMatchToken('Identifier', 'abc')
|
||||||
|
expect('a-b').toMatchToken('Identifier', 'a-b')
|
||||||
|
expect('a1b').toMatchToken('Identifier', 'a1b')
|
||||||
|
expect('abc?').toMatchToken('Identifier', 'abc?') // ? valid at end
|
||||||
|
expect('a-b-c?').toMatchToken('Identifier', 'a-b-c?')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('edge cases', () => {
|
||||||
|
expect('-bobby-mcgee').toBeToken('Word')
|
||||||
|
expect('starts-with??').toMatchToken('Identifier', 'starts-with??')
|
||||||
|
expect('starts?with?').toMatchToken('Identifier', 'starts?with?')
|
||||||
|
expect('a??b').toMatchToken('Identifier', 'a??b')
|
||||||
|
expect('oink-oink!').toBeToken('Word')
|
||||||
|
expect('dog#pound').toMatchToken('Word', 'dog#pound')
|
||||||
|
expect('http://website.com').toMatchToken('Word', 'http://website.com')
|
||||||
|
expect('school$cool').toMatchToken('Identifier', 'school$cool')
|
||||||
|
expect('EXIT:').toMatchTokens(
|
||||||
|
{ type: 'Word', value: 'EXIT' },
|
||||||
|
{ type: 'Colon' },
|
||||||
|
)
|
||||||
|
expect(`if y == 1: 'cool' end`).toMatchTokens(
|
||||||
|
{ type: 'Keyword', value: 'if' },
|
||||||
|
{ type: 'Identifier', value: 'y' },
|
||||||
|
{ type: 'Operator', value: '==' },
|
||||||
|
{ type: 'Number', value: '1' },
|
||||||
|
{ type: 'Colon' },
|
||||||
|
{ type: 'String', value: `'cool'` },
|
||||||
|
{ type: 'Keyword', value: 'end' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('paths', () => {
|
||||||
|
test('starting with ./', () => {
|
||||||
|
expect('./tmp').toMatchToken('Word', './tmp')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('starting with /', () => {
|
||||||
|
expect('/home/chris/dev').toMatchToken('Word', '/home/chris/dev')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('identifiers with dots tokenize separately', () => {
|
||||||
|
expect('readme.txt').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'readme' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'Identifier', value: 'txt' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('words (non-identifiers) consume dots', () => {
|
||||||
|
expect('README.md').toMatchToken('Word', 'README.md')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('all sorts of weird stuff', () => {
|
||||||
|
expect('dog#pound').toMatchToken('Word', 'dog#pound')
|
||||||
|
expect('my/kinda/place').toMatchToken('my/kinda/place')
|
||||||
|
expect('file://%/$##/@40!/index.php').toMatchToken('Word', 'file://%/$##/@40!/index.php')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('strings', () => {
|
||||||
|
test('single quoted', () => {
|
||||||
|
expect(`'hello world'`).toMatchToken('String', `'hello world'`)
|
||||||
|
expect(`'it\\'s a beautiful world'`).toMatchToken("'it\\'s a beautiful world'")
|
||||||
|
})
|
||||||
|
|
||||||
|
test('double quoted', () => {
|
||||||
|
expect(`"hello world"`).toMatchToken('String', `"hello world"`)
|
||||||
|
expect(`"it's a beautiful world"`).toMatchToken('String', `"it's a beautiful world"`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('empty strings', () => {
|
||||||
|
expect(`''`).toMatchToken('String', `''`)
|
||||||
|
expect(`""`).toMatchToken('String', `""`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('escape sequences', () => {
|
||||||
|
expect(`'hello\\nworld'`).toMatchToken('String', `'hello\\nworld'`)
|
||||||
|
expect(`'tab\\there'`).toMatchToken('String', `'tab\\there'`)
|
||||||
|
expect(`'quote\\''`).toMatchToken('String', `'quote\\''`)
|
||||||
|
expect(`'backslash\\\\'`).toMatchToken('String', `'backslash\\\\'`)
|
||||||
|
expect(`'dollar\\$sign'`).toMatchToken('String', `'dollar\\$sign'`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('unclosed strings - error case', () => {
|
||||||
|
// These should either fail or produce unexpected results
|
||||||
|
expect(`'hello`).toMatchToken('String', `'hello`)
|
||||||
|
expect(`"world`).toMatchToken('String', `"world`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('curly strings', () => {
|
||||||
|
test('curly quoted', () => {
|
||||||
|
expect('{ one two three }').toMatchToken('String', `{ one two three }`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('work on multiple lines', () => {
|
||||||
|
expect(`{
|
||||||
|
one
|
||||||
|
two
|
||||||
|
three }`).toMatchToken('String', `{
|
||||||
|
one
|
||||||
|
two
|
||||||
|
three }`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('can contain other curlies', () => {
|
||||||
|
expect(`{ { one }
|
||||||
|
two
|
||||||
|
{ three } }`).toMatchToken('String', `{ { one }
|
||||||
|
two
|
||||||
|
{ three } }`)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('empty curly string', () => {
|
||||||
|
expect('{}').toMatchToken('String', '{}')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('unclosed curly string - error case', () => {
|
||||||
|
// Should either fail or produce unexpected results
|
||||||
|
expect('{ hello').toMatchToken('String', '{ hello')
|
||||||
|
expect('{ nested { unclosed }').toMatchToken('String', '{ nested { unclosed }')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('operators', () => {
|
||||||
|
test('math operators', () => {
|
||||||
|
// assignment
|
||||||
|
expect('=').toMatchToken('Operator', '=')
|
||||||
|
|
||||||
|
// logic
|
||||||
|
expect('or').toMatchToken('Operator', 'or')
|
||||||
|
expect('and').toMatchToken('Operator', 'and')
|
||||||
|
|
||||||
|
// bitwise
|
||||||
|
expect('band').toMatchToken('Operator', 'band')
|
||||||
|
expect('bor').toMatchToken('Operator', 'bor')
|
||||||
|
expect('bxor').toMatchToken('Operator', 'bxor')
|
||||||
|
expect('>>>').toMatchToken('Operator', '>>>')
|
||||||
|
expect('>>').toMatchToken('Operator', '>>')
|
||||||
|
expect('<<').toMatchToken('Operator', '<<')
|
||||||
|
|
||||||
|
// compound assignment
|
||||||
|
expect('??=').toMatchToken('Operator', '??=')
|
||||||
|
expect('+=').toMatchToken('Operator', '+=')
|
||||||
|
expect('-=').toMatchToken('Operator', '-=')
|
||||||
|
expect('*=').toMatchToken('Operator', '*=')
|
||||||
|
expect('/=').toMatchToken('Operator', '/=')
|
||||||
|
expect('%=').toMatchToken('Operator', '%=')
|
||||||
|
|
||||||
|
// nullish
|
||||||
|
expect('??').toMatchToken('Operator', '??')
|
||||||
|
|
||||||
|
// math
|
||||||
|
expect('**').toMatchToken('Operator', '**')
|
||||||
|
expect('*').toMatchToken('Operator', '*')
|
||||||
|
expect('/').toMatchToken('Operator', '/')
|
||||||
|
expect('+').toMatchToken('Operator', '+')
|
||||||
|
expect('-').toMatchToken('Operator', '-')
|
||||||
|
expect('%').toMatchToken('Operator', '%')
|
||||||
|
|
||||||
|
// comparison
|
||||||
|
expect('>=').toMatchToken('Operator', '>=')
|
||||||
|
expect('<=').toMatchToken('Operator', '<=')
|
||||||
|
expect('!=').toMatchToken('Operator', '!=')
|
||||||
|
expect('==').toMatchToken('Operator', '==')
|
||||||
|
expect('>').toMatchToken('Operator', '>')
|
||||||
|
expect('<').toMatchToken('Operator', '<')
|
||||||
|
|
||||||
|
// property access
|
||||||
|
expect('.').toMatchToken('Operator', '.')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('keywords', () => {
|
||||||
|
test('keywords', () => {
|
||||||
|
expect(`import`).toMatchToken('Keyword', 'import')
|
||||||
|
|
||||||
|
expect(`end`).toMatchToken('Keyword', 'end')
|
||||||
|
expect(`do`).toMatchToken('Keyword', 'do')
|
||||||
|
|
||||||
|
expect(`while`).toMatchToken('Keyword', 'while')
|
||||||
|
|
||||||
|
expect(`if`).toMatchToken('Keyword', 'if')
|
||||||
|
expect(`else`).toMatchToken('Keyword', 'else')
|
||||||
|
|
||||||
|
expect(`try`).toMatchToken('Keyword', 'try')
|
||||||
|
expect(`catch`).toMatchToken('Keyword', 'catch')
|
||||||
|
expect(`finally`).toMatchToken('Keyword', 'finally')
|
||||||
|
expect(`throw`).toMatchToken('Keyword', 'throw')
|
||||||
|
expect(`not`).toMatchToken('Keyword', 'not')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('regex', () => {
|
||||||
|
test('use double slash', () => {
|
||||||
|
expect(`//[0-9]+//`).toMatchToken('Regex', '//[0-9]+//')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('punctuation', () => {
|
||||||
|
test('underscore', () => {
|
||||||
|
expect(`_`).toBeToken('Underscore')
|
||||||
|
expect(`__`).toMatchToken('Word', '__')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('semicolon', () => {
|
||||||
|
expect(`;`).toBeToken('Semicolon')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('newline', () => {
|
||||||
|
expect('\n').toBeToken('Newline')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('colon', () => {
|
||||||
|
expect(':').toBeToken('Colon')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('comments', () => {
|
||||||
|
test('comments', () => {
|
||||||
|
expect(`# hey friends`).toMatchToken('Comment', '# hey friends')
|
||||||
|
expect(`#hey-friends`).toMatchToken('Comment', '#hey-friends')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('brackets', () => {
|
||||||
|
test('parens', () => {
|
||||||
|
expect(`(`).toBeToken('OpenParen')
|
||||||
|
expect(`)`).toBeToken('CloseParen')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('staples', () => {
|
||||||
|
expect(`[`).toBeToken('OpenBracket')
|
||||||
|
expect(`]`).toBeToken('CloseBracket')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('multiple tokens', () => {
|
||||||
|
test('constants work fine', () => {
|
||||||
|
expect(`null true false`).toMatchTokens(
|
||||||
|
{ type: 'Null' },
|
||||||
|
{ type: 'Boolean', value: 'true' },
|
||||||
|
{ type: 'Boolean', value: 'false' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('numbers', () => {
|
||||||
|
expect(`100 -400.42 null`).toMatchTokens(
|
||||||
|
{ type: 'Number', value: '100' },
|
||||||
|
{ type: 'Number', value: '-400.42' },
|
||||||
|
{ type: 'Null' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('whitespace', () => {
|
||||||
|
expect(`
|
||||||
|
'hello world'
|
||||||
|
|
||||||
|
'goodbye world'
|
||||||
|
`).toMatchTokens(
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'String', value: "'hello world'" },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'String', value: "'goodbye world'" },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('newline in parens is ignored', () => {
|
||||||
|
expect(`(
|
||||||
|
'hello world'
|
||||||
|
|
||||||
|
'goodbye world'
|
||||||
|
)`).toMatchTokens(
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'String', value: "'hello world'" },
|
||||||
|
{ type: 'String', value: "'goodbye world'" },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('newline in brackets is ignored', () => {
|
||||||
|
expect(`[
|
||||||
|
a b
|
||||||
|
c d
|
||||||
|
|
||||||
|
e
|
||||||
|
|
||||||
|
f
|
||||||
|
|
||||||
|
]`).toMatchTokens(
|
||||||
|
{ type: 'OpenBracket' },
|
||||||
|
{ type: 'Identifier', value: "a" },
|
||||||
|
{ type: 'Identifier', value: "b" },
|
||||||
|
{ type: 'Identifier', value: "c" },
|
||||||
|
{ type: 'Identifier', value: "d" },
|
||||||
|
{ type: 'Identifier', value: "e" },
|
||||||
|
{ type: 'Identifier', value: "f" },
|
||||||
|
{ type: 'CloseBracket' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function call', () => {
|
||||||
|
expect('echo hello world').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'echo' },
|
||||||
|
{ type: 'Identifier', value: 'hello' },
|
||||||
|
{ type: 'Identifier', value: 'world' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function call w/ parens', () => {
|
||||||
|
expect('echo(bold hello world)').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'echo' },
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'Identifier', value: 'bold' },
|
||||||
|
{ type: 'Identifier', value: 'hello' },
|
||||||
|
{ type: 'Identifier', value: 'world' },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
)
|
||||||
|
|
||||||
|
expect('echo (bold hello world)').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'echo' },
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'Identifier', value: 'bold' },
|
||||||
|
{ type: 'Identifier', value: 'hello' },
|
||||||
|
{ type: 'Identifier', value: 'world' },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('assignment', () => {
|
||||||
|
expect('x = 5').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
{ type: 'Operator', value: '=' },
|
||||||
|
{ type: 'Number', value: '5' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('math expression', () => {
|
||||||
|
expect('1 + 2 * 3').toMatchTokens(
|
||||||
|
{ type: 'Number', value: '1' },
|
||||||
|
{ type: 'Operator', value: '+' },
|
||||||
|
{ type: 'Number', value: '2' },
|
||||||
|
{ type: 'Operator', value: '*' },
|
||||||
|
{ type: 'Number', value: '3' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('inline comment', () => {
|
||||||
|
expect('x = 5 # set x').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
{ type: 'Operator', value: '=' },
|
||||||
|
{ type: 'Number', value: '5' },
|
||||||
|
{ type: 'Comment', value: '# set x' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('line comment', () => {
|
||||||
|
expect('x = 5 \n# hello\n set x').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
{ type: 'Operator', value: '=' },
|
||||||
|
{ type: 'Number', value: '5' },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'Comment', value: '# hello' },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'Identifier', value: 'set' },
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('colons separate tokens', () => {
|
||||||
|
expect('x do: y').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
{ type: 'Keyword', value: 'do' },
|
||||||
|
{ type: 'Colon' },
|
||||||
|
{ type: 'Identifier', value: 'y' },
|
||||||
|
)
|
||||||
|
|
||||||
|
expect('x: y').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
{ type: 'Colon' },
|
||||||
|
{ type: 'Identifier', value: 'y' },
|
||||||
|
)
|
||||||
|
|
||||||
|
expect('5: y').toMatchTokens(
|
||||||
|
{ type: 'Number', value: '5' },
|
||||||
|
{ type: 'Colon' },
|
||||||
|
{ type: 'Identifier', value: 'y' },
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
expect(`if (var? 'abc'): y`).toMatchTokens(
|
||||||
|
{ type: 'Keyword', value: 'if' },
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'Identifier', value: 'var?' },
|
||||||
|
{ type: 'String', value: `'abc'` },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
{ type: 'Colon' },
|
||||||
|
{ type: 'Identifier', value: 'y' },
|
||||||
|
)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
do x:
|
||||||
|
y
|
||||||
|
end`).toMatchTokens(
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'Keyword', value: 'do' },
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
{ type: 'Colon' },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'Identifier', value: 'y' },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'Keyword', value: 'end' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('semicolons separate statements', () => {
|
||||||
|
expect('x; y').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
{ type: 'Semicolon' },
|
||||||
|
{ type: 'Identifier', value: 'y' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('semicolons in parens', () => {
|
||||||
|
expect('(x; y)').toMatchTokens(
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'Identifier', value: 'x' },
|
||||||
|
{ type: 'Semicolon' },
|
||||||
|
{ type: 'Identifier', value: 'y' },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dot operator beginning word with slash', () => {
|
||||||
|
expect(`(basename ./cool)`).toMatchTokens(
|
||||||
|
{ 'type': 'OpenParen' },
|
||||||
|
{ 'type': 'Identifier', 'value': 'basename' },
|
||||||
|
{ 'type': 'Word', 'value': './cool' },
|
||||||
|
{ 'type': 'CloseParen' }
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dot word after identifier with space', () => {
|
||||||
|
expect(`expand-path .git`).toMatchTokens(
|
||||||
|
{ 'type': 'Identifier', 'value': 'expand-path' },
|
||||||
|
{ 'type': 'Word', 'value': '.git' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dot operator after identifier without space', () => {
|
||||||
|
expect(`config.path`).toMatchTokens(
|
||||||
|
{ 'type': 'Identifier', 'value': 'config' },
|
||||||
|
{ 'type': 'Operator', 'value': '.' },
|
||||||
|
{ 'type': 'Identifier', 'value': 'path' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('nesting edge cases', () => {
|
||||||
|
test('deeply nested parens', () => {
|
||||||
|
expect('((nested))').toMatchTokens(
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'Identifier', value: 'nested' },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('mixed nesting', () => {
|
||||||
|
expect('([combo])').toMatchTokens(
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'OpenBracket' },
|
||||||
|
{ type: 'Identifier', value: 'combo' },
|
||||||
|
{ type: 'CloseBracket' },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('invalid numbers that should be words', () => {
|
||||||
|
test('invalid binary', () => {
|
||||||
|
expect('0b2').toMatchToken('Word', '0b2')
|
||||||
|
expect('0b123').toMatchToken('Word', '0b123')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('invalid octal', () => {
|
||||||
|
expect('0o8').toMatchToken('Word', '0o8')
|
||||||
|
expect('0o999').toMatchToken('Word', '0o999')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('invalid hex', () => {
|
||||||
|
expect('0xGGG').toMatchToken('Word', '0xGGG')
|
||||||
|
expect('0xZZZ').toMatchToken('Word', '0xZZZ')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('multiple decimal points', () => {
|
||||||
|
expect('1.2.3').toMatchToken('Word', '1.2.3')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('unicode and emoji', () => {
|
||||||
|
test('greek letters', () => {
|
||||||
|
expect('αβγ').toMatchToken('Identifier', 'αβγ')
|
||||||
|
expect('delta-δ').toMatchToken('Identifier', 'delta-δ')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('math symbols', () => {
|
||||||
|
expect('∑').toMatchToken('Identifier', '∑')
|
||||||
|
expect('∏').toMatchToken('Identifier', '∏')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('CJK characters', () => {
|
||||||
|
expect('你好').toMatchToken('Identifier', '你好')
|
||||||
|
expect('こんにちは').toMatchToken('Identifier', 'こんにちは')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('empty and whitespace input', () => {
|
||||||
|
test('empty string', () => {
|
||||||
|
expect('').toMatchTokens()
|
||||||
|
})
|
||||||
|
|
||||||
|
test('only whitespace', () => {
|
||||||
|
expect(' ').toMatchTokens()
|
||||||
|
})
|
||||||
|
|
||||||
|
test('only tabs', () => {
|
||||||
|
expect('\t\t\t').toMatchTokens()
|
||||||
|
})
|
||||||
|
|
||||||
|
test('only newlines', () => {
|
||||||
|
expect('\n\n\n').toMatchTokens(
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
{ type: 'Newline' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('named args', () => {
|
||||||
|
test("don't need spaces", () => {
|
||||||
|
expect(`named=arg`).toMatchTokens(
|
||||||
|
{ type: 'NamedArgPrefix', value: 'named=' },
|
||||||
|
{ type: 'Identifier', value: 'arg' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test("can have spaces", () => {
|
||||||
|
expect(`named= arg`).toMatchTokens(
|
||||||
|
{ type: 'NamedArgPrefix', value: 'named=' },
|
||||||
|
{ type: 'Identifier', value: 'arg' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test("can include numbers", () => {
|
||||||
|
expect(`named123= arg`).toMatchTokens(
|
||||||
|
{ type: 'NamedArgPrefix', value: 'named123=' },
|
||||||
|
{ type: 'Identifier', value: 'arg' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('dot operator', () => {
|
||||||
|
test('standalone dot', () => {
|
||||||
|
expect('.').toMatchToken('Operator', '.')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dot between identifiers tokenizes as separate tokens', () => {
|
||||||
|
expect('config.path').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'config' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'Identifier', value: 'path' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dot with number', () => {
|
||||||
|
expect('array.0').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'array' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'Number', value: '0' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dots', () => {
|
||||||
|
expect('a.b.c').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'a' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'Identifier', value: 'b' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'Identifier', value: 'c' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('identifier-like paths tokenize separately', () => {
|
||||||
|
expect('readme.txt').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'readme' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'Identifier', value: 'txt' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('word-like paths remain as single token', () => {
|
||||||
|
expect('./file.txt').toMatchToken('Word', './file.txt')
|
||||||
|
expect('README.TXT').toMatchToken('Word', 'README.TXT')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dot with paren expression', () => {
|
||||||
|
expect('obj.(1 + 2)').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'obj' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'Number', value: '1' },
|
||||||
|
{ type: 'Operator', value: '+' },
|
||||||
|
{ type: 'Number', value: '2' },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('chained dot with paren expression', () => {
|
||||||
|
expect('obj.items.(i)').toMatchTokens(
|
||||||
|
{ type: 'Identifier', value: 'obj' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'Identifier', value: 'items' },
|
||||||
|
{ type: 'Operator', value: '.' },
|
||||||
|
{ type: 'OpenParen' },
|
||||||
|
{ type: 'Identifier', value: 'i' },
|
||||||
|
{ type: 'CloseParen' },
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,285 +0,0 @@
|
||||||
import { ExternalTokenizer, InputStream, Stack } from '@lezer/lr'
|
|
||||||
import { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot, Do } from './shrimp.terms'
|
|
||||||
|
|
||||||
// doobie doobie do (we need the `do` keyword to know when we're defining params)
|
|
||||||
export function specializeKeyword(ident: string) {
|
|
||||||
return ident === 'do' ? Do : -1
|
|
||||||
}
|
|
||||||
|
|
||||||
// tell the dotGet searcher about builtin globals
|
|
||||||
export const globals: string[] = []
|
|
||||||
export const setGlobals = (newGlobals: string[]) => {
|
|
||||||
globals.length = 0
|
|
||||||
globals.push(...newGlobals)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Contextual external tokenizer for words/identifiers. Almost any character
// can be part of a word: isWordChar excludes only whitespace, newline, `)`,
// `]`, and EOF.
//
// Emits one of: Identifier / AssignableIdentifier (valid identifier runs),
// IdentifierBeforeDot (identifier in scope followed by `.`), or Word
// (anything else word-shaped). Digits and signed numbers are left for the
// grammar's Number token.
export const tokenizer = new ExternalTokenizer(
  (input: InputStream, stack: Stack) => {
    // Peek the first full code point (handles surrogate pairs for emoji).
    const ch = getFullCodePoint(input, 0)
    if (!isWordChar(ch)) return

    // Don't consume things that start with digits - let Number token handle it
    if (isDigit(ch)) return

    // Don't consume things that start with - or + followed by a digit (negative/positive numbers)
    if ((ch === 45 /* - */ || ch === 43) /* + */ && isDigit(input.peek(1))) return

    // Identifiers must start with a lowercase letter or emoji/unicode char.
    const isValidStart = isLowercaseLetter(ch) || isEmojiOrUnicode(ch)
    // Ask the parser whether a Word token is even acceptable here.
    const canBeWord = stack.canShift(Word)

    // Consume all word characters, tracking if it remains a valid identifier
    const { pos, isValidIdentifier, stoppedAtDot } = consumeWordToken(
      input,
      isValidStart,
      canBeWord
    )

    // Check if we should emit IdentifierBeforeDot for property access
    if (stoppedAtDot) {
      const dotGetToken = checkForDotGet(input, stack, pos)

      if (dotGetToken) {
        // Name is in scope (or a builtin global): treat `name.` as property access.
        input.advance(pos)
        input.acceptToken(dotGetToken)
      } else {
        // Not in scope - continue consuming the dot as part of the word
        // (e.g. "file.txt" stays one Word token).
        const afterDot = consumeRestOfWord(input, pos + 1, canBeWord)
        input.advance(afterDot)
        input.acceptToken(Word)
      }

      return
    }

    // Advance past the token we consumed
    input.advance(pos)

    // Choose which token to emit
    if (isValidIdentifier) {
      // Disambiguate Identifier vs AssignableIdentifier via grammar state
      // and a peek-ahead for `=`.
      const token = chooseIdentifierToken(input, stack)
      input.acceptToken(token)
    } else {
      input.acceptToken(Word)
    }
  },
  // contextual: token choice depends on parse state (stack.canShift), so the
  // result may not be cached across different parser states.
  { contextual: true }
)
|
|
||||||
|
|
||||||
// Build identifier text from input stream, handling surrogate pairs for emoji
|
|
||||||
const buildIdentifierText = (input: InputStream, length: number): string => {
|
|
||||||
let text = ''
|
|
||||||
for (let i = 0; i < length; i++) {
|
|
||||||
const charCode = input.peek(i)
|
|
||||||
if (charCode === -1) break
|
|
||||||
|
|
||||||
// Handle surrogate pairs for emoji (UTF-16 encoding)
|
|
||||||
if (charCode >= 0xd800 && charCode <= 0xdbff && i + 1 < length) {
|
|
||||||
const low = input.peek(i + 1)
|
|
||||||
if (low >= 0xdc00 && low <= 0xdfff) {
|
|
||||||
text += String.fromCharCode(charCode, low)
|
|
||||||
i++ // Skip the low surrogate
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
text += String.fromCharCode(charCode)
|
|
||||||
}
|
|
||||||
return text
|
|
||||||
}
|
|
||||||
|
|
||||||
// Consume word characters, tracking if it remains a valid identifier.
// Returns the position after consuming, whether it's a valid identifier, and if we stopped at a dot.
// The first character has already been classified by the caller, so scanning
// starts just past it.
const consumeWordToken = (
  input: InputStream,
  isValidStart: boolean,
  canBeWord: boolean
): { pos: number; isValidIdentifier: boolean; stoppedAtDot: boolean } => {
  // Skip the first code point (1 or 2 UTF-16 units depending on surrogates).
  let pos = getCharSize(getFullCodePoint(input, 0))
  let isValidIdentifier = isValidStart
  let stoppedAtDot = false

  while (true) {
    const ch = getFullCodePoint(input, pos)

    // Stop at dot if we have a valid identifier (might be property access)
    if (ch === 46 /* . */ && isValidIdentifier) {
      stoppedAtDot = true
      break
    }

    // Stop if we hit a non-word character
    if (!isWordChar(ch)) break

    // Context-aware termination: semicolon/colon can end a word if followed by whitespace
    // This allows `hello; 2` to parse correctly while `hello;world` stays as one word
    if (canBeWord && (ch === 59 /* ; */ || ch === 58) /* : */) {
      const nextCh = getFullCodePoint(input, pos + 1)
      if (!isWordChar(nextCh)) break
    }

    // Track identifier validity: identifiers allow lowercase letters, digits,
    // `-`, `?`, and emoji/unicode. Anything else downgrades the run to a Word,
    // or — when the grammar cannot shift a Word here — ends the token early.
    if (!isLowercaseLetter(ch) && !isDigit(ch) && ch !== 45 /* - */ && ch !== 63 /* ? */ && !isEmojiOrUnicode(ch)) {
      if (!canBeWord) break
      isValidIdentifier = false
    }

    pos += getCharSize(ch)
  }

  return { pos, isValidIdentifier, stoppedAtDot }
}
|
|
||||||
|
|
||||||
// Consume the rest of a word after we've decided not to treat a dot as DotGet
|
|
||||||
// Used when we have "file.txt" - we already consumed "file", now consume ".txt"
|
|
||||||
const consumeRestOfWord = (input: InputStream, startPos: number, canBeWord: boolean): number => {
|
|
||||||
let pos = startPos
|
|
||||||
while (true) {
|
|
||||||
const ch = getFullCodePoint(input, pos)
|
|
||||||
|
|
||||||
// Stop if we hit a non-word character
|
|
||||||
if (!isWordChar(ch)) break
|
|
||||||
|
|
||||||
// Context-aware termination for semicolon/colon
|
|
||||||
if (canBeWord && (ch === 59 /* ; */ || ch === 58) /* : */) {
|
|
||||||
const nextCh = getFullCodePoint(input, pos + 1)
|
|
||||||
if (!isWordChar(nextCh)) break
|
|
||||||
}
|
|
||||||
|
|
||||||
pos += getCharSize(ch)
|
|
||||||
}
|
|
||||||
return pos
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if this identifier is in scope (for property access detection)
|
|
||||||
// Returns IdentifierBeforeDot token if in scope, null otherwise
|
|
||||||
const checkForDotGet = (input: InputStream, stack: Stack, pos: number): number | null => {
|
|
||||||
const identifierText = buildIdentifierText(input, pos)
|
|
||||||
const context = stack.context as { scope: { has(name: string): boolean } } | undefined
|
|
||||||
|
|
||||||
// If identifier is in scope, this is property access (e.g., obj.prop)
|
|
||||||
// If not in scope, it should be consumed as a Word (e.g., file.txt)
|
|
||||||
return context?.scope.has(identifierText) || globals.includes(identifierText) ? IdentifierBeforeDot : null
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decide between AssignableIdentifier and Identifier using grammar state + peek-ahead
|
|
||||||
const chooseIdentifierToken = (input: InputStream, stack: Stack): number => {
|
|
||||||
const canAssignable = stack.canShift(AssignableIdentifier)
|
|
||||||
const canRegular = stack.canShift(Identifier)
|
|
||||||
|
|
||||||
// Only one option is valid - use it
|
|
||||||
if (canAssignable && !canRegular) return AssignableIdentifier
|
|
||||||
if (canRegular && !canAssignable) return Identifier
|
|
||||||
|
|
||||||
// Both possible (ambiguous context) - peek ahead for '=' to disambiguate
|
|
||||||
// This happens at statement start where both `x = 5` (assign) and `echo x` (call) are valid
|
|
||||||
let peekPos = 0
|
|
||||||
while (true) {
|
|
||||||
const ch = getFullCodePoint(input, peekPos)
|
|
||||||
if (isWhiteSpace(ch)) {
|
|
||||||
peekPos += getCharSize(ch)
|
|
||||||
} else {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const nextCh = getFullCodePoint(input, peekPos)
|
|
||||||
if (nextCh === 61 /* = */) {
|
|
||||||
// Found '=', but check if it's followed by whitespace
|
|
||||||
// If '=' is followed by non-whitespace (like '=cool*'), it won't be tokenized as Eq
|
|
||||||
// In that case, this should be Identifier (for function call), not AssignableIdentifier
|
|
||||||
const charAfterEquals = getFullCodePoint(input, peekPos + 1)
|
|
||||||
if (isWhiteSpace(charAfterEquals) || charAfterEquals === -1 /* EOF */) {
|
|
||||||
return AssignableIdentifier
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return Identifier
|
|
||||||
}
|
|
||||||
|
|
||||||
// Character classification helpers
|
|
||||||
const isWhiteSpace = (ch: number): boolean => {
|
|
||||||
return ch === 32 /* space */ || ch === 9 /* tab */ || ch === 13 /* \r */
|
|
||||||
}
|
|
||||||
|
|
||||||
const isWordChar = (ch: number): boolean => {
|
|
||||||
return (
|
|
||||||
!isWhiteSpace(ch) &&
|
|
||||||
ch !== 10 /* \n */ &&
|
|
||||||
ch !== 41 /* ) */ &&
|
|
||||||
ch !== 93 /* ] */ &&
|
|
||||||
ch !== -1 /* EOF */
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const isLowercaseLetter = (ch: number): boolean => {
|
|
||||||
return ch >= 97 && ch <= 122 // a-z
|
|
||||||
}
|
|
||||||
|
|
||||||
const isDigit = (ch: number): boolean => {
|
|
||||||
return ch >= 48 && ch <= 57 // 0-9
|
|
||||||
}
|
|
||||||
|
|
||||||
const getFullCodePoint = (input: InputStream, pos: number): number => {
|
|
||||||
const ch = input.peek(pos)
|
|
||||||
|
|
||||||
// Check if this is a high surrogate (0xD800-0xDBFF)
|
|
||||||
if (ch >= 0xd800 && ch <= 0xdbff) {
|
|
||||||
const low = input.peek(pos + 1)
|
|
||||||
// Check if next is low surrogate (0xDC00-0xDFFF)
|
|
||||||
if (low >= 0xdc00 && low <= 0xdfff) {
|
|
||||||
// Combine surrogate pair into full code point
|
|
||||||
return 0x10000 + ((ch & 0x3ff) << 10) + (low & 0x3ff)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ch
|
|
||||||
}
|
|
||||||
|
|
||||||
const isEmojiOrUnicode = (ch: number): boolean => {
|
|
||||||
return (
|
|
||||||
// Basic Emoticons
|
|
||||||
(ch >= 0x1f600 && ch <= 0x1f64f) ||
|
|
||||||
// Miscellaneous Symbols and Pictographs
|
|
||||||
(ch >= 0x1f300 && ch <= 0x1f5ff) ||
|
|
||||||
// Transport and Map Symbols
|
|
||||||
(ch >= 0x1f680 && ch <= 0x1f6ff) ||
|
|
||||||
// Regional Indicator Symbols (flags)
|
|
||||||
(ch >= 0x1f1e6 && ch <= 0x1f1ff) ||
|
|
||||||
// Miscellaneous Symbols (hearts, stars, weather)
|
|
||||||
(ch >= 0x2600 && ch <= 0x26ff) ||
|
|
||||||
// Dingbats (scissors, pencils, etc)
|
|
||||||
(ch >= 0x2700 && ch <= 0x27bf) ||
|
|
||||||
// Supplemental Symbols and Pictographs (newer emojis)
|
|
||||||
(ch >= 0x1f900 && ch <= 0x1f9ff) ||
|
|
||||||
// Symbols and Pictographs Extended-A (newest emojis)
|
|
||||||
(ch >= 0x1fa70 && ch <= 0x1faff) ||
|
|
||||||
// Various Asian Characters with emoji presentation
|
|
||||||
(ch >= 0x1f018 && ch <= 0x1f270) ||
|
|
||||||
// Variation Selectors (for emoji presentation)
|
|
||||||
(ch >= 0xfe00 && ch <= 0xfe0f) ||
|
|
||||||
// Additional miscellaneous items
|
|
||||||
(ch >= 0x238c && ch <= 0x2454) ||
|
|
||||||
// Combining Diacritical Marks for Symbols
|
|
||||||
(ch >= 0x20d0 && ch <= 0x20ff) ||
|
|
||||||
// Latin-1 Supplement (includes ², ³, ¹ and other special chars)
|
|
||||||
(ch >= 0x00a0 && ch <= 0x00ff) ||
|
|
||||||
// Greek and Coptic (U+0370-U+03FF)
|
|
||||||
(ch >= 0x0370 && ch <= 0x03ff) ||
|
|
||||||
// Mathematical Alphanumeric Symbols (U+1D400-U+1D7FF)
|
|
||||||
(ch >= 0x1d400 && ch <= 0x1d7ff) ||
|
|
||||||
// Mathematical Operators (U+2200-U+22FF)
|
|
||||||
(ch >= 0x2200 && ch <= 0x22ff) ||
|
|
||||||
// Superscripts and Subscripts (U+2070-U+209F)
|
|
||||||
(ch >= 0x2070 && ch <= 0x209f) ||
|
|
||||||
// Arrows (U+2190-U+21FF)
|
|
||||||
(ch >= 0x2190 && ch <= 0x21ff) ||
|
|
||||||
// Hiragana (U+3040-U+309F)
|
|
||||||
(ch >= 0x3040 && ch <= 0x309f) ||
|
|
||||||
// Katakana (U+30A0-U+30FF)
|
|
||||||
(ch >= 0x30a0 && ch <= 0x30ff) ||
|
|
||||||
// CJK Unified Ideographs (U+4E00-U+9FFF)
|
|
||||||
(ch >= 0x4e00 && ch <= 0x9fff)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Code points above the BMP occupy two UTF-16 code units (a surrogate pair);
// everything else is one unit.
const getCharSize = (ch: number) => (ch <= 0xffff ? 1 : 2)
|
|
||||||
594
src/parser/tokenizer2.ts
Normal file
594
src/parser/tokenizer2.ts
Normal file
|
|
@ -0,0 +1,594 @@
|
||||||
|
// Debug-logging toggle: any non-empty DEBUG env var enables token tracing.
// Boolean(...) keeps the type a plain boolean instead of `string | false`,
// and is truthiness-equivalent everywhere DEBUG is tested.
const DEBUG = Boolean(process.env.DEBUG)
|
||||||
|
|
||||||
|
// A single lexed token: its kind plus the half-open [from, to) offsets into
// the source string. `value` holds the raw source slice, and is only set for
// token kinds that carry text (see the valueTokens set in this file).
export type Token = {
  type: TokenType
  value?: string,
  from: number,
  to: number,
}
|
||||||
|
|
||||||
|
// All token kinds the scanner can emit. Kept as a numeric enum: the scanner's
// debug logging uses the reverse mapping (TokenType[n] -> name).
export enum TokenType {
  Comment,

  Keyword,
  Operator,

  // layout / statement punctuation
  Newline,
  Semicolon,
  Colon,
  Underscore,

  // brackets
  OpenParen,
  CloseParen,
  OpenBracket,
  CloseBracket,

  // names
  Identifier,
  Word,
  NamedArgPrefix,

  // literals
  Null,
  Boolean,
  Number,
  String,
  Regex,
}
|
||||||
|
|
||||||
|
// Token kinds that carry their raw source text in Token.value —
// Scanner.push slices input[from..to] only for these kinds.
const valueTokens = new Set([
  TokenType.Comment,
  TokenType.Keyword, TokenType.Operator,
  TokenType.Identifier, TokenType.Word, TokenType.NamedArgPrefix,
  TokenType.Boolean, TokenType.Number, TokenType.String, TokenType.Regex,
  TokenType.Underscore
])
|
||||||
|
|
||||||
|
// Every operator spelling the scanner recognizes, grouped by category.
// NOTE(review): within groups, longer operators (e.g. '>>>' before '>>',
// '??=' before '??') are listed before their prefixes, which suggests Set
// insertion order may matter to a longest-match consumer — confirm against
// the operator-matching code before reordering entries.
const operators = new Set([
  // assignment
  '=',

  // logic
  'or',
  'and',

  // bitwise
  'band',
  'bor',
  'bxor',
  '>>>',
  '>>',
  '<<',

  // compound assignment
  '??=',
  '+=',
  '-=',
  '*=',
  '/=',
  '%=',

  // nullish
  '??',

  // math
  '**',
  '*',
  '/',
  '+',
  '-',
  '%',

  // comparison
  '>=',
  '<=',
  '!=',
  '==',
  '>',
  '<',

  // property access
  '.',

  // pipe
  '|',
])
|
||||||
|
|
||||||
|
const keywords = new Set([
|
||||||
|
'import',
|
||||||
|
'end',
|
||||||
|
'do',
|
||||||
|
'if',
|
||||||
|
'while',
|
||||||
|
'if',
|
||||||
|
'else',
|
||||||
|
'try',
|
||||||
|
'catch',
|
||||||
|
'finally',
|
||||||
|
'throw',
|
||||||
|
'not',
|
||||||
|
])
|
||||||
|
|
||||||
|
// helper
|
||||||
|
function c(strings: TemplateStringsArray, ...values: any[]) {
|
||||||
|
return strings.reduce((result, str, i) => result + str + (values[i] ?? ""), "").charCodeAt(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
function s(c: number): string {
|
||||||
|
return String.fromCharCode(c)
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Scanner {
|
||||||
|
input = ''
|
||||||
|
pos = 0
|
||||||
|
start = 0
|
||||||
|
char = 0
|
||||||
|
prev = 0
|
||||||
|
inParen = 0
|
||||||
|
inBracket = 0
|
||||||
|
tokens: Token[] = []
|
||||||
|
prevIsWhitespace = true
|
||||||
|
|
||||||
|
reset() {
|
||||||
|
this.input = ''
|
||||||
|
this.pos = 0
|
||||||
|
this.start = 0
|
||||||
|
this.char = 0
|
||||||
|
this.prev = 0
|
||||||
|
this.tokens.length = 0
|
||||||
|
this.prevIsWhitespace = true
|
||||||
|
}
|
||||||
|
|
||||||
|
peek(count = 0): number {
|
||||||
|
return getFullCodePoint(this.input, this.pos + count)
|
||||||
|
}
|
||||||
|
|
||||||
|
next(): number {
|
||||||
|
this.prevIsWhitespace = isWhitespace(this.char)
|
||||||
|
this.prev = this.char
|
||||||
|
this.char = this.peek()
|
||||||
|
this.pos += getCharSize(this.char)
|
||||||
|
|
||||||
|
return this.char
|
||||||
|
}
|
||||||
|
|
||||||
|
push(type: TokenType, from?: number, to?: number) {
|
||||||
|
from ??= this.start
|
||||||
|
to ??= this.pos - getCharSize(this.char)
|
||||||
|
if (to < from) to = from
|
||||||
|
|
||||||
|
this.tokens.push(Object.assign({}, {
|
||||||
|
type,
|
||||||
|
from,
|
||||||
|
to,
|
||||||
|
}, valueTokens.has(type) ? { value: this.input.slice(from, to) } : {}))
|
||||||
|
|
||||||
|
if (DEBUG) {
|
||||||
|
const tok = this.tokens.at(-1)
|
||||||
|
console.log(`≫ PUSH(${from},${to})`, TokenType[tok?.type || 0], '—', tok?.value)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.start = this.pos
|
||||||
|
}
|
||||||
|
|
||||||
|
pushChar(type: TokenType) {
|
||||||
|
this.push(type, this.pos - 1, this.pos)
|
||||||
|
}
|
||||||
|
|
||||||
|
// turn shrimp code into shrimp tokens that get fed into the parser
|
||||||
|
tokenize(input: string): Token[] {
|
||||||
|
this.reset()
|
||||||
|
this.input = input
|
||||||
|
this.next()
|
||||||
|
|
||||||
|
while (this.char > 0) {
|
||||||
|
const char = this.char
|
||||||
|
|
||||||
|
if (char === c`#`) {
|
||||||
|
this.readComment()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isBracket(char)) {
|
||||||
|
this.readBracket()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isStringDelim(char)) {
|
||||||
|
this.readString(char)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (char === c`{`) {
|
||||||
|
this.readCurlyString()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isIdentStart(char)) {
|
||||||
|
this.readWordOrIdent(true) // true = started with identifier char
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isDigit(char) || ((char === c`-` || char === c`+`) && isDigit(this.peek()))) {
|
||||||
|
this.readNumber()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (char === c`:`) {
|
||||||
|
this.pushChar(TokenType.Colon)
|
||||||
|
this.next()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// whitespace-sensitive dot as operator (property access) only after identifier/number
|
||||||
|
if (char === c`.`) {
|
||||||
|
if (this.canBeDotGet(this.tokens.at(-1))) {
|
||||||
|
this.pushChar(TokenType.Operator)
|
||||||
|
this.next()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (char === c`/` && this.peek() === c`/`) {
|
||||||
|
this.readRegex()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isWordChar(char)) {
|
||||||
|
this.readWordOrIdent(false) // false = didn't start with identifier char
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (char === c`\n`) {
|
||||||
|
if (this.inParen === 0 && this.inBracket === 0)
|
||||||
|
this.pushChar(TokenType.Newline)
|
||||||
|
this.next()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (char === c`;`) {
|
||||||
|
this.pushChar(TokenType.Semicolon)
|
||||||
|
this.next()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
this.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
readComment() {
|
||||||
|
this.start = this.pos - 1
|
||||||
|
while (this.char !== c`\n` && this.char > 0) this.next()
|
||||||
|
this.push(TokenType.Comment)
|
||||||
|
}
|
||||||
|
|
||||||
|
readBracket() {
|
||||||
|
switch (this.char) {
|
||||||
|
case c`(`:
|
||||||
|
this.inParen++
|
||||||
|
this.pushChar(TokenType.OpenParen); break
|
||||||
|
case c`)`:
|
||||||
|
this.inParen--
|
||||||
|
this.pushChar(TokenType.CloseParen); break
|
||||||
|
case c`[`:
|
||||||
|
this.inBracket++
|
||||||
|
this.pushChar(TokenType.OpenBracket); break
|
||||||
|
case c`]`:
|
||||||
|
this.inBracket--
|
||||||
|
this.pushChar(TokenType.CloseBracket); break
|
||||||
|
}
|
||||||
|
this.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
readString(delim: number) {
|
||||||
|
this.start = this.pos - 1
|
||||||
|
this.next() // skip opening delim
|
||||||
|
while (this.char > 0 && (this.char !== delim || (this.char === delim && this.prev === c`\\`)))
|
||||||
|
this.next()
|
||||||
|
this.next() // skip closing delim
|
||||||
|
|
||||||
|
this.push(TokenType.String)
|
||||||
|
}
|
||||||
|
|
||||||
|
readCurlyString() {
|
||||||
|
this.start = this.pos - 1
|
||||||
|
let depth = 1
|
||||||
|
this.next()
|
||||||
|
|
||||||
|
while (depth > 0 && this.char > 0) {
|
||||||
|
if (this.char === c`{`) depth++
|
||||||
|
if (this.char === c`}`) depth--
|
||||||
|
this.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.push(TokenType.String)
|
||||||
|
}
|
||||||
|
|
||||||
|
readWordOrIdent(startedWithIdentChar: boolean) {
|
||||||
|
this.start = this.pos - getCharSize(this.char)
|
||||||
|
|
||||||
|
while (isWordChar(this.char)) {
|
||||||
|
// stop at colon if followed by whitespace (e.g., 'do x: echo x end')
|
||||||
|
if (this.char === c`:`) {
|
||||||
|
const nextCh = this.peek()
|
||||||
|
if (isWhitespace(nextCh) || nextCh === 0) break
|
||||||
|
}
|
||||||
|
|
||||||
|
// stop at equal sign (named arg) - but only if what we've read so far is an identifier
|
||||||
|
if (this.char === c`=`) {
|
||||||
|
const soFar = this.input.slice(this.start, this.pos - getCharSize(this.char))
|
||||||
|
if (isIdentifer(soFar)) {
|
||||||
|
this.next()
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// stop at dot only if it would create a valid property access
|
||||||
|
// AND only if we started with an identifier character (not for Words like README.txt)
|
||||||
|
if (startedWithIdentChar && this.char === c`.`) {
|
||||||
|
const nextCh = this.peek()
|
||||||
|
if (isIdentStart(nextCh) || isDigit(nextCh) || nextCh === c`(`) {
|
||||||
|
const soFar = this.input.slice(this.start, this.pos - getCharSize(this.char))
|
||||||
|
if (isIdentifer(soFar)) break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
const word = this.input.slice(this.start, this.pos - getCharSize(this.char))
|
||||||
|
|
||||||
|
// classify the token based on what we read
|
||||||
|
if (word === '_')
|
||||||
|
this.push(TokenType.Underscore)
|
||||||
|
|
||||||
|
else if (word === 'null')
|
||||||
|
this.push(TokenType.Null)
|
||||||
|
|
||||||
|
else if (word === 'true' || word === 'false')
|
||||||
|
this.push(TokenType.Boolean)
|
||||||
|
|
||||||
|
else if (isKeyword(word))
|
||||||
|
this.push(TokenType.Keyword)
|
||||||
|
|
||||||
|
else if (isOperator(word))
|
||||||
|
this.push(TokenType.Operator)
|
||||||
|
|
||||||
|
else if (isIdentifer(word))
|
||||||
|
this.push(TokenType.Identifier)
|
||||||
|
|
||||||
|
else if (word.endsWith('='))
|
||||||
|
this.push(TokenType.NamedArgPrefix)
|
||||||
|
|
||||||
|
else
|
||||||
|
this.push(TokenType.Word)
|
||||||
|
}
|
||||||
|
|
||||||
|
readNumber() {
|
||||||
|
this.start = this.pos - 1
|
||||||
|
while (isWordChar(this.char)) {
|
||||||
|
// stop at dot unless it's part of the number
|
||||||
|
if (this.char === c`.`) {
|
||||||
|
const nextCh = this.peek()
|
||||||
|
if (!isDigit(nextCh)) break
|
||||||
|
}
|
||||||
|
|
||||||
|
// stop at colon
|
||||||
|
if (this.char === c`:`) {
|
||||||
|
const nextCh = this.peek()
|
||||||
|
if (isWhitespace(nextCh) || nextCh === 0) break
|
||||||
|
}
|
||||||
|
this.next()
|
||||||
|
}
|
||||||
|
const ident = this.input.slice(this.start, this.pos - 1)
|
||||||
|
this.push(isNumber(ident) ? TokenType.Number : TokenType.Word)
|
||||||
|
}
|
||||||
|
|
||||||
|
readRegex() {
|
||||||
|
this.start = this.pos - 1
|
||||||
|
this.next() // skip 2nd /
|
||||||
|
|
||||||
|
while (this.char > 0) {
|
||||||
|
if (this.char === c`/` && this.peek() === c`/`) {
|
||||||
|
this.next() // skip /
|
||||||
|
this.next() // skip /
|
||||||
|
|
||||||
|
// read regex flags
|
||||||
|
while (this.char > 0 && isIdentStart(this.char))
|
||||||
|
this.next()
|
||||||
|
|
||||||
|
// validate regex
|
||||||
|
const to = this.pos - getCharSize(this.char)
|
||||||
|
const regexText = this.input.slice(this.start, to)
|
||||||
|
const [_, pattern, flags] = regexText.match(/^\/\/(.*)\/\/([gimsuy]*)$/) || []
|
||||||
|
|
||||||
|
if (pattern) {
|
||||||
|
try {
|
||||||
|
new RegExp(pattern, flags)
|
||||||
|
this.push(TokenType.Regex)
|
||||||
|
break
|
||||||
|
} catch (e) {
|
||||||
|
// invalid regex - fall through to Word
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// invalid regex is treated as Word
|
||||||
|
this.push(TokenType.Word)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
this.next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
canBeDotGet(lastToken?: Token): boolean {
|
||||||
|
return !this.prevIsWhitespace && !!lastToken &&
|
||||||
|
(lastToken.type === TokenType.Identifier ||
|
||||||
|
lastToken.type === TokenType.Number ||
|
||||||
|
lastToken.type === TokenType.CloseParen ||
|
||||||
|
lastToken.type === TokenType.CloseBracket)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const isNumber = (word: string): boolean => {
|
||||||
|
// regular number
|
||||||
|
if (/^[+-]?\d+(_?\d+)*(\.(\d+(_?\d+)*))?$/.test(word))
|
||||||
|
return true
|
||||||
|
|
||||||
|
// binary
|
||||||
|
if (/^[+-]?0b[01]+(_?[01]+)*(\.[01](_?[01]*))?$/.test(word))
|
||||||
|
return true
|
||||||
|
|
||||||
|
// octal
|
||||||
|
if (/^[+-]?0o[0-7]+(_?[0-7]+)*(\.[0-7](_?[0-7]*))?$/.test(word))
|
||||||
|
return true
|
||||||
|
|
||||||
|
// hex
|
||||||
|
if (/^[+-]?0x[0-9a-f]+([0-9a-f]_?[0-9a-f]+)*(\.([0-9a-f]_?[0-9a-f]*))?$/i.test(word))
|
||||||
|
return true
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const isIdentifer = (s: string): boolean => {
|
||||||
|
if (s.length === 0) return false
|
||||||
|
|
||||||
|
let pos = 0
|
||||||
|
const chars = []
|
||||||
|
while (pos < s.length) {
|
||||||
|
const out = getFullCodePoint(s, pos)
|
||||||
|
pos += getCharSize(out)
|
||||||
|
chars.push(out)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (chars.length === 1)
|
||||||
|
return isIdentStart(chars[0]!)
|
||||||
|
else if (chars.length === 2)
|
||||||
|
return isIdentStart(chars[0]!) && isIdentEnd(chars[1]!)
|
||||||
|
else
|
||||||
|
return isIdentStart(chars[0]!) &&
|
||||||
|
chars.slice(1, chars.length - 1).every(isIdentChar) &&
|
||||||
|
isIdentEnd(chars.at(-1)!)
|
||||||
|
}
|
||||||
|
|
||||||
|
const isStringDelim = (ch: number): boolean => {
|
||||||
|
return ch === c`'` || ch === c`"`
|
||||||
|
}
|
||||||
|
|
||||||
|
export const isIdentStart = (char: number | string): boolean => {
|
||||||
|
let ch = typeof char === 'string' ? char.charCodeAt(0) : char
|
||||||
|
return isLowercaseLetter(ch) || isEmojiOrUnicode(ch) || ch === 36 /* $ */
|
||||||
|
}
|
||||||
|
|
||||||
|
export const isIdentChar = (char: number | string): boolean => {
|
||||||
|
let ch = typeof char === 'string' ? char.charCodeAt(0) : char
|
||||||
|
return isIdentStart(ch) || isDigit(ch) || ch === 45 /* - */ || ch === 63 /* ? */
|
||||||
|
}
|
||||||
|
|
||||||
|
// An identifier may end with any character that is valid mid-identifier
// (letters, digits, '-', '?'); kept as a separate name so the end rule
// can diverge later without touching callers.
const isIdentEnd = (char: number | string): boolean => {
  return isIdentChar(char)
}
|
||||||
|
|
||||||
|
const isLowercaseLetter = (ch: number): boolean => {
|
||||||
|
return ch >= 97 && ch <= 122 // a-z
|
||||||
|
}
|
||||||
|
|
||||||
|
const isDigit = (ch: number): boolean => {
|
||||||
|
return ch >= 48 && ch <= 57 // 0-9
|
||||||
|
}
|
||||||
|
|
||||||
|
const isWhitespace = (ch: number): boolean => {
|
||||||
|
return ch === 32 /* space */ || ch === 9 /* tab */ ||
|
||||||
|
ch === 13 /* \r */ || ch === 10 /* \n */ ||
|
||||||
|
ch === -1 || ch === 0 /* EOF */
|
||||||
|
}
|
||||||
|
|
||||||
|
const isWordChar = (ch: number): boolean => {
|
||||||
|
return (
|
||||||
|
!isWhitespace(ch) &&
|
||||||
|
ch !== 10 /* \n */ &&
|
||||||
|
ch !== 59 /* ; */ &&
|
||||||
|
ch !== 40 /* ( */ &&
|
||||||
|
ch !== 41 /* ) */ &&
|
||||||
|
ch !== 93 /* ] */ &&
|
||||||
|
ch !== -1 /* EOF */
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const isOperator = (word: string): boolean => {
|
||||||
|
return operators.has(word)
|
||||||
|
}
|
||||||
|
|
||||||
|
const isKeyword = (word: string): boolean => {
|
||||||
|
return keywords.has(word)
|
||||||
|
}
|
||||||
|
|
||||||
|
const isBracket = (char: number): boolean => {
|
||||||
|
return char === c`(` || char === c`)` || char === c`[` || char === c`]`
|
||||||
|
}
|
||||||
|
|
||||||
|
const getCharSize = (ch: number) =>
|
||||||
|
(ch > 0xffff ? 2 : 1) // emoji takes 2 UTF-16 code units
|
||||||
|
|
||||||
|
const getFullCodePoint = (input: string, pos: number): number => {
|
||||||
|
const ch = input[pos]?.charCodeAt(0) || 0
|
||||||
|
|
||||||
|
// Check if this is a high surrogate (0xD800-0xDBFF)
|
||||||
|
if (ch >= 0xd800 && ch <= 0xdbff) {
|
||||||
|
const low = input[pos + 1]?.charCodeAt(0) || 0
|
||||||
|
// Check if next is low surrogate (0xDC00-0xDFFF)
|
||||||
|
if (low >= 0xdc00 && low <= 0xdfff) {
|
||||||
|
// Combine surrogate pair into full code point
|
||||||
|
return 0x10000 + ((ch & 0x3ff) << 10) + (low & 0x3ff)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ch
|
||||||
|
}
|
||||||
|
|
||||||
|
const isEmojiOrUnicode = (ch: number): boolean => {
|
||||||
|
return (
|
||||||
|
// Basic Emoticons
|
||||||
|
(ch >= 0x1f600 && ch <= 0x1f64f) ||
|
||||||
|
// Miscellaneous Symbols and Pictographs
|
||||||
|
(ch >= 0x1f300 && ch <= 0x1f5ff) ||
|
||||||
|
// Transport and Map Symbols
|
||||||
|
(ch >= 0x1f680 && ch <= 0x1f6ff) ||
|
||||||
|
// Regional Indicator Symbols (flags)
|
||||||
|
(ch >= 0x1f1e6 && ch <= 0x1f1ff) ||
|
||||||
|
// Miscellaneous Symbols (hearts, stars, weather)
|
||||||
|
(ch >= 0x2600 && ch <= 0x26ff) ||
|
||||||
|
// Dingbats (scissors, pencils, etc)
|
||||||
|
(ch >= 0x2700 && ch <= 0x27bf) ||
|
||||||
|
// Supplemental Symbols and Pictographs (newer emojis)
|
||||||
|
(ch >= 0x1f900 && ch <= 0x1f9ff) ||
|
||||||
|
// Symbols and Pictographs Extended-A (newest emojis)
|
||||||
|
(ch >= 0x1fa70 && ch <= 0x1faff) ||
|
||||||
|
// Various Asian Characters with emoji presentation
|
||||||
|
(ch >= 0x1f018 && ch <= 0x1f270) ||
|
||||||
|
// Variation Selectors (for emoji presentation)
|
||||||
|
(ch >= 0xfe00 && ch <= 0xfe0f) ||
|
||||||
|
// Additional miscellaneous items
|
||||||
|
(ch >= 0x238c && ch <= 0x2454) ||
|
||||||
|
// Combining Diacritical Marks for Symbols
|
||||||
|
(ch >= 0x20d0 && ch <= 0x20ff) ||
|
||||||
|
// Latin-1 Supplement (includes ², ³, ¹ and other special chars)
|
||||||
|
(ch >= 0x00a0 && ch <= 0x00ff) ||
|
||||||
|
// Greek and Coptic (U+0370-U+03FF)
|
||||||
|
(ch >= 0x0370 && ch <= 0x03ff) ||
|
||||||
|
// Mathematical Alphanumeric Symbols (U+1D400-U+1D7FF)
|
||||||
|
(ch >= 0x1d400 && ch <= 0x1d7ff) ||
|
||||||
|
// Mathematical Operators (U+2200-U+22FF)
|
||||||
|
(ch >= 0x2200 && ch <= 0x22ff) ||
|
||||||
|
// Superscripts and Subscripts (U+2070-U+209F)
|
||||||
|
(ch >= 0x2070 && ch <= 0x209f) ||
|
||||||
|
// Arrows (U+2190-U+21FF)
|
||||||
|
(ch >= 0x2190 && ch <= 0x21ff) ||
|
||||||
|
// Hiragana (U+3040-U+309F)
|
||||||
|
(ch >= 0x3040 && ch <= 0x309f) ||
|
||||||
|
// Katakana (U+30A0-U+30FF)
|
||||||
|
(ch >= 0x30a0 && ch <= 0x30ff) ||
|
||||||
|
// CJK Unified Ideographs (U+4E00-U+9FFF)
|
||||||
|
(ch >= 0x4e00 && ch <= 0x9fff)
|
||||||
|
)
|
||||||
|
}
|
||||||
12
src/prelude/date.ts
Normal file
12
src/prelude/date.ts
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
export const date = {
|
||||||
|
now: () => Date.now(),
|
||||||
|
year: (time: number) => (new Date(time)).getFullYear(),
|
||||||
|
month: (time: number) => (new Date(time)).getMonth(),
|
||||||
|
date: (time: number) => (new Date(time)).getDate(),
|
||||||
|
hour: (time: number) => (new Date(time)).getHours(),
|
||||||
|
minute: (time: number) => (new Date(time)).getMinutes(),
|
||||||
|
second: (time: number) => (new Date(time)).getSeconds(),
|
||||||
|
ms: (time: number) => (new Date(time)).getMilliseconds(),
|
||||||
|
new: (year: number, month: number, day: number, hour = 0, minute = 0, second = 0, ms = 0) =>
|
||||||
|
new Date(year, month, day, hour, minute, second, ms).getTime()
|
||||||
|
}
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
import { type Value, toString, toValue } from 'reefvm'
|
import { type Value, toString } from 'reefvm'
|
||||||
|
|
||||||
export const dict = {
|
export const dict = {
|
||||||
keys: (dict: Record<string, any>) => Object.keys(dict),
|
keys: (dict: Record<string, any>) => Object.keys(dict),
|
||||||
|
|
|
||||||
128
src/prelude/fs.ts
Normal file
128
src/prelude/fs.ts
Normal file
|
|
@ -0,0 +1,128 @@
|
||||||
|
import { join, resolve, basename, dirname, extname } from 'path'
|
||||||
|
import {
|
||||||
|
readdirSync, mkdirSync, rmdirSync,
|
||||||
|
readFileSync, writeFileSync, appendFileSync,
|
||||||
|
rmSync, copyFileSync,
|
||||||
|
statSync, lstatSync, chmodSync, symlinkSync, readlinkSync,
|
||||||
|
watch
|
||||||
|
} from "fs"
|
||||||
|
|
||||||
|
export const fs = {
|
||||||
|
// Directory operations
|
||||||
|
ls: (path: string) => readdirSync(path),
|
||||||
|
mkdir: (path: string) => mkdirSync(path, { recursive: true }),
|
||||||
|
rmdir: (path: string) => rmdirSync(path === '/' || path === '' ? '/tmp/*' : path, { recursive: true }),
|
||||||
|
pwd: () => process.cwd(),
|
||||||
|
cd: (path: string) => process.chdir(path),
|
||||||
|
|
||||||
|
// Reading
|
||||||
|
read: (path: string) => readFileSync(path, 'utf-8'),
|
||||||
|
cat: (path: string) => { }, // added below
|
||||||
|
'read-bytes': (path: string) => [...readFileSync(path)],
|
||||||
|
|
||||||
|
// Writing
|
||||||
|
write: (path: string, content: string) => writeFileSync(path, content),
|
||||||
|
append: (path: string, content: string) => appendFileSync(path, content),
|
||||||
|
|
||||||
|
// File operations
|
||||||
|
delete: (path: string) => rmSync(path),
|
||||||
|
rm: (path: string) => { }, // added below
|
||||||
|
copy: (from: string, to: string) => copyFileSync(from, to),
|
||||||
|
move: (from: string, to: string) => {
|
||||||
|
fs.copy(from, to)
|
||||||
|
fs.rm(from)
|
||||||
|
},
|
||||||
|
mv: (from: string, to: string) => { }, // added below
|
||||||
|
|
||||||
|
// Path operations
|
||||||
|
basename: (path: string) => basename(path),
|
||||||
|
dirname: (path: string) => dirname(path),
|
||||||
|
extname: (path: string) => extname(path),
|
||||||
|
join: (...paths: string[]) => join(...paths),
|
||||||
|
resolve: (...paths: string[]) => resolve(...paths),
|
||||||
|
|
||||||
|
// File info
|
||||||
|
stat: (path: string) => {
|
||||||
|
try {
|
||||||
|
const stats = statSync(path)
|
||||||
|
const record = Object.fromEntries(Object.entries(stats))
|
||||||
|
record['atime'] = record['atimeMs']
|
||||||
|
record['ctime'] = record['ctimeMs']
|
||||||
|
record['mtime'] = record['mtimeMs']
|
||||||
|
|
||||||
|
delete record['atimeMs']
|
||||||
|
delete record['ctimeMs']
|
||||||
|
delete record['mtimeMs']
|
||||||
|
|
||||||
|
return record
|
||||||
|
} catch {
|
||||||
|
return {}
|
||||||
|
}
|
||||||
|
|
||||||
|
},
|
||||||
|
'exists?': (path: string) => {
|
||||||
|
try {
|
||||||
|
statSync(path)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'file?': (path: string) => {
|
||||||
|
try { return statSync(path).isFile() }
|
||||||
|
catch { return false }
|
||||||
|
},
|
||||||
|
'dir?': (path: string) => {
|
||||||
|
try { return statSync(path).isDirectory() }
|
||||||
|
catch { return false }
|
||||||
|
},
|
||||||
|
'symlink?': (path: string) => {
|
||||||
|
try { return lstatSync(path).isSymbolicLink() }
|
||||||
|
catch { return false }
|
||||||
|
},
|
||||||
|
'exec?': (path: string) => {
|
||||||
|
try {
|
||||||
|
const stats = statSync(path)
|
||||||
|
return !!(stats.mode & 0o111)
|
||||||
|
}
|
||||||
|
catch { return false }
|
||||||
|
},
|
||||||
|
size: (path: string) => {
|
||||||
|
try { return statSync(path).size }
|
||||||
|
catch { return 0 }
|
||||||
|
},
|
||||||
|
|
||||||
|
// Permissions
|
||||||
|
chmod: (path: string, mode: number | string) => {
|
||||||
|
const numMode = typeof mode === 'string' ? parseInt(mode, 8) : mode
|
||||||
|
chmodSync(path, numMode)
|
||||||
|
},
|
||||||
|
|
||||||
|
// Symlinks
|
||||||
|
symlink: (target: string, path: string) => symlinkSync(target, path),
|
||||||
|
readlink: (path: string) => readlinkSync(path, 'utf-8'),
|
||||||
|
|
||||||
|
// Other
|
||||||
|
glob: (pattern: string) => {
|
||||||
|
const dir = pattern.substring(0, pattern.lastIndexOf('/'))
|
||||||
|
const match = pattern.substring(pattern.lastIndexOf('/') + 1)
|
||||||
|
|
||||||
|
if (!match.includes('*')) throw new Error('only * patterns supported')
|
||||||
|
|
||||||
|
const ext = match.split('*').pop()!
|
||||||
|
return readdirSync(dir)
|
||||||
|
.filter((f) => f.endsWith(ext))
|
||||||
|
.map((f) => join(dir, f))
|
||||||
|
|
||||||
|
},
|
||||||
|
|
||||||
|
watch: (path: string, callback: Function) =>
|
||||||
|
watch(path, (event, filename) => callback(event, filename)),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
; (fs as any).cat = fs.read
|
||||||
|
; (fs as any).mv = fs.move
|
||||||
|
; (fs as any).cp = fs.copy
|
||||||
|
; (fs as any).rm = fs.delete
|
||||||
|
|
@ -1,23 +1,44 @@
|
||||||
// The prelude creates all the builtin Shrimp functions.
|
// The prelude creates all the builtin Shrimp functions.
|
||||||
|
|
||||||
|
import { join, resolve } from 'path'
|
||||||
import {
|
import {
|
||||||
type Value, toValue,
|
type Value, type VM, toValue,
|
||||||
extractParamInfo, isWrapped, getOriginalFunction,
|
extractParamInfo, isWrapped, getOriginalFunction,
|
||||||
} from 'reefvm'
|
} from 'reefvm'
|
||||||
|
|
||||||
|
import { date } from './date'
|
||||||
import { dict } from './dict'
|
import { dict } from './dict'
|
||||||
|
import { fs } from './fs'
|
||||||
|
import { json } from './json'
|
||||||
import { load } from './load'
|
import { load } from './load'
|
||||||
import { list } from './list'
|
import { list } from './list'
|
||||||
import { math } from './math'
|
import { math } from './math'
|
||||||
import { str } from './str'
|
import { str } from './str'
|
||||||
|
import { types } from './types'
|
||||||
|
|
||||||
export const globals = {
|
export const globals: Record<string, any> = {
|
||||||
|
date,
|
||||||
dict,
|
dict,
|
||||||
|
fs,
|
||||||
|
json,
|
||||||
load,
|
load,
|
||||||
list,
|
list,
|
||||||
math,
|
math,
|
||||||
str,
|
str,
|
||||||
|
|
||||||
|
// shrimp runtime info
|
||||||
|
$: {
|
||||||
|
args: Bun.argv.slice(3),
|
||||||
|
argv: Bun.argv.slice(1),
|
||||||
|
env: process.env,
|
||||||
|
pid: process.pid,
|
||||||
|
cwd: process.env.PWD,
|
||||||
|
script: {
|
||||||
|
name: Bun.argv[2] || '(shrimp)',
|
||||||
|
path: resolve(join('.', Bun.argv[2] ?? ''))
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
// hello
|
// hello
|
||||||
echo: (...args: any[]) => {
|
echo: (...args: any[]) => {
|
||||||
console.log(...args.map(a => {
|
console.log(...args.map(a => {
|
||||||
|
|
@ -34,30 +55,41 @@ export const globals = {
|
||||||
const val = toValue(v)
|
const val = toValue(v)
|
||||||
return `#<${val.type}: ${formatValue(val)}>`
|
return `#<${val.type}: ${formatValue(val)}>`
|
||||||
},
|
},
|
||||||
length: (v: any) => {
|
var: function (this: VM, v: any) {
|
||||||
const value = toValue(v)
|
return typeof v === 'string' ? this.scope.get(v) : v
|
||||||
switch (value.type) {
|
},
|
||||||
case 'string': case 'array': return value.value.length
|
'var?': function (this: VM, v: string) {
|
||||||
case 'dict': return value.value.size
|
return typeof v !== 'string' || this.scope.has(v)
|
||||||
default: return 0
|
},
|
||||||
|
ref: (fn: Function) => fn,
|
||||||
|
import: function (this: VM, atNamed: Record<any, string | string[]> = {}, ...idents: string[]) {
|
||||||
|
const onlyArray = Array.isArray(atNamed.only) ? atNamed.only : [atNamed.only].filter(a => a)
|
||||||
|
const only = new Set(onlyArray)
|
||||||
|
const wantsOnly = only.size > 0
|
||||||
|
|
||||||
|
|
||||||
|
for (const ident of idents) {
|
||||||
|
const module = this.get(ident)
|
||||||
|
|
||||||
|
if (!module) throw new Error(`import: can't find ${ident}`)
|
||||||
|
if (module.type !== 'dict') throw new Error(`import: can't import ${module.type}`)
|
||||||
|
|
||||||
|
for (const [name, value] of module.value.entries()) {
|
||||||
|
if (value.type === 'dict') throw new Error(`import: can't import dicts in dicts`)
|
||||||
|
if (wantsOnly && !only.has(name)) continue
|
||||||
|
this.set(name, value)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
|
// env
|
||||||
|
exit: (num: number) => process.exit(num ?? 0),
|
||||||
|
|
||||||
// type predicates
|
// type predicates
|
||||||
'string?': (v: any) => toValue(v).type === 'string',
|
|
||||||
'number?': (v: any) => toValue(v).type === 'number',
|
|
||||||
'boolean?': (v: any) => toValue(v).type === 'boolean',
|
|
||||||
'array?': (v: any) => toValue(v).type === 'array',
|
|
||||||
'dict?': (v: any) => toValue(v).type === 'dict',
|
|
||||||
'function?': (v: any) => {
|
|
||||||
const t = toValue(v).type
|
|
||||||
return t === 'function' || t === 'native'
|
|
||||||
},
|
|
||||||
'null?': (v: any) => toValue(v).type === 'null',
|
|
||||||
'some?': (v: any) => toValue(v).type !== 'null',
|
'some?': (v: any) => toValue(v).type !== 'null',
|
||||||
|
|
||||||
// boolean/logic
|
// boolean/logic
|
||||||
not: (v: any) => !v,
|
bnot: (n: number) => ~(n | 0),
|
||||||
|
|
||||||
// utilities
|
// utilities
|
||||||
inc: (n: number) => n + 1,
|
inc: (n: number) => n + 1,
|
||||||
|
|
@ -65,7 +97,32 @@ export const globals = {
|
||||||
identity: (v: any) => v,
|
identity: (v: any) => v,
|
||||||
|
|
||||||
// collections
|
// collections
|
||||||
at: (collection: any, index: number | string) => collection[index],
|
length: (v: any) => {
|
||||||
|
const value = toValue(v)
|
||||||
|
switch (value.type) {
|
||||||
|
case 'string': case 'array': return value.value.length
|
||||||
|
case 'dict': return value.value.size
|
||||||
|
default: throw new Error(`length: expected string, array, or dict, got ${value.type}`)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
at: (collection: any, index: number | string) => {
|
||||||
|
const value = toValue(collection)
|
||||||
|
if (value.type === 'string' || value.type === 'array') {
|
||||||
|
const idx = typeof index === 'number' ? index : parseInt(index as string)
|
||||||
|
if (idx < 0 || idx >= value.value.length) {
|
||||||
|
throw new Error(`at: index ${idx} out of bounds for ${value.type} of length ${value.value.length}`)
|
||||||
|
}
|
||||||
|
return value.value[idx]
|
||||||
|
} else if (value.type === 'dict') {
|
||||||
|
const key = String(index)
|
||||||
|
if (!value.value.has(key)) {
|
||||||
|
throw new Error(`at: key '${key}' not found in dict`)
|
||||||
|
}
|
||||||
|
return value.value.get(key)
|
||||||
|
} else {
|
||||||
|
throw new Error(`at: expected string, array, or dict, got ${value.type}`)
|
||||||
|
}
|
||||||
|
},
|
||||||
range: (start: number, end: number | null) => {
|
range: (start: number, end: number | null) => {
|
||||||
if (end === null) {
|
if (end === null) {
|
||||||
end = start
|
end = start
|
||||||
|
|
@ -125,8 +182,8 @@ export function formatValue(value: Value, inner = false): string {
|
||||||
return `${colors.blue}[${colors.reset}${items}${colors.blue}]${colors.reset}`
|
return `${colors.blue}[${colors.reset}${items}${colors.blue}]${colors.reset}`
|
||||||
}
|
}
|
||||||
case 'dict': {
|
case 'dict': {
|
||||||
const entries = Array.from(value.value.entries())
|
const entries = Array.from(value.value.entries()).reverse()
|
||||||
.map(([k, v]) => `${k}${colors.blue}=${colors.reset}${formatValue(v, true)}`)
|
.map(([k, v]) => `${k.trim()}${colors.blue}=${colors.reset}${formatValue(v, true)}`)
|
||||||
.join(' ')
|
.join(' ')
|
||||||
if (entries.length === 0)
|
if (entries.length === 0)
|
||||||
return `${colors.blue}[=]${colors.reset}`
|
return `${colors.blue}[=]${colors.reset}`
|
||||||
|
|
@ -146,4 +203,8 @@ export function formatValue(value: Value, inner = false): string {
|
||||||
default:
|
default:
|
||||||
return String(value)
|
return String(value)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// add types functions to top-level namespace
|
||||||
|
for (const [key, value] of Object.entries(types))
|
||||||
|
globals[key] = value
|
||||||
7
src/prelude/json.ts
Normal file
7
src/prelude/json.ts
Normal file
|
|
@ -0,0 +1,7 @@
|
||||||
|
export const json = {
|
||||||
|
encode: (s: any) => JSON.stringify(s),
|
||||||
|
decode: (s: string) => JSON.parse(s),
|
||||||
|
}
|
||||||
|
|
||||||
|
// JS-flavored aliases so familiar names work too
; (json as any).parse = json.decode
; (json as any).stringify = json.encode
|
||||||
|
|
@ -1,5 +1,7 @@
|
||||||
|
import { type Value, toValue, toNull } from 'reefvm'
|
||||||
|
|
||||||
export const list = {
|
export const list = {
|
||||||
slice: (list: any[], start: number, end?: number) => list.slice(start, end),
|
slice: (list: any[], start: number, end?: number) => list.slice(start, end ? end : undefined),
|
||||||
map: async (list: any[], cb: Function) => {
|
map: async (list: any[], cb: Function) => {
|
||||||
let acc: any[] = []
|
let acc: any[] = []
|
||||||
for (const value of list) acc.push(await cb(value))
|
for (const value of list) acc.push(await cb(value))
|
||||||
|
|
@ -12,6 +14,13 @@ export const list = {
|
||||||
}
|
}
|
||||||
return acc
|
return acc
|
||||||
},
|
},
|
||||||
|
reject: async (list: any[], cb: Function) => {
|
||||||
|
let acc: any[] = []
|
||||||
|
for (const value of list) {
|
||||||
|
if (!(await cb(value))) acc.push(value)
|
||||||
|
}
|
||||||
|
return acc
|
||||||
|
},
|
||||||
reduce: async (list: any[], cb: Function, initial: any) => {
|
reduce: async (list: any[], cb: Function, initial: any) => {
|
||||||
let acc = initial
|
let acc = initial
|
||||||
for (const value of list) acc = await cb(acc, value)
|
for (const value of list) acc = await cb(acc, value)
|
||||||
|
|
@ -27,6 +36,8 @@ export const list = {
|
||||||
// predicates
|
// predicates
|
||||||
'empty?': (list: any[]) => list.length === 0,
|
'empty?': (list: any[]) => list.length === 0,
|
||||||
'contains?': (list: any[], item: any) => list.includes(item),
|
'contains?': (list: any[], item: any) => list.includes(item),
|
||||||
|
'includes?': (list: any[], item: any) => list.includes(item),
|
||||||
|
'has?': (list: any[], item: any) => list.includes(item),
|
||||||
'any?': async (list: any[], cb: Function) => {
|
'any?': async (list: any[], cb: Function) => {
|
||||||
for (const value of list) {
|
for (const value of list) {
|
||||||
if (await cb(value)) return true
|
if (await cb(value)) return true
|
||||||
|
|
@ -40,9 +51,47 @@ export const list = {
|
||||||
return true
|
return true
|
||||||
},
|
},
|
||||||
|
|
||||||
|
// mutating
|
||||||
|
push: (list: Value, item: Value) => {
|
||||||
|
if (list.type !== 'array') return toNull()
|
||||||
|
return toValue(list.value.push(item))
|
||||||
|
},
|
||||||
|
pop: (list: Value) => {
|
||||||
|
if (list.type !== 'array') return toNull()
|
||||||
|
return toValue(list.value.pop())
|
||||||
|
},
|
||||||
|
shift: (list: Value) => {
|
||||||
|
if (list.type !== 'array') return toNull()
|
||||||
|
return toValue(list.value.shift())
|
||||||
|
},
|
||||||
|
unshift: (list: Value, item: Value) => {
|
||||||
|
if (list.type !== 'array') return toNull()
|
||||||
|
return toValue(list.value.unshift(item))
|
||||||
|
},
|
||||||
|
splice: (list: Value, start: Value, deleteCount: Value, ...items: Value[]) => {
|
||||||
|
const realList = list.value as any[]
|
||||||
|
const realStart = start.value as number
|
||||||
|
const realDeleteCount = deleteCount.value as number
|
||||||
|
return toValue(realList.splice(realStart, realDeleteCount, ...items))
|
||||||
|
},
|
||||||
|
insert: (list: Value, index: Value, item: Value) => {
|
||||||
|
if (list.type !== 'array') return toNull()
|
||||||
|
const realList = list.value as any[]
|
||||||
|
const realIndex = index.value as number
|
||||||
|
realList.splice(realIndex, 0, item)
|
||||||
|
return toValue(realList.length)
|
||||||
|
},
|
||||||
|
|
||||||
// sequence operations
|
// sequence operations
|
||||||
reverse: (list: any[]) => list.slice().reverse(),
|
reverse: (list: any[]) => list.slice().reverse(),
|
||||||
sort: (list: any[], cb?: (a: any, b: any) => number) => list.slice().sort(cb),
|
sort: async (list: any[], cb?: (a: any, b: any) => number) => {
|
||||||
|
const arr = [...list]
|
||||||
|
if (!cb) return arr.sort()
|
||||||
|
for (let i = 0; i < arr.length; i++)
|
||||||
|
for (let j = i + 1; j < arr.length; j++)
|
||||||
|
if ((await cb(arr[i], arr[j])) > 0) [arr[i], arr[j]] = [arr[j], arr[i]]
|
||||||
|
return arr
|
||||||
|
},
|
||||||
concat: (...lists: any[][]) => lists.flat(1),
|
concat: (...lists: any[][]) => lists.flat(1),
|
||||||
flatten: (list: any[], depth: number = 1) => list.flat(depth),
|
flatten: (list: any[], depth: number = 1) => list.flat(depth),
|
||||||
unique: (list: any[]) => Array.from(new Set(list)),
|
unique: (list: any[]) => Array.from(new Set(list)),
|
||||||
|
|
@ -52,8 +101,14 @@ export const list = {
|
||||||
first: (list: any[]) => list[0] ?? null,
|
first: (list: any[]) => list[0] ?? null,
|
||||||
last: (list: any[]) => list[list.length - 1] ?? null,
|
last: (list: any[]) => list[list.length - 1] ?? null,
|
||||||
rest: (list: any[]) => list.slice(1),
|
rest: (list: any[]) => list.slice(1),
|
||||||
take: (list: any[], n: number) => list.slice(0, n),
|
take: (list: any[], n: number) => {
|
||||||
drop: (list: any[], n: number) => list.slice(n),
|
if (n < 0) throw new Error(`take: count must be non-negative, got ${n}`)
|
||||||
|
return list.slice(0, n)
|
||||||
|
},
|
||||||
|
drop: (list: any[], n: number) => {
|
||||||
|
if (n < 0) throw new Error(`drop: count must be non-negative, got ${n}`)
|
||||||
|
return list.slice(n)
|
||||||
|
},
|
||||||
append: (list: any[], item: any) => [...list, item],
|
append: (list: any[], item: any) => [...list, item],
|
||||||
prepend: (list: any[], item: any) => [item, ...list],
|
prepend: (list: any[], item: any) => [item, ...list],
|
||||||
'index-of': (list: any[], item: any) => list.indexOf(item),
|
'index-of': (list: any[], item: any) => list.indexOf(item),
|
||||||
|
|
@ -86,4 +141,14 @@ export const list = {
|
||||||
}
|
}
|
||||||
return groups
|
return groups
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// raw functions deal directly in Value types, meaning we can modify collection
|
||||||
|
// careful - they MUST return a Value!
|
||||||
|
; (list.splice as any).raw = true
|
||||||
|
; (list.push as any).raw = true
|
||||||
|
; (list.pop as any).raw = true
|
||||||
|
; (list.shift as any).raw = true
|
||||||
|
; (list.unshift as any).raw = true
|
||||||
|
; (list.insert as any).raw = true
|
||||||
|
|
@ -7,7 +7,9 @@ export const load = async function (this: VM, path: string): Promise<Record<stri
|
||||||
const scope = this.scope
|
const scope = this.scope
|
||||||
const pc = this.pc
|
const pc = this.pc
|
||||||
|
|
||||||
const fullPath = resolve(path) + '.sh'
|
let fullPath = resolve(path)
|
||||||
|
if (!path.includes('.')) fullPath += '.sh'
|
||||||
|
|
||||||
const code = readFileSync(fullPath, 'utf-8')
|
const code = readFileSync(fullPath, 'utf-8')
|
||||||
|
|
||||||
this.pc = this.instructions.length
|
this.pc = this.instructions.length
|
||||||
|
|
|
||||||
|
|
@ -3,12 +3,27 @@ export const math = {
|
||||||
floor: (n: number) => Math.floor(n),
|
floor: (n: number) => Math.floor(n),
|
||||||
ceil: (n: number) => Math.ceil(n),
|
ceil: (n: number) => Math.ceil(n),
|
||||||
round: (n: number) => Math.round(n),
|
round: (n: number) => Math.round(n),
|
||||||
min: (...nums: number[]) => Math.min(...nums),
|
min: (...nums: number[]) => {
|
||||||
max: (...nums: number[]) => Math.max(...nums),
|
if (nums.length === 0) throw new Error('min: expected at least one argument')
|
||||||
|
return Math.min(...nums)
|
||||||
|
},
|
||||||
|
max: (...nums: number[]) => {
|
||||||
|
if (nums.length === 0) throw new Error('max: expected at least one argument')
|
||||||
|
return Math.max(...nums)
|
||||||
|
},
|
||||||
pow: (base: number, exp: number) => Math.pow(base, exp),
|
pow: (base: number, exp: number) => Math.pow(base, exp),
|
||||||
sqrt: (n: number) => Math.sqrt(n),
|
sqrt: (n: number) => {
|
||||||
random: () => Math.random(),
|
if (n < 0) throw new Error(`sqrt: cannot take square root of negative number ${n}`)
|
||||||
clamp: (n: number, min: number, max: number) => Math.min(Math.max(n, min), max),
|
return Math.sqrt(n)
|
||||||
|
},
|
||||||
|
random: (min = 0, max = 1) => {
|
||||||
|
if (min === 0 && max === 1) return Math.random()
|
||||||
|
return Math.floor(Math.random() * (max - min + 1)) + min
|
||||||
|
},
|
||||||
|
clamp: (n: number, min: number, max: number) => {
|
||||||
|
if (min > max) throw new Error(`clamp: min (${min}) must be less than or equal to max (${max})`)
|
||||||
|
return Math.min(Math.max(n, min), max)
|
||||||
|
},
|
||||||
sign: (n: number) => Math.sign(n),
|
sign: (n: number) => Math.sign(n),
|
||||||
trunc: (n: number) => Math.trunc(n),
|
trunc: (n: number) => Math.trunc(n),
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,33 +1,47 @@
|
||||||
// strings
|
// strings
|
||||||
export const str = {
|
export const str = {
|
||||||
join: (arr: string[], sep: string = ',') => arr.join(sep),
|
join: (arr: string[], sep: string = ',') => arr.join(sep),
|
||||||
split: (str: string, sep: string = ',') => str.split(sep),
|
split: (str: string, sep: string = ',') => String(str ?? '').split(sep),
|
||||||
'to-upper': (str: string) => str.toUpperCase(),
|
'to-upper': (str: string) => String(str ?? '').toUpperCase(),
|
||||||
'to-lower': (str: string) => str.toLowerCase(),
|
'to-lower': (str: string) => String(str ?? '').toLowerCase(),
|
||||||
trim: (str: string) => str.trim(),
|
trim: (str: string) => String(str ?? '').trim(),
|
||||||
|
|
||||||
// predicates
|
// predicates
|
||||||
'starts-with?': (str: string, prefix: string) => str.startsWith(prefix),
|
'starts-with?': (str: string, prefix: string) => String(str ?? '').startsWith(prefix),
|
||||||
'ends-with?': (str: string, suffix: string) => str.endsWith(suffix),
|
'ends-with?': (str: string, suffix: string) => String(str ?? '').endsWith(suffix),
|
||||||
'contains?': (str: string, substr: string) => str.includes(substr),
|
'contains?': (str: string, substr: string) => String(str ?? '').includes(substr),
|
||||||
'empty?': (str: string) => str.length === 0,
|
'empty?': (str: string) => String(str ?? '').length === 0,
|
||||||
|
|
||||||
// inspection
|
// inspection
|
||||||
'index-of': (str: string, search: string) => str.indexOf(search),
|
'index-of': (str: string, search: string) => String(str ?? '').indexOf(search),
|
||||||
'last-index-of': (str: string, search: string) => str.lastIndexOf(search),
|
'last-index-of': (str: string, search: string) => String(str ?? '').lastIndexOf(search),
|
||||||
|
|
||||||
// transformations
|
// transformations
|
||||||
replace: (str: string, search: string, replacement: string) => str.replace(search, replacement),
|
replace: (str: string, search: string, replacement: string) => String(str ?? '').replace(search, replacement),
|
||||||
'replace-all': (str: string, search: string, replacement: string) => str.replaceAll(search, replacement),
|
'replace-all': (str: string, search: string, replacement: string) => String(str ?? '').replaceAll(search, replacement),
|
||||||
slice: (str: string, start: number, end?: number | null) => str.slice(start, end ?? undefined),
|
slice: (str: string, start: number, end?: number | null) => String(str ?? '').slice(start, end ?? undefined),
|
||||||
substring: (str: string, start: number, end?: number | null) => str.substring(start, end ?? undefined),
|
substring: (str: string, start: number, end?: number | null) => String(str ?? '').substring(start, end ?? undefined),
|
||||||
repeat: (str: string, count: number) => str.repeat(count),
|
repeat: (str: string, count: number) => {
|
||||||
'pad-start': (str: string, length: number, pad: string = ' ') => str.padStart(length, pad),
|
if (count < 0) throw new Error(`repeat: count must be non-negative, got ${count}`)
|
||||||
'pad-end': (str: string, length: number, pad: string = ' ') => str.padEnd(length, pad),
|
if (!Number.isInteger(count)) throw new Error(`repeat: count must be an integer, got ${count}`)
|
||||||
lines: (str: string) => str.split('\n'),
|
return String(str ?? '').repeat(count)
|
||||||
chars: (str: string) => str.split(''),
|
},
|
||||||
|
'pad-start': (str: string, length: number, pad: string = ' ') => String(str ?? '').padStart(length, pad),
|
||||||
|
'pad-end': (str: string, length: number, pad: string = ' ') => String(str ?? '').padEnd(length, pad),
|
||||||
|
capitalize: (str: string) => {
|
||||||
|
const s = String(str ?? '')
|
||||||
|
return s.charAt(0).toUpperCase() + s.slice(1).toLowerCase()
|
||||||
|
},
|
||||||
|
titlecase: (s: string) => {
|
||||||
|
return String(s ?? '')
|
||||||
|
.split(' ')
|
||||||
|
.map(str.capitalize)
|
||||||
|
.join(' ')
|
||||||
|
},
|
||||||
|
lines: (str: string) => String(str ?? '').split('\n'),
|
||||||
|
chars: (str: string) => String(str ?? '').split(''),
|
||||||
|
|
||||||
// regex
|
// regex
|
||||||
match: (str: string, regex: RegExp) => str.match(regex),
|
match: (str: string, regex: RegExp) => String(str ?? '').match(regex),
|
||||||
'test?': (str: string, regex: RegExp) => regex.test(str),
|
'test?': (str: string, regex: RegExp) => regex.test(String(str ?? '')),
|
||||||
}
|
}
|
||||||
170
src/prelude/tests/date.test.ts
Normal file
170
src/prelude/tests/date.test.ts
Normal file
|
|
@ -0,0 +1,170 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('date', () => {
|
||||||
|
test('date.now returns current timestamp', () => {
|
||||||
|
expect(`date.now | number?`).toEvaluateTo(true)
|
||||||
|
|
||||||
|
expect(`(date.now) > 1577836800000`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.new creates timestamp from components', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1 12 0 0 500
|
||||||
|
[
|
||||||
|
(date.year t)
|
||||||
|
(date.month t)
|
||||||
|
(date.date t)
|
||||||
|
(date.hour t)
|
||||||
|
(date.minute t)
|
||||||
|
(date.second t)
|
||||||
|
(date.ms t)
|
||||||
|
]
|
||||||
|
`).toEvaluateTo([2024, 0, 1, 12, 0, 0, 500])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.new with minimal arguments', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 5 15
|
||||||
|
[
|
||||||
|
(date.year t)
|
||||||
|
(date.month t)
|
||||||
|
(date.date t)
|
||||||
|
(date.hour t)
|
||||||
|
(date.minute t)
|
||||||
|
(date.second t)
|
||||||
|
(date.ms t)
|
||||||
|
]
|
||||||
|
`).toEvaluateTo([2024, 5, 15, 0, 0, 0, 0])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.year extracts year', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1
|
||||||
|
date.year t
|
||||||
|
`).toEvaluateTo(2024)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
t = date.new 1999 11 31
|
||||||
|
date.year t
|
||||||
|
`).toEvaluateTo(1999)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.month extracts month (0-indexed)', () => {
|
||||||
|
// January = 0, December = 11
|
||||||
|
expect(`
|
||||||
|
jan = date.new 2024 0 1
|
||||||
|
dec = date.new 2024 11 31
|
||||||
|
[(date.month jan) (date.month dec)]
|
||||||
|
`).toEvaluateTo([0, 11])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.date extracts day of month', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 5 15
|
||||||
|
date.date t
|
||||||
|
`).toEvaluateTo(15)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
date.new 2024 0 1 | date.date
|
||||||
|
`).toEvaluateTo(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.hour extracts hour', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1 14 30 45
|
||||||
|
date.hour t
|
||||||
|
`).toEvaluateTo(14)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1 0 0 0
|
||||||
|
date.hour t
|
||||||
|
`).toEvaluateTo(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.minute extracts minute', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1 14 30 45
|
||||||
|
date.minute t
|
||||||
|
`).toEvaluateTo(30)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.second extracts second', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1 14 30 45
|
||||||
|
date.second t
|
||||||
|
`).toEvaluateTo(45)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('date.ms extracts milliseconds', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1 14 30 45 250
|
||||||
|
date.ms t
|
||||||
|
`).toEvaluateTo(250)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('round-trip: create and extract components', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 6 4 15 30 45 123
|
||||||
|
year = date.year t
|
||||||
|
month = date.month t
|
||||||
|
day = date.date t
|
||||||
|
hour = date.hour t
|
||||||
|
min = date.minute t
|
||||||
|
sec = date.second t
|
||||||
|
ms = date.ms t
|
||||||
|
[year month day hour min sec ms]
|
||||||
|
`).toEvaluateTo([2024, 6, 4, 15, 30, 45, 123])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('edge cases - midnight', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1 0 0 0 0
|
||||||
|
[
|
||||||
|
(date.hour t)
|
||||||
|
(date.minute t)
|
||||||
|
(date.second t)
|
||||||
|
(date.ms t)
|
||||||
|
]
|
||||||
|
`).toEvaluateTo([0, 0, 0, 0])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('edge cases - end of day', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 0 1 23 59 59 999
|
||||||
|
[
|
||||||
|
(date.hour t)
|
||||||
|
(date.minute t)
|
||||||
|
(date.second t)
|
||||||
|
(date.ms t)
|
||||||
|
]
|
||||||
|
`).toEvaluateTo([23, 59, 59, 999])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('edge cases - leap year', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 1 29
|
||||||
|
[
|
||||||
|
(date.year t)
|
||||||
|
(date.month t)
|
||||||
|
(date.date t)
|
||||||
|
]
|
||||||
|
`).toEvaluateTo([2024, 1, 29])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('combining date functions with arithmetic', () => {
|
||||||
|
expect(`
|
||||||
|
t = date.new 2024 5 15 10 30 0
|
||||||
|
next-hour = date.new 2024 5 15 11 30 0
|
||||||
|
(date.hour next-hour) - (date.hour t)
|
||||||
|
`).toEvaluateTo(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('using date.now in calculations', () => {
|
||||||
|
// Check that date.now is in the past compared to a future timestamp
|
||||||
|
expect(`
|
||||||
|
now = (date.now)
|
||||||
|
future = date.new 2030 0 1
|
||||||
|
future > now
|
||||||
|
`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
329
src/prelude/tests/fs.test.ts
Normal file
329
src/prelude/tests/fs.test.ts
Normal file
|
|
@ -0,0 +1,329 @@
|
||||||
|
import { expect, describe, test, beforeEach, afterEach } from 'bun:test'
|
||||||
|
import { mkdirSync, writeFileSync, rmSync, existsSync } from 'fs'
|
||||||
|
import { join, resolve } from 'path'
|
||||||
|
import { fs } from '../fs'
|
||||||
|
|
||||||
|
const TEST_DIR = resolve('./tmp/shrimp-fs-test')
|
||||||
|
const CWD = process.cwd()
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
if (existsSync(TEST_DIR)) {
|
||||||
|
rmSync(TEST_DIR, { recursive: true })
|
||||||
|
}
|
||||||
|
mkdirSync(TEST_DIR, { recursive: true })
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
process.chdir(CWD)
|
||||||
|
if (existsSync(TEST_DIR)) {
|
||||||
|
rmSync(TEST_DIR, { recursive: true })
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - directory operations', () => {
|
||||||
|
test('fs.ls lists directory contents', () => {
|
||||||
|
writeFileSync(join(TEST_DIR, 'file1.txt'), 'content1')
|
||||||
|
writeFileSync(join(TEST_DIR, 'file2.txt'), 'content2')
|
||||||
|
|
||||||
|
const result = fs.ls(TEST_DIR)
|
||||||
|
expect(result).toContain('file1.txt')
|
||||||
|
expect(result).toContain('file2.txt')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.mkdir creates directory', () => {
|
||||||
|
const newDir = join(TEST_DIR, 'newdir')
|
||||||
|
fs.mkdir(newDir)
|
||||||
|
expect(existsSync(newDir)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.rmdir removes empty directory', () => {
|
||||||
|
const dir = join(TEST_DIR, 'toremove')
|
||||||
|
mkdirSync(dir)
|
||||||
|
fs.rmdir(dir)
|
||||||
|
expect(existsSync(dir)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.pwd returns current working directory', () => {
|
||||||
|
const result = fs.pwd()
|
||||||
|
expect(typeof result).toBe('string')
|
||||||
|
expect(result.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.cd changes current working directory', () => {
|
||||||
|
const originalCwd = process.cwd()
|
||||||
|
fs.cd(TEST_DIR)
|
||||||
|
expect(process.cwd()).toBe(TEST_DIR)
|
||||||
|
process.chdir(originalCwd) // restore
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - reading', () => {
|
||||||
|
test('fs.read reads file contents as string', () => {
|
||||||
|
const file = join(TEST_DIR, 'test.txt')
|
||||||
|
writeFileSync(file, 'hello world')
|
||||||
|
|
||||||
|
const result = fs.read(file)
|
||||||
|
expect(result).toBe('hello world')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.cat is alias for fs.read', () => {
|
||||||
|
const file = join(TEST_DIR, 'test.txt')
|
||||||
|
writeFileSync(file, 'hello world')
|
||||||
|
|
||||||
|
const result = fs.cat(file)
|
||||||
|
expect(result).toBe('hello world')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.read-bytes reads file as buffer', () => {
|
||||||
|
const file = join(TEST_DIR, 'test.bin')
|
||||||
|
writeFileSync(file, Buffer.from([1, 2, 3, 4]))
|
||||||
|
|
||||||
|
const result = fs['read-bytes'](file)
|
||||||
|
expect(result).toBeInstanceOf(Array)
|
||||||
|
expect(result).toEqual([1, 2, 3, 4])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - writing', () => {
|
||||||
|
test('fs.write writes string to file', async () => {
|
||||||
|
const file = join(TEST_DIR, 'output.txt')
|
||||||
|
fs.write(file, 'test content')
|
||||||
|
|
||||||
|
const content = Bun.file(file).text()
|
||||||
|
expect(await content).toBe('test content')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.append appends to existing file', async () => {
|
||||||
|
const file = join(TEST_DIR, 'append.txt')
|
||||||
|
writeFileSync(file, 'first')
|
||||||
|
fs.append(file, ' second')
|
||||||
|
|
||||||
|
const content = await Bun.file(file).text()
|
||||||
|
expect(content).toBe('first second')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - file operations', () => {
|
||||||
|
test('fs.rm removes file', () => {
|
||||||
|
const file = join(TEST_DIR, 'remove.txt')
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
|
||||||
|
fs.rm(file)
|
||||||
|
expect(existsSync(file)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.delete is alias for fs.rm', () => {
|
||||||
|
const file = join(TEST_DIR, 'delete.txt')
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
|
||||||
|
fs.delete(file)
|
||||||
|
expect(existsSync(file)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.copy copies file', async () => {
|
||||||
|
const src = join(TEST_DIR, 'source.txt')
|
||||||
|
const dest = join(TEST_DIR, 'dest.txt')
|
||||||
|
writeFileSync(src, 'content')
|
||||||
|
|
||||||
|
fs.copy(src, dest)
|
||||||
|
expect(await Bun.file(dest).text()).toBe('content')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.cp is alias for fs.copy', async () => {
|
||||||
|
const src = join(TEST_DIR, 'source2.txt')
|
||||||
|
const dest = join(TEST_DIR, 'dest2.txt')
|
||||||
|
writeFileSync(src, 'content')
|
||||||
|
|
||||||
|
fs.cp(src, dest)
|
||||||
|
expect(await Bun.file(dest).text()).toBe('content')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.move moves file', async () => {
|
||||||
|
const src = join(TEST_DIR, 'source.txt')
|
||||||
|
const dest = join(TEST_DIR, 'moved.txt')
|
||||||
|
writeFileSync(src, 'content')
|
||||||
|
|
||||||
|
fs.move(src, dest)
|
||||||
|
expect(existsSync(src)).toBe(false)
|
||||||
|
expect(await Bun.file(dest).text()).toBe('content')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.mv is alias for fs.move', async () => {
|
||||||
|
const src = join(TEST_DIR, 'source2.txt')
|
||||||
|
const dest = join(TEST_DIR, 'moved2.txt')
|
||||||
|
writeFileSync(src, 'content')
|
||||||
|
|
||||||
|
fs.mv(src, dest)
|
||||||
|
expect(existsSync(src)).toBe(false)
|
||||||
|
expect(await Bun.file(dest).text()).toBe('content')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - path operations', () => {
|
||||||
|
test('fs.basename extracts filename from path', () => {
|
||||||
|
expect(fs.basename('/path/to/file.txt')).toBe('file.txt')
|
||||||
|
expect(fs.basename('/path/to/dir/')).toBe('dir')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.dirname extracts directory from path', () => {
|
||||||
|
expect(fs.dirname('/path/to/file.txt')).toBe('/path/to')
|
||||||
|
expect(fs.dirname('/path/to/dir/')).toBe('/path/to')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.extname extracts file extension', () => {
|
||||||
|
expect(fs.extname('file.txt')).toBe('.txt')
|
||||||
|
expect(fs.extname('file.tar.gz')).toBe('.gz')
|
||||||
|
expect(fs.extname('noext')).toBe('')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.join joins path segments', () => {
|
||||||
|
expect(fs.join('path', 'to', 'file.txt')).toBe('path/to/file.txt')
|
||||||
|
expect(fs.join('/absolute', 'path')).toBe('/absolute/path')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.resolve resolves to absolute path', () => {
|
||||||
|
const result = fs.resolve('relative', 'path')
|
||||||
|
expect(result.startsWith('/')).toBe(true)
|
||||||
|
expect(result).toContain('relative')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - file info', () => {
|
||||||
|
test('fs.stat returns file stats', () => {
|
||||||
|
const file = join(TEST_DIR, 'stat.txt')
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
|
||||||
|
const stats = fs.stat(file)
|
||||||
|
expect(stats).toHaveProperty('size')
|
||||||
|
expect(stats).toHaveProperty('mtime')
|
||||||
|
expect(stats.size).toBe(7) // 'content' is 7 bytes
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.exists? checks if path exists', () => {
|
||||||
|
const file = join(TEST_DIR, 'exists.txt')
|
||||||
|
expect(fs['exists?'](file)).toBe(false)
|
||||||
|
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
expect(fs['exists?'](file)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.file? checks if path is a file', () => {
|
||||||
|
const file = join(TEST_DIR, 'isfile.txt')
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
|
||||||
|
expect(fs['file?'](file)).toBe(true)
|
||||||
|
expect(fs['file?'](TEST_DIR)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.dir? checks if path is a directory', () => {
|
||||||
|
const dir = join(TEST_DIR, 'isdir')
|
||||||
|
mkdirSync(dir)
|
||||||
|
|
||||||
|
expect(fs['dir?'](dir)).toBe(true)
|
||||||
|
expect(fs['dir?'](join(TEST_DIR, 'isfile.txt'))).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.symlink? checks if path is a symbolic link', () => {
|
||||||
|
const file = join(TEST_DIR, 'target.txt')
|
||||||
|
const link = join(TEST_DIR, 'link.txt')
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
|
||||||
|
fs.symlink(file, link)
|
||||||
|
expect(fs['symlink?'](link)).toBe(true)
|
||||||
|
expect(fs['symlink?'](file)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.exec? checks if file is executable', () => {
|
||||||
|
const file = join(TEST_DIR, 'script.sh')
|
||||||
|
writeFileSync(file, '#!/bin/bash\necho hello')
|
||||||
|
|
||||||
|
fs.chmod(file, 0o755)
|
||||||
|
expect(fs['exec?'](file)).toBe(true)
|
||||||
|
|
||||||
|
fs.chmod(file, 0o644)
|
||||||
|
expect(fs['exec?'](file)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.size returns file size in bytes', () => {
|
||||||
|
const file = join(TEST_DIR, 'sizeme.txt')
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
|
||||||
|
expect(fs.size(file)).toBe(7) // 'content' is 7 bytes
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - permissions', () => {
|
||||||
|
test('fs.chmod changes file permissions with octal number', () => {
|
||||||
|
const file = join(TEST_DIR, 'perms.txt')
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
|
||||||
|
fs.chmod(file, 0o755)
|
||||||
|
expect(fs['exec?'](file)).toBe(true)
|
||||||
|
|
||||||
|
fs.chmod(file, 0o644)
|
||||||
|
expect(fs['exec?'](file)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.chmod changes file permissions with string', () => {
|
||||||
|
const file = join(TEST_DIR, 'perms2.txt')
|
||||||
|
writeFileSync(file, 'content')
|
||||||
|
|
||||||
|
fs.chmod(file, '755')
|
||||||
|
expect(fs['exec?'](file)).toBe(true)
|
||||||
|
|
||||||
|
fs.chmod(file, '644')
|
||||||
|
expect(fs['exec?'](file)).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - symlinks', () => {
|
||||||
|
test('fs.symlink creates symbolic link', () => {
|
||||||
|
const target = join(TEST_DIR, 'target.txt')
|
||||||
|
const link = join(TEST_DIR, 'link.txt')
|
||||||
|
writeFileSync(target, 'content')
|
||||||
|
|
||||||
|
fs.symlink(target, link)
|
||||||
|
expect(fs['symlink?'](link)).toBe(true)
|
||||||
|
expect(fs.read(link)).toBe('content')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.readlink reads symbolic link target', () => {
|
||||||
|
const target = join(TEST_DIR, 'target.txt')
|
||||||
|
const link = join(TEST_DIR, 'link.txt')
|
||||||
|
writeFileSync(target, 'content')
|
||||||
|
|
||||||
|
fs.symlink(target, link)
|
||||||
|
expect(fs.readlink(link)).toBe(target)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('fs - other', () => {
|
||||||
|
test('fs.glob matches file patterns', () => {
|
||||||
|
writeFileSync(join(TEST_DIR, 'file1.txt'), '')
|
||||||
|
writeFileSync(join(TEST_DIR, 'file2.txt'), '')
|
||||||
|
writeFileSync(join(TEST_DIR, 'file3.md'), '')
|
||||||
|
|
||||||
|
const result = fs.glob(join(TEST_DIR, '*.txt'))
|
||||||
|
expect(result).toHaveLength(2)
|
||||||
|
expect(result).toContain(join(TEST_DIR, 'file1.txt'))
|
||||||
|
expect(result).toContain(join(TEST_DIR, 'file2.txt'))
|
||||||
|
})
|
||||||
|
|
||||||
|
test('fs.watch calls callback on file change', async () => {
|
||||||
|
const file = join(TEST_DIR, 'watch.txt')
|
||||||
|
writeFileSync(file, 'initial')
|
||||||
|
|
||||||
|
let called = false
|
||||||
|
const watcher = fs.watch(file, () => { called = true })
|
||||||
|
|
||||||
|
// Trigger change
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 100))
|
||||||
|
writeFileSync(file, 'updated')
|
||||||
|
|
||||||
|
// Wait for watcher
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 500))
|
||||||
|
|
||||||
|
expect(called).toBe(true)
|
||||||
|
watcher.close?.()
|
||||||
|
})
|
||||||
|
})
|
||||||
139
src/prelude/tests/info.test.ts
Normal file
139
src/prelude/tests/info.test.ts
Normal file
|
|
@ -0,0 +1,139 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('var and var?', () => {
|
||||||
|
test('var? checks if a variable exists', async () => {
|
||||||
|
await expect(`var? 'nada'`).toEvaluateTo(false)
|
||||||
|
await expect(`var? 'info'`).toEvaluateTo(false)
|
||||||
|
await expect(`abc = abc; var? 'abc'`).toEvaluateTo(true)
|
||||||
|
await expect(`var? 'var?'`).toEvaluateTo(true)
|
||||||
|
|
||||||
|
await expect(`var? 'dict'`).toEvaluateTo(true)
|
||||||
|
await expect(`var? dict`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('var returns a value or null', async () => {
|
||||||
|
await expect(`var 'nada'`).toEvaluateTo(null)
|
||||||
|
await expect(`var nada`).toEvaluateTo(null)
|
||||||
|
await expect(`var 'info'`).toEvaluateTo(null)
|
||||||
|
await expect(`abc = my-string; var 'abc'`).toEvaluateTo('my-string')
|
||||||
|
await expect(`abc = my-string; var abc`).toEvaluateTo(null)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('type predicates', () => {
|
||||||
|
test('string? checks for string type', async () => {
|
||||||
|
await expect(`string? 'hello'`).toEvaluateTo(true)
|
||||||
|
await expect(`string? 42`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('number? checks for number type', async () => {
|
||||||
|
await expect(`number? 42`).toEvaluateTo(true)
|
||||||
|
await expect(`number? 'hello'`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('boolean? checks for boolean type', async () => {
|
||||||
|
await expect(`boolean? true`).toEvaluateTo(true)
|
||||||
|
await expect(`boolean? 42`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('array? checks for array type', async () => {
|
||||||
|
await expect(`array? [1 2 3]`).toEvaluateTo(true)
|
||||||
|
await expect(`array? 42`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dict? checks for dict type', async () => {
|
||||||
|
await expect(`dict? [a=1]`).toEvaluateTo(true)
|
||||||
|
await expect(`dict? []`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('null? checks for null type', async () => {
|
||||||
|
await expect(`null? null`).toEvaluateTo(true)
|
||||||
|
await expect(`null? 42`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('some? checks for non-null', async () => {
|
||||||
|
await expect(`some? 42`).toEvaluateTo(true)
|
||||||
|
await expect(`some? null`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('introspection', () => {
|
||||||
|
test('type returns proper types', async () => {
|
||||||
|
await expect(`type 'hello'`).toEvaluateTo('string')
|
||||||
|
await expect(`type 42`).toEvaluateTo('number')
|
||||||
|
await expect(`type true`).toEvaluateTo('boolean')
|
||||||
|
await expect(`type false`).toEvaluateTo('boolean')
|
||||||
|
await expect(`type null`).toEvaluateTo('null')
|
||||||
|
await expect(`type [1 2 3]`).toEvaluateTo('array')
|
||||||
|
await expect(`type [a=1 b=2]`).toEvaluateTo('dict')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('inspect formats values', async () => {
|
||||||
|
await expect(`inspect 'hello'`).toEvaluateTo("\u001b[32m'hello\u001b[32m'\u001b[0m")
|
||||||
|
})
|
||||||
|
|
||||||
|
test('describe describes values', async () => {
|
||||||
|
await expect(`describe 'hello'`).toEvaluateTo("#<string: \u001b[32m'hello\u001b[32m'\u001b[0m>")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('environment', () => {
|
||||||
|
test('args is an array', async () => {
|
||||||
|
await expect(`array? $.args`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('args can be accessed', async () => {
|
||||||
|
await expect(`type $.args`).toEvaluateTo('array')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('argv includes more than just the args', async () => {
|
||||||
|
await expect(`list.first $.argv | str.ends-with? 'shrimp.test.ts'`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('ref', () => {
|
||||||
|
expect(`rnd = do x: true end; rnd | type`).toEvaluateTo('boolean')
|
||||||
|
expect(`rnd = do x: true end; ref rnd | type`).toEvaluateTo('function')
|
||||||
|
|
||||||
|
expect(`math.random | type`).toEvaluateTo('number')
|
||||||
|
expect(`ref math.random | type`).toEvaluateTo('native')
|
||||||
|
|
||||||
|
expect(`rnd = math.random; rnd | type`).toEvaluateTo('number')
|
||||||
|
expect(`rnd = ref math.random; rnd | type`).toEvaluateTo('number')
|
||||||
|
expect(`rnd = ref math.random; ref rnd | type`).toEvaluateTo('native')
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('$ global dictionary', () => {
|
||||||
|
test('$.args is an array', async () => {
|
||||||
|
await expect(`$.args | array?`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('$.args can be accessed', async () => {
|
||||||
|
await expect(`$.args | type`).toEvaluateTo('array')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('$.script.name is a string', async () => {
|
||||||
|
await expect(`$.script.name | string?`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('$.script.path is a string', async () => {
|
||||||
|
await expect(`$.script.path | string?`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('$.env is a dict', async () => {
|
||||||
|
await expect(`$.env | dict?`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('$.pid is a number', async () => {
|
||||||
|
await expect(`$.pid | number?`).toEvaluateTo(true)
|
||||||
|
await expect(`$.pid > 0`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('$.cwd is a string', async () => {
|
||||||
|
await expect(`$.cwd | string?`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('$.cwd returns current working directory', async () => {
|
||||||
|
await expect(`$.cwd`).toEvaluateTo(process.cwd())
|
||||||
|
})
|
||||||
|
})
|
||||||
84
src/prelude/tests/json.test.ts
Normal file
84
src/prelude/tests/json.test.ts
Normal file
|
|
@ -0,0 +1,84 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('json', () => {
|
||||||
|
test('json.decode', () => {
|
||||||
|
expect(`json.decode '[1,2,3]'`).toEvaluateTo([1, 2, 3])
|
||||||
|
expect(`json.decode '"heya"'`).toEvaluateTo('heya')
|
||||||
|
expect(`json.decode '[true, false, null]'`).toEvaluateTo([true, false, null])
|
||||||
|
expect(`json.decode '{"a": true, "b": false, "c": "yeah"}'`).toEvaluateTo({ a: true, b: false, c: "yeah" })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('json.encode', () => {
|
||||||
|
expect(`json.encode [1 2 3]`).toEvaluateTo('[1,2,3]')
|
||||||
|
expect(`json.encode 'heya'`).toEvaluateTo('"heya"')
|
||||||
|
expect(`json.encode [true false null]`).toEvaluateTo('[true,false,null]')
|
||||||
|
expect(`json.encode [a=true b=false c='yeah'] | json.decode`).toEvaluateTo({ a: true, b: false, c: "yeah" })
|
||||||
|
})
|
||||||
|
|
||||||
|
test('edge cases - empty structures', () => {
|
||||||
|
expect(`json.decode '[]'`).toEvaluateTo([])
|
||||||
|
expect(`json.decode '{}'`).toEvaluateTo({})
|
||||||
|
expect(`json.encode []`).toEvaluateTo('[]')
|
||||||
|
expect(`json.encode [=]`).toEvaluateTo('{}')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('edge cases - special characters in strings', () => {
|
||||||
|
expect(`json.decode '"hello\\\\nworld"'`).toEvaluateTo('hello\nworld')
|
||||||
|
expect(`json.decode '"tab\\\\there"'`).toEvaluateTo('tab\there')
|
||||||
|
expect(`json.decode '"forward/slash"'`).toEvaluateTo('forward/slash')
|
||||||
|
expect(`json.decode '"with\\\\\\\\backslash"'`).toEvaluateTo('with\\backslash')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('numbers - integers and floats', () => {
|
||||||
|
expect(`json.decode '42'`).toEvaluateTo(42)
|
||||||
|
expect(`json.decode '0'`).toEvaluateTo(0)
|
||||||
|
expect(`json.decode '-17'`).toEvaluateTo(-17)
|
||||||
|
expect(`json.decode '3.14159'`).toEvaluateTo(3.14159)
|
||||||
|
expect(`json.decode '-0.5'`).toEvaluateTo(-0.5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('numbers - scientific notation', () => {
|
||||||
|
expect(`json.decode '1e10'`).toEvaluateTo(1e10)
|
||||||
|
expect(`json.decode '2.5e-3'`).toEvaluateTo(2.5e-3)
|
||||||
|
expect(`json.decode '1.23E+5'`).toEvaluateTo(1.23e5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('unicode - emoji and special characters', () => {
|
||||||
|
expect(`json.decode '"hello 👋"'`).toEvaluateTo('hello 👋')
|
||||||
|
expect(`json.decode '"🎉🚀✨"'`).toEvaluateTo('🎉🚀✨')
|
||||||
|
expect(`json.encode '你好'`).toEvaluateTo('"你好"')
|
||||||
|
expect(`json.encode 'café'`).toEvaluateTo('"café"')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('nested structures - arrays', () => {
|
||||||
|
expect(`json.decode '[[1,2],[3,4],[5,6]]'`).toEvaluateTo([[1, 2], [3, 4], [5, 6]])
|
||||||
|
expect(`json.decode '[1,[2,[3,[4]]]]'`).toEvaluateTo([1, [2, [3, [4]]]])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('nested structures - objects', () => {
|
||||||
|
expect(`json.decode '{"user":{"name":"Alice","age":30}}'`).toEvaluateTo({
|
||||||
|
user: { name: 'Alice', age: 30 }
|
||||||
|
})
|
||||||
|
expect(`json.decode '{"a":{"b":{"c":"deep"}}}'`).toEvaluateTo({
|
||||||
|
a: { b: { c: 'deep' } }
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
test('nested structures - mixed arrays and objects', () => {
|
||||||
|
expect(`json.decode '[{"id":1,"tags":["a","b"]},{"id":2,"tags":["c"]}]'`).toEvaluateTo([
|
||||||
|
{ id: 1, tags: ['a', 'b'] },
|
||||||
|
{ id: 2, tags: ['c'] }
|
||||||
|
])
|
||||||
|
expect(`json.decode '{"items":[1,2,3],"meta":{"count":3}}'`).toEvaluateTo({
|
||||||
|
items: [1, 2, 3],
|
||||||
|
meta: { count: 3 }
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
test('error handling - invalid json', () => {
|
||||||
|
expect(`json.decode '{invalid}'`).toFailEvaluation()
|
||||||
|
expect(`json.decode '[1,2,3'`).toFailEvaluation()
|
||||||
|
expect(`json.decode 'undefined'`).toFailEvaluation()
|
||||||
|
expect(`json.decode ''`).toFailEvaluation()
|
||||||
|
})
|
||||||
|
})
|
||||||
41
src/prelude/tests/load.test.ts
Normal file
41
src/prelude/tests/load.test.ts
Normal file
|
|
@ -0,0 +1,41 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('loading a file', () => {
|
||||||
|
test(`imports all a file's functions`, async () => {
|
||||||
|
expect(`
|
||||||
|
math = load ./src/prelude/tests/math.sh
|
||||||
|
math.double 4
|
||||||
|
`).toEvaluateTo(8)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
math = load ./src/prelude/tests/math.sh
|
||||||
|
math.double (math.double 4)
|
||||||
|
`).toEvaluateTo(16)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
math = load ./src/prelude/tests/math.sh
|
||||||
|
dbl = ref math.double
|
||||||
|
dbl (dbl 2)
|
||||||
|
`).toEvaluateTo(8)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
math = load ./src/prelude/tests/math.sh
|
||||||
|
math.pi
|
||||||
|
`).toEvaluateTo(3.14)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
math = load ./src/prelude/tests/math.sh
|
||||||
|
math | at 🥧
|
||||||
|
`).toEvaluateTo(3.14159265359)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
math = load ./src/prelude/tests/math.sh
|
||||||
|
math.🥧
|
||||||
|
`).toEvaluateTo(3.14159265359)
|
||||||
|
|
||||||
|
expect(`
|
||||||
|
math = load ./src/prelude/tests/math.sh
|
||||||
|
math.add1 5
|
||||||
|
`).toEvaluateTo(6)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,42 +0,0 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
|
||||||
import { globals } from '#prelude'
|
|
||||||
|
|
||||||
describe('use', () => {
|
|
||||||
test(`imports all a file's functions`, async () => {
|
|
||||||
expect(`
|
|
||||||
math = load ./src/prelude/tests/math
|
|
||||||
math.double 4
|
|
||||||
`).toEvaluateTo(8, globals)
|
|
||||||
|
|
||||||
expect(`
|
|
||||||
math = load ./src/prelude/tests/math
|
|
||||||
math.double (math.double 4)
|
|
||||||
`).toEvaluateTo(16, globals)
|
|
||||||
|
|
||||||
expect(`
|
|
||||||
math = load ./src/prelude/tests/math
|
|
||||||
dbl = math.double
|
|
||||||
dbl (dbl 2)
|
|
||||||
`).toEvaluateTo(8, globals)
|
|
||||||
|
|
||||||
expect(`
|
|
||||||
math = load ./src/prelude/tests/math
|
|
||||||
math.pi
|
|
||||||
`).toEvaluateTo(3.14, globals)
|
|
||||||
|
|
||||||
expect(`
|
|
||||||
math = load ./src/prelude/tests/math
|
|
||||||
math | at 🥧
|
|
||||||
`).toEvaluateTo(3.14159265359, globals)
|
|
||||||
|
|
||||||
expect(`
|
|
||||||
math = load ./src/prelude/tests/math
|
|
||||||
math.🥧
|
|
||||||
`).toEvaluateTo(3.14159265359, globals)
|
|
||||||
|
|
||||||
expect(`
|
|
||||||
math = load ./src/prelude/tests/math
|
|
||||||
math.add1 5
|
|
||||||
`).toEvaluateTo(6, globals)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
@ -1,245 +1,210 @@
|
||||||
import { expect, describe, test } from 'bun:test'
|
import { expect, describe, test } from 'bun:test'
|
||||||
import { globals } from '#prelude'
|
|
||||||
|
|
||||||
describe('string operations', () => {
|
describe('string operations', () => {
|
||||||
test('to-upper converts to uppercase', async () => {
|
test('to-upper converts to uppercase', async () => {
|
||||||
await expect(`str.to-upper 'hello'`).toEvaluateTo('HELLO', globals)
|
await expect(`str.to-upper 'hello'`).toEvaluateTo('HELLO')
|
||||||
await expect(`str.to-upper 'Hello World!'`).toEvaluateTo('HELLO WORLD!', globals)
|
await expect(`str.to-upper 'Hello World!'`).toEvaluateTo('HELLO WORLD!')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('to-lower converts to lowercase', async () => {
|
test('to-lower converts to lowercase', async () => {
|
||||||
await expect(`str.to-lower 'HELLO'`).toEvaluateTo('hello', globals)
|
await expect(`str.to-lower 'HELLO'`).toEvaluateTo('hello')
|
||||||
await expect(`str.to-lower 'Hello World!'`).toEvaluateTo('hello world!', globals)
|
await expect(`str.to-lower 'Hello World!'`).toEvaluateTo('hello world!')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('trim removes whitespace', async () => {
|
test('trim removes whitespace', async () => {
|
||||||
await expect(`str.trim ' hello '`).toEvaluateTo('hello', globals)
|
await expect(`str.trim ' hello '`).toEvaluateTo('hello')
|
||||||
await expect(`str.trim '\\n\\thello\\t\\n'`).toEvaluateTo('hello', globals)
|
await expect(`str.trim '\\n\\thello\\t\\n'`).toEvaluateTo('hello')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('capitalize makes first char uppercase', async () => {
|
||||||
|
await expect(`str.capitalize 'hello'`).toEvaluateTo('Hello')
|
||||||
|
await expect(`str.capitalize 'HELLO'`).toEvaluateTo('Hello')
|
||||||
|
await expect(`str.capitalize 'hello world'`).toEvaluateTo('Hello world')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('titlecase capitalizes each word', async () => {
|
||||||
|
await expect(`str.titlecase 'hello world'`).toEvaluateTo('Hello World')
|
||||||
|
await expect(`str.titlecase 'HELLO WORLD'`).toEvaluateTo('Hello World')
|
||||||
|
await expect(`str.titlecase 'the quick brown fox'`).toEvaluateTo('The Quick Brown Fox')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('split divides string by separator', async () => {
|
test('split divides string by separator', async () => {
|
||||||
await expect(`str.split 'a,b,c' ','`).toEvaluateTo(['a', 'b', 'c'], globals)
|
await expect(`str.split 'a,b,c' ','`).toEvaluateTo(['a', 'b', 'c'])
|
||||||
await expect(`str.split 'hello' ''`).toEvaluateTo(['h', 'e', 'l', 'l', 'o'], globals)
|
await expect(`str.split 'hello' ''`).toEvaluateTo(['h', 'e', 'l', 'l', 'o'])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('split with comma separator', async () => {
|
test('split with comma separator', async () => {
|
||||||
await expect(`str.split 'a,b,c' ','`).toEvaluateTo(['a', 'b', 'c'], globals)
|
await expect(`str.split 'a,b,c' ','`).toEvaluateTo(['a', 'b', 'c'])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('join combines array elements', async () => {
|
test('join combines array elements', async () => {
|
||||||
await expect(`str.join ['a' 'b' 'c'] '-'`).toEvaluateTo('a-b-c', globals)
|
await expect(`str.join ['a' 'b' 'c'] '-'`).toEvaluateTo('a-b-c')
|
||||||
await expect(`str.join ['hello' 'world'] ' '`).toEvaluateTo('hello world', globals)
|
await expect(`str.join ['hello' 'world'] ' '`).toEvaluateTo('hello world')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('join with comma separator', async () => {
|
test('join with comma separator', async () => {
|
||||||
await expect(`str.join ['a' 'b' 'c'] ','`).toEvaluateTo('a,b,c', globals)
|
await expect(`str.join ['a' 'b' 'c'] ','`).toEvaluateTo('a,b,c')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('starts-with? checks string prefix', async () => {
|
test('starts-with? checks string prefix', async () => {
|
||||||
await expect(`str.starts-with? 'hello' 'hel'`).toEvaluateTo(true, globals)
|
await expect(`str.starts-with? 'hello' 'hel'`).toEvaluateTo(true)
|
||||||
await expect(`str.starts-with? 'hello' 'bye'`).toEvaluateTo(false, globals)
|
await expect(`str.starts-with? 'hello' 'bye'`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('ends-with? checks string suffix', async () => {
|
test('ends-with? checks string suffix', async () => {
|
||||||
await expect(`str.ends-with? 'hello' 'lo'`).toEvaluateTo(true, globals)
|
await expect(`str.ends-with? 'hello' 'lo'`).toEvaluateTo(true)
|
||||||
await expect(`str.ends-with? 'hello' 'he'`).toEvaluateTo(false, globals)
|
await expect(`str.ends-with? 'hello' 'he'`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('contains? checks for substring', async () => {
|
test('contains? checks for substring', async () => {
|
||||||
await expect(`str.contains? 'hello world' 'o w'`).toEvaluateTo(true, globals)
|
await expect(`str.contains? 'hello world' 'o w'`).toEvaluateTo(true)
|
||||||
await expect(`str.contains? 'hello' 'bye'`).toEvaluateTo(false, globals)
|
await expect(`str.contains? 'hello' 'bye'`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('empty? checks if string is empty', async () => {
|
test('empty? checks if string is empty', async () => {
|
||||||
await expect(`str.empty? ''`).toEvaluateTo(true, globals)
|
await expect(`str.empty? ''`).toEvaluateTo(true)
|
||||||
await expect(`str.empty? 'hello'`).toEvaluateTo(false, globals)
|
await expect(`str.empty? 'hello'`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('replace replaces first occurrence', async () => {
|
test('replace replaces first occurrence', async () => {
|
||||||
await expect(`str.replace 'hello hello' 'hello' 'hi'`).toEvaluateTo('hi hello', globals)
|
await expect(`str.replace 'hello hello' 'hello' 'hi'`).toEvaluateTo('hi hello')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('replace-all replaces all occurrences', async () => {
|
test('replace-all replaces all occurrences', async () => {
|
||||||
await expect(`str.replace-all 'hello hello' 'hello' 'hi'`).toEvaluateTo('hi hi', globals)
|
await expect(`str.replace-all 'hello hello' 'hello' 'hi'`).toEvaluateTo('hi hi')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('slice extracts substring', async () => {
|
test('slice extracts substring', async () => {
|
||||||
await expect(`str.slice 'hello' 1 3`).toEvaluateTo('el', globals)
|
await expect(`str.slice 'hello' 1 3`).toEvaluateTo('el')
|
||||||
await expect(`str.slice 'hello' 2 null`).toEvaluateTo('llo', globals)
|
await expect(`str.slice 'hello' 2 null`).toEvaluateTo('llo')
|
||||||
|
await expect(`str.slice 'hello' 2`).toEvaluateTo('llo')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('repeat repeats string', async () => {
|
test('repeat repeats string', async () => {
|
||||||
await expect(`str.repeat 'ha' 3`).toEvaluateTo('hahaha', globals)
|
await expect(`str.repeat 'ha' 3`).toEvaluateTo('hahaha')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('pad-start pads beginning', async () => {
|
test('pad-start pads beginning', async () => {
|
||||||
await expect(`str.pad-start '5' 3 '0'`).toEvaluateTo('005', globals)
|
await expect(`str.pad-start '5' 3 '0'`).toEvaluateTo('005')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('pad-end pads end', async () => {
|
test('pad-end pads end', async () => {
|
||||||
await expect(`str.pad-end '5' 3 '0'`).toEvaluateTo('500', globals)
|
await expect(`str.pad-end '5' 3 '0'`).toEvaluateTo('500')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('lines splits by newlines', async () => {
|
test('lines splits by newlines', async () => {
|
||||||
await expect(`str.lines 'a\\nb\\nc'`).toEvaluateTo(['a', 'b', 'c'], globals)
|
await expect(`str.lines 'a\\nb\\nc'`).toEvaluateTo(['a', 'b', 'c'])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('chars splits into characters', async () => {
|
test('chars splits into characters', async () => {
|
||||||
await expect(`str.chars 'abc'`).toEvaluateTo(['a', 'b', 'c'], globals)
|
await expect(`str.chars 'abc'`).toEvaluateTo(['a', 'b', 'c'])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('index-of finds substring position', async () => {
|
test('index-of finds substring position', async () => {
|
||||||
await expect(`str.index-of 'hello world' 'world'`).toEvaluateTo(6, globals)
|
await expect(`str.index-of 'hello world' 'world'`).toEvaluateTo(6)
|
||||||
await expect(`str.index-of 'hello' 'bye'`).toEvaluateTo(-1, globals)
|
await expect(`str.index-of 'hello' 'bye'`).toEvaluateTo(-1)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('last-index-of finds last occurrence', async () => {
|
test('last-index-of finds last occurrence', async () => {
|
||||||
await expect(`str.last-index-of 'hello hello' 'hello'`).toEvaluateTo(6, globals)
|
await expect(`str.last-index-of 'hello hello' 'hello'`).toEvaluateTo(6)
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('type predicates', () => {
|
|
||||||
test('string? checks for string type', async () => {
|
|
||||||
await expect(`string? 'hello'`).toEvaluateTo(true, globals)
|
|
||||||
await expect(`string? 42`).toEvaluateTo(false, globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('number? checks for number type', async () => {
|
|
||||||
await expect(`number? 42`).toEvaluateTo(true, globals)
|
|
||||||
await expect(`number? 'hello'`).toEvaluateTo(false, globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('boolean? checks for boolean type', async () => {
|
|
||||||
await expect(`boolean? true`).toEvaluateTo(true, globals)
|
|
||||||
await expect(`boolean? 42`).toEvaluateTo(false, globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('array? checks for array type', async () => {
|
|
||||||
await expect(`array? [1 2 3]`).toEvaluateTo(true, globals)
|
|
||||||
await expect(`array? 42`).toEvaluateTo(false, globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('dict? checks for dict type', async () => {
|
|
||||||
await expect(`dict? [a=1]`).toEvaluateTo(true, globals)
|
|
||||||
await expect(`dict? []`).toEvaluateTo(false, globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('null? checks for null type', async () => {
|
|
||||||
await expect(`null? null`).toEvaluateTo(true, globals)
|
|
||||||
await expect(`null? 42`).toEvaluateTo(false, globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('some? checks for non-null', async () => {
|
|
||||||
await expect(`some? 42`).toEvaluateTo(true, globals)
|
|
||||||
await expect(`some? null`).toEvaluateTo(false, globals)
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('boolean logic', () => {
|
describe('boolean logic', () => {
|
||||||
test('not negates value', async () => {
|
test('not negates value', async () => {
|
||||||
await expect(`not true`).toEvaluateTo(false, globals)
|
await expect(`not true`).toEvaluateTo(false)
|
||||||
await expect(`not false`).toEvaluateTo(true, globals)
|
await expect(`not false`).toEvaluateTo(true)
|
||||||
await expect(`not 42`).toEvaluateTo(false, globals)
|
await expect(`not 42`).toEvaluateTo(false)
|
||||||
await expect(`not null`).toEvaluateTo(true, globals)
|
await expect(`not null`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('not works with function calls', async () => {
|
||||||
|
await expect(`equals = do x y: x == y end; not equals 5 5`).toEvaluateTo(false)
|
||||||
|
await expect(`equals = do x y: x == y end; not equals 5 10`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('not works with binary operations and comparisons', async () => {
|
||||||
|
await expect(`not 5 > 10`).toEvaluateTo(true)
|
||||||
|
await expect(`not 10 > 5`).toEvaluateTo(false)
|
||||||
|
await expect(`not true and false`).toEvaluateTo(true)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('utilities', () => {
|
describe('utilities', () => {
|
||||||
test('inc increments by 1', async () => {
|
test('inc increments by 1', async () => {
|
||||||
await expect(`inc 5`).toEvaluateTo(6, globals)
|
await expect(`inc 5`).toEvaluateTo(6)
|
||||||
await expect(`inc -1`).toEvaluateTo(0, globals)
|
await expect(`inc -1`).toEvaluateTo(0)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dec decrements by 1', async () => {
|
test('dec decrements by 1', async () => {
|
||||||
await expect(`dec 5`).toEvaluateTo(4, globals)
|
await expect(`dec 5`).toEvaluateTo(4)
|
||||||
await expect(`dec 0`).toEvaluateTo(-1, globals)
|
await expect(`dec 0`).toEvaluateTo(-1)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('identity returns value as-is', async () => {
|
test('identity returns value as-is', async () => {
|
||||||
await expect(`identity 42`).toEvaluateTo(42, globals)
|
await expect(`identity 42`).toEvaluateTo(42)
|
||||||
await expect(`identity 'hello'`).toEvaluateTo('hello', globals)
|
await expect(`identity 'hello'`).toEvaluateTo('hello')
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('introspection', () => {
|
|
||||||
test('type returns proper types', async () => {
|
|
||||||
await expect(`type 'hello'`).toEvaluateTo('string', globals)
|
|
||||||
await expect(`type 42`).toEvaluateTo('number', globals)
|
|
||||||
await expect(`type true`).toEvaluateTo('boolean', globals)
|
|
||||||
await expect(`type false`).toEvaluateTo('boolean', globals)
|
|
||||||
await expect(`type null`).toEvaluateTo('null', globals)
|
|
||||||
await expect(`type [1 2 3]`).toEvaluateTo('array', globals)
|
|
||||||
await expect(`type [a=1 b=2]`).toEvaluateTo('dict', globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('length', async () => {
|
|
||||||
await expect(`length 'hello'`).toEvaluateTo(5, globals)
|
|
||||||
await expect(`length [1 2 3]`).toEvaluateTo(3, globals)
|
|
||||||
await expect(`length [a=1 b=2]`).toEvaluateTo(2, globals)
|
|
||||||
await expect(`length 42`).toEvaluateTo(0, globals)
|
|
||||||
await expect(`length true`).toEvaluateTo(0, globals)
|
|
||||||
await expect(`length null`).toEvaluateTo(0, globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('inspect formats values', async () => {
|
|
||||||
// Just test that inspect returns something for now
|
|
||||||
// (we'd need more complex assertion to check the actual format)
|
|
||||||
await expect(`type (inspect 'hello')`).toEvaluateTo('string', globals)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('describe describes values', async () => {
|
|
||||||
// Just test that inspect returns something for now
|
|
||||||
// (we'd need more complex assertion to check the actual format)
|
|
||||||
await expect(`describe 'hello'`).toEvaluateTo("#<string: \u001b[32m'hello\u001b[32m'\u001b[0m>", globals)
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('collections', () => {
|
describe('collections', () => {
|
||||||
|
test('length', async () => {
|
||||||
|
await expect(`length 'hello'`).toEvaluateTo(5)
|
||||||
|
await expect(`length [1 2 3]`).toEvaluateTo(3)
|
||||||
|
await expect(`length [a=1 b=2]`).toEvaluateTo(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('length throws on invalid types', async () => {
|
||||||
|
await expect(`try: length 42 catch e: 'error' end`).toEvaluateTo('error')
|
||||||
|
await expect(`try: length true catch e: 'error' end`).toEvaluateTo('error')
|
||||||
|
await expect(`try: length null catch e: 'error' end`).toEvaluateTo('error')
|
||||||
|
})
|
||||||
|
|
||||||
test('literal array creates array from arguments', async () => {
|
test('literal array creates array from arguments', async () => {
|
||||||
await expect(`[ 1 2 3 ]`).toEvaluateTo([1, 2, 3], globals)
|
await expect(`[ 1 2 3 ]`).toEvaluateTo([1, 2, 3])
|
||||||
await expect(`['a' 'b']`).toEvaluateTo(['a', 'b'], globals)
|
await expect(`['a' 'b']`).toEvaluateTo(['a', 'b'])
|
||||||
await expect(`[]`).toEvaluateTo([], globals)
|
await expect(`[]`).toEvaluateTo([])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('literal dict creates object from named arguments', async () => {
|
test('literal dict creates object from named arguments', async () => {
|
||||||
await expect(`[ a=1 b=2 ]`).toEvaluateTo({ a: 1, b: 2 }, globals)
|
await expect(`[ a=1 b=2 ]`).toEvaluateTo({ a: 1, b: 2 })
|
||||||
await expect(`[=]`).toEvaluateTo({}, globals)
|
await expect(`[=]`).toEvaluateTo({})
|
||||||
})
|
})
|
||||||
|
|
||||||
test('at retrieves element at index', async () => {
|
test('at retrieves element at index', async () => {
|
||||||
await expect(`at [10 20 30] 0`).toEvaluateTo(10, globals)
|
await expect(`at [10 20 30] 0`).toEvaluateTo(10)
|
||||||
await expect(`at [10 20 30] 2`).toEvaluateTo(30, globals)
|
await expect(`at [10 20 30] 2`).toEvaluateTo(30)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('at retrieves property from object', async () => {
|
test('at retrieves property from object', async () => {
|
||||||
await expect(`at [name='test'] 'name'`).toEvaluateTo('test', globals)
|
await expect(`at [name='test'] 'name'`).toEvaluateTo('test')
|
||||||
})
|
})
|
||||||
|
|
||||||
test('slice extracts array subset', async () => {
|
test('slice extracts array subset', async () => {
|
||||||
await expect(`list.slice [1 2 3 4 5] 1 3`).toEvaluateTo([2, 3], globals)
|
await expect(`list.slice [1 2 3 4 5] 1 3`).toEvaluateTo([2, 3])
|
||||||
await expect(`list.slice [1 2 3 4 5] 2 5`).toEvaluateTo([3, 4, 5], globals)
|
await expect(`list.slice [1 2 3 4 5] 2 5`).toEvaluateTo([3, 4, 5])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('range creates number sequence', async () => {
|
test('range creates number sequence', async () => {
|
||||||
await expect(`range 0 5`).toEvaluateTo([0, 1, 2, 3, 4, 5], globals)
|
await expect(`range 0 5`).toEvaluateTo([0, 1, 2, 3, 4, 5])
|
||||||
await expect(`range 3 6`).toEvaluateTo([3, 4, 5, 6], globals)
|
await expect(`range 3 6`).toEvaluateTo([3, 4, 5, 6])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('range with single argument starts from 0', async () => {
|
test('range with single argument starts from 0', async () => {
|
||||||
await expect(`range 3 null`).toEvaluateTo([0, 1, 2, 3], globals)
|
await expect(`range 3 null`).toEvaluateTo([0, 1, 2, 3])
|
||||||
await expect(`range 0 null`).toEvaluateTo([0], globals)
|
await expect(`range 0 null`).toEvaluateTo([0])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('empty? checks if list, dict, string is empty', async () => {
|
test('empty? checks if list, dict, string is empty', async () => {
|
||||||
await expect(`empty? []`).toEvaluateTo(true, globals)
|
await expect(`empty? []`).toEvaluateTo(true)
|
||||||
await expect(`empty? [1]`).toEvaluateTo(false, globals)
|
await expect(`empty? [1]`).toEvaluateTo(false)
|
||||||
|
|
||||||
await expect(`empty? [=]`).toEvaluateTo(true, globals)
|
await expect(`empty? [=]`).toEvaluateTo(true)
|
||||||
await expect(`empty? [a=true]`).toEvaluateTo(false, globals)
|
await expect(`empty? [a=true]`).toEvaluateTo(false)
|
||||||
|
|
||||||
await expect(`empty? ''`).toEvaluateTo(true, globals)
|
await expect(`empty? ''`).toEvaluateTo(true)
|
||||||
await expect(`empty? 'cat'`).toEvaluateTo(false, globals)
|
await expect(`empty? 'cat'`).toEvaluateTo(false)
|
||||||
await expect(`empty? meow`).toEvaluateTo(false, globals)
|
await expect(`empty? meow`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.filter keeps matching elements', async () => {
|
test('list.filter keeps matching elements', async () => {
|
||||||
|
|
@ -248,7 +213,16 @@ describe('collections', () => {
|
||||||
x == 3 or x == 4 or x == 5
|
x == 3 or x == 4 or x == 5
|
||||||
end
|
end
|
||||||
list.filter [1 2 3 4 5] is-positive
|
list.filter [1 2 3 4 5] is-positive
|
||||||
`).toEvaluateTo([3, 4, 5], globals)
|
`).toEvaluateTo([3, 4, 5])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.reject doesnt keep matching elements', async () => {
|
||||||
|
await expect(`
|
||||||
|
is-even = do x:
|
||||||
|
(x % 2) == 0
|
||||||
|
end
|
||||||
|
list.reject [1 2 3 4 5] is-even
|
||||||
|
`).toEvaluateTo([1, 3, 5])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.reduce accumulates values', async () => {
|
test('list.reduce accumulates values', async () => {
|
||||||
|
|
@ -257,7 +231,7 @@ describe('collections', () => {
|
||||||
acc + x
|
acc + x
|
||||||
end
|
end
|
||||||
list.reduce [1 2 3 4] add 0
|
list.reduce [1 2 3 4] add 0
|
||||||
`).toEvaluateTo(10, globals)
|
`).toEvaluateTo(10)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.find returns first match', async () => {
|
test('list.find returns first match', async () => {
|
||||||
|
|
@ -266,124 +240,218 @@ describe('collections', () => {
|
||||||
x == 4
|
x == 4
|
||||||
end
|
end
|
||||||
list.find [1 2 4 5] is-four
|
list.find [1 2 4 5] is-four
|
||||||
`).toEvaluateTo(4, globals)
|
`).toEvaluateTo(4)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.find returns null if no match', async () => {
|
test('list.find returns null if no match', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
is-ten = do x: x == 10 end
|
is-ten = do x: x == 10 end
|
||||||
list.find [1 2 3] is-ten
|
list.find [1 2 3] is-ten
|
||||||
`).toEvaluateTo(null, globals)
|
`).toEvaluateTo(null)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.empty? checks if list is empty', async () => {
|
test('list.empty? checks if list is empty', async () => {
|
||||||
await expect(`list.empty? []`).toEvaluateTo(true, globals)
|
await expect(`list.empty? []`).toEvaluateTo(true)
|
||||||
await expect(`list.empty? [1]`).toEvaluateTo(false, globals)
|
await expect(`list.empty? [1]`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.contains? checks for element', async () => {
|
test('list.contains? checks for element', async () => {
|
||||||
await expect(`list.contains? [1 2 3] 2`).toEvaluateTo(true, globals)
|
await expect(`list.contains? [1 2 3] 2`).toEvaluateTo(true)
|
||||||
await expect(`list.contains? [1 2 3] 5`).toEvaluateTo(false, globals)
|
await expect(`list.contains? [1 2 3] 5`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.reverse reverses array', async () => {
|
test('list.reverse reverses array', async () => {
|
||||||
await expect(`list.reverse [1 2 3]`).toEvaluateTo([3, 2, 1], globals)
|
await expect(`list.reverse [1 2 3]`).toEvaluateTo([3, 2, 1])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.concat combines arrays', async () => {
|
test('list.concat combines arrays', async () => {
|
||||||
await expect(`list.concat [1 2] [3 4]`).toEvaluateTo([1, 2, 3, 4], globals)
|
await expect(`list.concat [1 2] [3 4]`).toEvaluateTo([1, 2, 3, 4])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.flatten flattens nested arrays', async () => {
|
test('list.flatten flattens nested arrays', async () => {
|
||||||
await expect(`list.flatten [[1 2] [3 4]] 1`).toEvaluateTo([1, 2, 3, 4], globals)
|
await expect(`list.flatten [[1 2] [3 4]] 1`).toEvaluateTo([1, 2, 3, 4])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.unique removes duplicates', async () => {
|
test('list.unique removes duplicates', async () => {
|
||||||
await expect(`list.unique [1 2 2 3 1]`).toEvaluateTo([1, 2, 3], globals)
|
await expect(`list.unique [1 2 2 3 1]`).toEvaluateTo([1, 2, 3])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.zip combines two arrays', async () => {
|
test('list.zip combines two arrays', async () => {
|
||||||
await expect(`list.zip [1 2] [3 4]`).toEvaluateTo([[1, 3], [2, 4]], globals)
|
await expect(`list.zip [1 2] [3 4]`).toEvaluateTo([[1, 3], [2, 4]])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.first returns first element', async () => {
|
test('list.first returns first element', async () => {
|
||||||
await expect(`list.first [1 2 3]`).toEvaluateTo(1, globals)
|
await expect(`list.first [1 2 3]`).toEvaluateTo(1)
|
||||||
await expect(`list.first []`).toEvaluateTo(null, globals)
|
await expect(`list.first []`).toEvaluateTo(null)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.last returns last element', async () => {
|
test('list.last returns last element', async () => {
|
||||||
await expect(`list.last [1 2 3]`).toEvaluateTo(3, globals)
|
await expect(`list.last [1 2 3]`).toEvaluateTo(3)
|
||||||
await expect(`list.last []`).toEvaluateTo(null, globals)
|
await expect(`list.last []`).toEvaluateTo(null)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.rest returns all but first', async () => {
|
test('list.rest returns all but first', async () => {
|
||||||
await expect(`list.rest [1 2 3]`).toEvaluateTo([2, 3], globals)
|
await expect(`list.rest [1 2 3]`).toEvaluateTo([2, 3])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.take returns first n elements', async () => {
|
test('list.take returns first n elements', async () => {
|
||||||
await expect(`list.take [1 2 3 4 5] 3`).toEvaluateTo([1, 2, 3], globals)
|
await expect(`list.take [1 2 3 4 5] 3`).toEvaluateTo([1, 2, 3])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.drop skips first n elements', async () => {
|
test('list.drop skips first n elements', async () => {
|
||||||
await expect(`list.drop [1 2 3 4 5] 2`).toEvaluateTo([3, 4, 5], globals)
|
await expect(`list.drop [1 2 3 4 5] 2`).toEvaluateTo([3, 4, 5])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.append adds to end', async () => {
|
test('list.append adds to end', async () => {
|
||||||
await expect(`list.append [1 2] 3`).toEvaluateTo([1, 2, 3], globals)
|
await expect(`list.append [1 2] 3`).toEvaluateTo([1, 2, 3])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.prepend adds to start', async () => {
|
test('list.prepend adds to start', async () => {
|
||||||
await expect(`list.prepend [2 3] 1`).toEvaluateTo([1, 2, 3], globals)
|
await expect(`list.prepend [2 3] 1`).toEvaluateTo([1, 2, 3])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.index-of finds element index', async () => {
|
test('list.index-of finds element index', async () => {
|
||||||
await expect(`list.index-of [1 2 3] 2`).toEvaluateTo(1, globals)
|
await expect(`list.index-of [1 2 3] 2`).toEvaluateTo(1)
|
||||||
await expect(`list.index-of [1 2 3] 5`).toEvaluateTo(-1, globals)
|
await expect(`list.index-of [1 2 3] 5`).toEvaluateTo(-1)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.push adds to end and mutates array', async () => {
|
||||||
|
await expect(`arr = [1 2]; list.push arr 3; arr`).toEvaluateTo([1, 2, 3])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.push returns the size of the array', async () => {
|
||||||
|
await expect(`arr = [1 2]; arr | list.push 3`).toEvaluateTo(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.pop removes from end and mutates array', async () => {
|
||||||
|
await expect(`arr = [1 2 3]; list.pop arr; arr`).toEvaluateTo([1, 2])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.pop returns removed element', async () => {
|
||||||
|
await expect(`list.pop [1 2 3]`).toEvaluateTo(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.pop returns null for empty array', async () => {
|
||||||
|
await expect(`list.pop []`).toEvaluateTo(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.shift removes from start and mutates array', async () => {
|
||||||
|
await expect(`arr = [1 2 3]; list.shift arr; arr`).toEvaluateTo([2, 3])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.shift returns removed element', async () => {
|
||||||
|
await expect(`list.shift [1 2 3]`).toEvaluateTo(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.shift returns null for empty array', async () => {
|
||||||
|
await expect(`list.shift []`).toEvaluateTo(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.unshift adds to start and mutates array', async () => {
|
||||||
|
await expect(`arr = [2 3]; list.unshift arr 1; arr`).toEvaluateTo([1, 2, 3])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.unshift returns the length of the array', async () => {
|
||||||
|
await expect(`arr = [2 3]; arr | list.unshift 1`).toEvaluateTo(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.splice removes elements and mutates array', async () => {
|
||||||
|
await expect(`arr = [1 2 3 4 5]; list.splice arr 1 2; arr`).toEvaluateTo([1, 4, 5])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.splice returns removed elements', async () => {
|
||||||
|
await expect(`list.splice [1 2 3 4 5] 1 2`).toEvaluateTo([2, 3])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.splice from start', async () => {
|
||||||
|
await expect(`list.splice [1 2 3 4 5] 0 2`).toEvaluateTo([1, 2])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.splice to end', async () => {
|
||||||
|
await expect(`arr = [1 2 3 4 5]; list.splice arr 3 2; arr`).toEvaluateTo([1, 2, 3])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.insert adds element at index and mutates array', async () => {
|
||||||
|
await expect(`arr = [1 2 4 5]; list.insert arr 2 3; arr`).toEvaluateTo([1, 2, 3, 4, 5])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.insert returns array length', async () => {
|
||||||
|
await expect(`list.insert [1 2 4] 2 3`).toEvaluateTo(4)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.insert at start', async () => {
|
||||||
|
await expect(`arr = [2 3]; list.insert arr 0 1; arr`).toEvaluateTo([1, 2, 3])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.insert at end', async () => {
|
||||||
|
await expect(`arr = [1 2]; list.insert arr 2 99; arr`).toEvaluateTo([1, 2, 99])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.sort with no callback sorts ascending', async () => {
|
||||||
|
await expect(`list.sort [3 1 4 1 5] null`).toEvaluateTo([1, 1, 3, 4, 5])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.sort with callback sorts using comparator', async () => {
|
||||||
|
await expect(`
|
||||||
|
desc = do a b:
|
||||||
|
b - a
|
||||||
|
end
|
||||||
|
list.sort [3 1 4 1 5] desc
|
||||||
|
`).toEvaluateTo([5, 4, 3, 1, 1])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list.sort with callback for strings by length', async () => {
|
||||||
|
await expect(`
|
||||||
|
by-length = do a b:
|
||||||
|
(length a) - (length b)
|
||||||
|
end
|
||||||
|
list.sort ['cat' 'a' 'dog' 'elephant'] by-length
|
||||||
|
`).toEvaluateTo(['a', 'cat', 'dog', 'elephant'])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.any? checks if any element matches', async () => {
|
test('list.any? checks if any element matches', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
gt-three = do x: x > 3 end
|
gt-three = do x: x > 3 end
|
||||||
list.any? [1 2 4 5] gt-three
|
list.any? [1 2 4 5] gt-three
|
||||||
`).toEvaluateTo(true, globals)
|
`).toEvaluateTo(true)
|
||||||
await expect(`
|
await expect(`
|
||||||
gt-ten = do x: x > 10 end
|
gt-ten = do x: x > 10 end
|
||||||
list.any? [1 2 3] gt-ten
|
list.any? [1 2 3] gt-ten
|
||||||
`).toEvaluateTo(false, globals)
|
`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.all? checks if all elements match', async () => {
|
test('list.all? checks if all elements match', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
positive = do x: x > 0 end
|
positive = do x: x > 0 end
|
||||||
list.all? [1 2 3] positive
|
list.all? [1 2 3] positive
|
||||||
`).toEvaluateTo(true, globals)
|
`).toEvaluateTo(true)
|
||||||
await expect(`
|
await expect(`
|
||||||
positive = do x: x > 0 end
|
positive = do x: x > 0 end
|
||||||
list.all? [1 -2 3] positive
|
list.all? [1 -2 3] positive
|
||||||
`).toEvaluateTo(false, globals)
|
`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.sum adds all numbers', async () => {
|
test('list.sum adds all numbers', async () => {
|
||||||
await expect(`list.sum [1 2 3 4]`).toEvaluateTo(10, globals)
|
await expect(`list.sum [1 2 3 4]`).toEvaluateTo(10)
|
||||||
await expect(`list.sum []`).toEvaluateTo(0, globals)
|
await expect(`list.sum []`).toEvaluateTo(0)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.count counts matching elements', async () => {
|
test('list.count counts matching elements', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
gt-two = do x: x > 2 end
|
gt-two = do x: x > 2 end
|
||||||
list.count [1 2 3 4 5] gt-two
|
list.count [1 2 3 4 5] gt-two
|
||||||
`).toEvaluateTo(3, globals)
|
`).toEvaluateTo(3)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.partition splits array by predicate', async () => {
|
test('list.partition splits array by predicate', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
gt-two = do x: x > 2 end
|
gt-two = do x: x > 2 end
|
||||||
list.partition [1 2 3 4 5] gt-two
|
list.partition [1 2 3 4 5] gt-two
|
||||||
`).toEvaluateTo([[3, 4, 5], [1, 2]], globals)
|
`).toEvaluateTo([[3, 4, 5], [1, 2]])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.compact removes null values', async () => {
|
test('list.compact removes null values', async () => {
|
||||||
await expect(`list.compact [1 null 2 null 3]`).toEvaluateTo([1, 2, 3], globals)
|
await expect(`list.compact [1 null 2 null 3]`).toEvaluateTo([1, 2, 3])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('list.group-by groups by key function', async () => {
|
test('list.group-by groups by key function', async () => {
|
||||||
|
|
@ -396,7 +464,7 @@ describe('collections', () => {
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
list.group-by ['a' 1 'b' 2] get-type
|
list.group-by ['a' 1 'b' 2] get-type
|
||||||
`).toEvaluateTo({ str: ['a', 'b'], num: [1, 2] }, globals)
|
`).toEvaluateTo({ str: ['a', 'b'], num: [1, 2] })
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -405,14 +473,14 @@ describe('enumerables', () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
double = do x: x * 2 end
|
double = do x: x * 2 end
|
||||||
list.map [1 2 3] double
|
list.map [1 2 3] double
|
||||||
`).toEvaluateTo([2, 4, 6], globals)
|
`).toEvaluateTo([2, 4, 6])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('map handles empty array', async () => {
|
test('map handles empty array', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
double = do x: x * 2 end
|
double = do x: x * 2 end
|
||||||
list.map [] double
|
list.map [] double
|
||||||
`).toEvaluateTo([], globals)
|
`).toEvaluateTo([])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('each iterates over array', async () => {
|
test('each iterates over array', async () => {
|
||||||
|
|
@ -421,165 +489,146 @@ describe('enumerables', () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
double = do x: x * 2 end
|
double = do x: x * 2 end
|
||||||
each [1 2 3] double
|
each [1 2 3] double
|
||||||
`).toEvaluateTo([1, 2, 3], globals)
|
`).toEvaluateTo([1, 2, 3])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('each handles empty array', async () => {
|
test('each handles empty array', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
fn = do x: x end
|
fn = do x: x end
|
||||||
each [] fn
|
each [] fn
|
||||||
`).toEvaluateTo([], globals)
|
`).toEvaluateTo([])
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('dict operations', () => {
|
describe('dict operations', () => {
|
||||||
test('dict.keys returns all keys', async () => {
|
test('dict.keys returns all keys', async () => {
|
||||||
const result = await (async () => {
|
await expect(`dict.keys [a=1 b=2 c=3] | list.sort`).toEvaluateTo(['a', 'b', 'c'].sort())
|
||||||
const { Compiler } = await import('#compiler/compiler')
|
|
||||||
const { run, fromValue } = await import('reefvm')
|
|
||||||
const { setGlobals } = await import('#parser/tokenizer')
|
|
||||||
setGlobals(Object.keys(globals))
|
|
||||||
const c = new Compiler('dict.keys [a=1 b=2 c=3]')
|
|
||||||
const r = await run(c.bytecode, globals)
|
|
||||||
return fromValue(r)
|
|
||||||
})()
|
|
||||||
// Check that all expected keys are present (order may vary)
|
|
||||||
expect(result.sort()).toEqual(['a', 'b', 'c'])
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.values returns all values', async () => {
|
test('dict.values returns all values', async () => {
|
||||||
const result = await (async () => {
|
await expect('dict.values [a=1 b=2] | list.sort').toEvaluateTo([1, 2].sort())
|
||||||
const { Compiler } = await import('#compiler/compiler')
|
|
||||||
const { run, fromValue } = await import('reefvm')
|
|
||||||
const { setGlobals } = await import('#parser/tokenizer')
|
|
||||||
setGlobals(Object.keys(globals))
|
|
||||||
const c = new Compiler('dict.values [a=1 b=2]')
|
|
||||||
const r = await run(c.bytecode, globals)
|
|
||||||
return fromValue(r)
|
|
||||||
})()
|
|
||||||
// Check that all expected values are present (order may vary)
|
|
||||||
expect(result.sort()).toEqual([1, 2])
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.has? checks for key', async () => {
|
test('dict.has? checks for key', async () => {
|
||||||
await expect(`dict.has? [a=1 b=2] 'a'`).toEvaluateTo(true, globals)
|
await expect(`dict.has? [a=1 b=2] 'a'`).toEvaluateTo(true)
|
||||||
await expect(`dict.has? [a=1 b=2] 'c'`).toEvaluateTo(false, globals)
|
await expect(`dict.has? [a=1 b=2] 'c'`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.get retrieves value with default', async () => {
|
test('dict.get retrieves value with default', async () => {
|
||||||
await expect(`dict.get [a=1] 'a' 0`).toEvaluateTo(1, globals)
|
await expect(`dict.get [a=1] 'a' 0`).toEvaluateTo(1)
|
||||||
await expect(`dict.get [a=1] 'b' 99`).toEvaluateTo(99, globals)
|
await expect(`dict.get [a=1] 'b' 99`).toEvaluateTo(99)
|
||||||
|
await expect(`dict.get [a=1] 'b'`).toEvaluateTo(null)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.set sets value', async () => {
|
test('dict.set sets value', async () => {
|
||||||
await expect(`map = [a=1]; dict.set map 'b' 99; map.b`).toEvaluateTo(99, globals)
|
await expect(`map = [a=1]; dict.set map 'b' 99; map.b`).toEvaluateTo(99)
|
||||||
await expect(`map = [a=1]; dict.set map 'a' 100; map.a`).toEvaluateTo(100, globals)
|
await expect(`map = [a=1]; dict.set map 'a' 100; map.a`).toEvaluateTo(100)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.empty? checks if dict is empty', async () => {
|
test('dict.empty? checks if dict is empty', async () => {
|
||||||
await expect(`dict.empty? [=]`).toEvaluateTo(true, globals)
|
await expect(`dict.empty? [=]`).toEvaluateTo(true)
|
||||||
await expect(`dict.empty? [a=1]`).toEvaluateTo(false, globals)
|
await expect(`dict.empty? [a=1]`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.merge combines dicts', async () => {
|
test('dict.merge combines dicts', async () => {
|
||||||
await expect(`dict.merge [a=1] [b=2]`).toEvaluateTo({ a: 1, b: 2 }, globals)
|
await expect(`dict.merge [a=1] [b=2]`).toEvaluateTo({ a: 1, b: 2 })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.map transforms values', async () => {
|
test('dict.map transforms values', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
double = do v k: v * 2 end
|
double = do v k: v * 2 end
|
||||||
dict.map [a=1 b=2] double
|
dict.map [a=1 b=2] double
|
||||||
`).toEvaluateTo({ a: 2, b: 4 }, globals)
|
`).toEvaluateTo({ a: 2, b: 4 })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.filter keeps matching entries', async () => {
|
test('dict.filter keeps matching entries', async () => {
|
||||||
await expect(`
|
await expect(`
|
||||||
gt-one = do v k: v > 1 end
|
gt-one = do v k: v > 1 end
|
||||||
dict.filter [a=1 b=2 c=3] gt-one
|
dict.filter [a=1 b=2 c=3] gt-one
|
||||||
`).toEvaluateTo({ b: 2, c: 3 }, globals)
|
`).toEvaluateTo({ b: 2, c: 3 })
|
||||||
})
|
})
|
||||||
|
|
||||||
test('dict.from-entries creates dict from array', async () => {
|
test('dict.from-entries creates dict from array', async () => {
|
||||||
await expect(`dict.from-entries [['a' 1] ['b' 2]]`).toEvaluateTo({ a: 1, b: 2 }, globals)
|
await expect(`dict.from-entries [['a' 1] ['b' 2]]`).toEvaluateTo({ a: 1, b: 2 })
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('math operations', () => {
|
describe('math operations', () => {
|
||||||
test('math.abs returns absolute value', async () => {
|
test('math.abs returns absolute value', async () => {
|
||||||
await expect(`math.abs -5`).toEvaluateTo(5, globals)
|
await expect(`math.abs -5`).toEvaluateTo(5)
|
||||||
await expect(`math.abs 5`).toEvaluateTo(5, globals)
|
await expect(`math.abs 5`).toEvaluateTo(5)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.floor rounds down', async () => {
|
test('math.floor rounds down', async () => {
|
||||||
await expect(`math.floor 3.7`).toEvaluateTo(3, globals)
|
await expect(`math.floor 3.7`).toEvaluateTo(3)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.ceil rounds up', async () => {
|
test('math.ceil rounds up', async () => {
|
||||||
await expect(`math.ceil 3.2`).toEvaluateTo(4, globals)
|
await expect(`math.ceil 3.2`).toEvaluateTo(4)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.round rounds to nearest', async () => {
|
test('math.round rounds to nearest', async () => {
|
||||||
await expect(`math.round 3.4`).toEvaluateTo(3, globals)
|
await expect(`math.round 3.4`).toEvaluateTo(3)
|
||||||
await expect(`math.round 3.6`).toEvaluateTo(4, globals)
|
await expect(`math.round 3.6`).toEvaluateTo(4)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.min returns minimum', async () => {
|
test('math.min returns minimum', async () => {
|
||||||
await expect(`math.min 5 2 8 1`).toEvaluateTo(1, globals)
|
await expect(`math.min 5 2 8 1`).toEvaluateTo(1)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.max returns maximum', async () => {
|
test('math.max returns maximum', async () => {
|
||||||
await expect(`math.max 5 2 8 1`).toEvaluateTo(8, globals)
|
await expect(`math.max 5 2 8 1`).toEvaluateTo(8)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.pow computes power', async () => {
|
test('math.pow computes power', async () => {
|
||||||
await expect(`math.pow 2 3`).toEvaluateTo(8, globals)
|
await expect(`math.pow 2 3`).toEvaluateTo(8)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.sqrt computes square root', async () => {
|
test('math.sqrt computes square root', async () => {
|
||||||
await expect(`math.sqrt 16`).toEvaluateTo(4, globals)
|
await expect(`math.sqrt 16`).toEvaluateTo(4)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.even? checks if even', async () => {
|
test('math.even? checks if even', async () => {
|
||||||
await expect(`math.even? 4`).toEvaluateTo(true, globals)
|
await expect(`math.even? 4`).toEvaluateTo(true)
|
||||||
await expect(`math.even? 5`).toEvaluateTo(false, globals)
|
await expect(`math.even? 5`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.odd? checks if odd', async () => {
|
test('math.odd? checks if odd', async () => {
|
||||||
await expect(`math.odd? 5`).toEvaluateTo(true, globals)
|
await expect(`math.odd? 5`).toEvaluateTo(true)
|
||||||
await expect(`math.odd? 4`).toEvaluateTo(false, globals)
|
await expect(`math.odd? 4`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.positive? checks if positive', async () => {
|
test('math.positive? checks if positive', async () => {
|
||||||
await expect(`math.positive? 5`).toEvaluateTo(true, globals)
|
await expect(`math.positive? 5`).toEvaluateTo(true)
|
||||||
await expect(`math.positive? -5`).toEvaluateTo(false, globals)
|
await expect(`math.positive? -5`).toEvaluateTo(false)
|
||||||
await expect(`math.positive? 0`).toEvaluateTo(false, globals)
|
await expect(`math.positive? 0`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.negative? checks if negative', async () => {
|
test('math.negative? checks if negative', async () => {
|
||||||
await expect(`math.negative? -5`).toEvaluateTo(true, globals)
|
await expect(`math.negative? -5`).toEvaluateTo(true)
|
||||||
await expect(`math.negative? 5`).toEvaluateTo(false, globals)
|
await expect(`math.negative? 5`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.zero? checks if zero', async () => {
|
test('math.zero? checks if zero', async () => {
|
||||||
await expect(`math.zero? 0`).toEvaluateTo(true, globals)
|
await expect(`math.zero? 0`).toEvaluateTo(true)
|
||||||
await expect(`math.zero? 5`).toEvaluateTo(false, globals)
|
await expect(`math.zero? 5`).toEvaluateTo(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.clamp restricts value to range', async () => {
|
test('math.clamp restricts value to range', async () => {
|
||||||
await expect(`math.clamp 5 0 10`).toEvaluateTo(5, globals)
|
await expect(`math.clamp 5 0 10`).toEvaluateTo(5)
|
||||||
await expect(`math.clamp -5 0 10`).toEvaluateTo(0, globals)
|
await expect(`math.clamp -5 0 10`).toEvaluateTo(0)
|
||||||
await expect(`math.clamp 15 0 10`).toEvaluateTo(10, globals)
|
await expect(`math.clamp 15 0 10`).toEvaluateTo(10)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.sign returns sign of number', async () => {
|
test('math.sign returns sign of number', async () => {
|
||||||
await expect(`math.sign 5`).toEvaluateTo(1, globals)
|
await expect(`math.sign 5`).toEvaluateTo(1)
|
||||||
await expect(`math.sign -5`).toEvaluateTo(-1, globals)
|
await expect(`math.sign -5`).toEvaluateTo(-1)
|
||||||
await expect(`math.sign 0`).toEvaluateTo(0, globals)
|
await expect(`math.sign 0`).toEvaluateTo(0)
|
||||||
})
|
})
|
||||||
|
|
||||||
test('math.trunc truncates decimal', async () => {
|
test('math.trunc truncates decimal', async () => {
|
||||||
await expect(`math.trunc 3.7`).toEvaluateTo(3, globals)
|
await expect(`math.trunc 3.7`).toEvaluateTo(3)
|
||||||
await expect(`math.trunc -3.7`).toEvaluateTo(-3, globals)
|
await expect(`math.trunc -3.7`).toEvaluateTo(-3)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
|
||||||
143
src/prelude/tests/types.test.ts
Normal file
143
src/prelude/tests/types.test.ts
Normal file
|
|
@ -0,0 +1,143 @@
|
||||||
|
import { expect, describe, test } from 'bun:test'
|
||||||
|
|
||||||
|
describe('type predicates', () => {
|
||||||
|
test('boolean? checks if value is boolean', async () => {
|
||||||
|
await expect(`boolean? true`).toEvaluateTo(true)
|
||||||
|
await expect(`boolean? false`).toEvaluateTo(true)
|
||||||
|
await expect(`boolean? 42`).toEvaluateTo(false)
|
||||||
|
await expect(`boolean? 'hello'`).toEvaluateTo(false)
|
||||||
|
await expect(`boolean? null`).toEvaluateTo(false)
|
||||||
|
await expect(`boolean? [1 2 3]`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('number? checks if value is number', async () => {
|
||||||
|
await expect(`number? 42`).toEvaluateTo(true)
|
||||||
|
await expect(`number? 3.14`).toEvaluateTo(true)
|
||||||
|
await expect(`number? 0`).toEvaluateTo(true)
|
||||||
|
await expect(`number? -5`).toEvaluateTo(true)
|
||||||
|
await expect(`number? 'hello'`).toEvaluateTo(false)
|
||||||
|
await expect(`number? true`).toEvaluateTo(false)
|
||||||
|
await expect(`number? null`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('string? checks if value is string', async () => {
|
||||||
|
await expect(`string? 'hello'`).toEvaluateTo(true)
|
||||||
|
await expect(`string? ''`).toEvaluateTo(true)
|
||||||
|
await expect(`string? world`).toEvaluateTo(true)
|
||||||
|
await expect(`string? 42`).toEvaluateTo(false)
|
||||||
|
await expect(`string? true`).toEvaluateTo(false)
|
||||||
|
await expect(`string? null`).toEvaluateTo(false)
|
||||||
|
await expect(`string? [1 2 3]`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('array? checks if value is array', async () => {
|
||||||
|
await expect(`array? [1 2 3]`).toEvaluateTo(true)
|
||||||
|
await expect(`array? []`).toEvaluateTo(true)
|
||||||
|
await expect(`array? ['a' 'b']`).toEvaluateTo(true)
|
||||||
|
await expect(`array? [a=1 b=2]`).toEvaluateTo(false)
|
||||||
|
await expect(`array? 42`).toEvaluateTo(false)
|
||||||
|
await expect(`array? 'hello'`).toEvaluateTo(false)
|
||||||
|
await expect(`array? null`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('list? is alias for array?', async () => {
|
||||||
|
await expect(`list? [1 2 3]`).toEvaluateTo(true)
|
||||||
|
await expect(`list? []`).toEvaluateTo(true)
|
||||||
|
await expect(`list? [a=1 b=2]`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('dict? checks if value is dict', async () => {
|
||||||
|
await expect(`dict? [a=1 b=2]`).toEvaluateTo(true)
|
||||||
|
await expect(`dict? [=]`).toEvaluateTo(true)
|
||||||
|
await expect(`dict? [1 2 3]`).toEvaluateTo(false)
|
||||||
|
await expect(`dict? []`).toEvaluateTo(false)
|
||||||
|
await expect(`dict? 42`).toEvaluateTo(false)
|
||||||
|
await expect(`dict? 'hello'`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function? checks if value is function', async () => {
|
||||||
|
await expect(`
|
||||||
|
my-fn = do x: x * 2 end
|
||||||
|
function? my-fn
|
||||||
|
`).toEvaluateTo(true)
|
||||||
|
await expect(`function? inc`).toEvaluateTo(true)
|
||||||
|
await expect(`function? list.map`).toEvaluateTo(true)
|
||||||
|
await expect(`function? 42`).toEvaluateTo(false)
|
||||||
|
await expect(`function? 'hello'`).toEvaluateTo(false)
|
||||||
|
await expect(`function? [1 2 3]`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('null? checks if value is null', async () => {
|
||||||
|
await expect(`null? null`).toEvaluateTo(true)
|
||||||
|
await expect(`null? 0`).toEvaluateTo(false)
|
||||||
|
await expect(`null? false`).toEvaluateTo(false)
|
||||||
|
await expect(`null? ''`).toEvaluateTo(false)
|
||||||
|
await expect(`null? []`).toEvaluateTo(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('type coercion', () => {
|
||||||
|
test('boolean coerces to boolean', async () => {
|
||||||
|
await expect(`boolean true`).toEvaluateTo(true)
|
||||||
|
await expect(`boolean false`).toEvaluateTo(false)
|
||||||
|
await expect(`boolean 1`).toEvaluateTo(true)
|
||||||
|
await expect(`boolean 0`).toEvaluateTo(false)
|
||||||
|
await expect(`boolean 'hello'`).toEvaluateTo(true)
|
||||||
|
await expect(`boolean ''`).toEvaluateTo(false)
|
||||||
|
await expect(`boolean null`).toEvaluateTo(false)
|
||||||
|
await expect(`boolean [1 2 3]`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('number coerces to number', async () => {
|
||||||
|
await expect(`number 42`).toEvaluateTo(42)
|
||||||
|
await expect(`number '42'`).toEvaluateTo(42)
|
||||||
|
await expect(`number '3.14'`).toEvaluateTo(3.14)
|
||||||
|
await expect(`number true`).toEvaluateTo(1)
|
||||||
|
await expect(`number false`).toEvaluateTo(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('string coerces to string', async () => {
|
||||||
|
await expect(`string 'hello'`).toEvaluateTo('hello')
|
||||||
|
await expect(`string 42`).toEvaluateTo('42')
|
||||||
|
await expect(`string true`).toEvaluateTo('true')
|
||||||
|
await expect(`string false`).toEvaluateTo('false')
|
||||||
|
await expect(`string null`).toEvaluateTo('null')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('type predicates in conditionals', () => {
|
||||||
|
test('using type predicates in if statements', async () => {
|
||||||
|
await expect(`
|
||||||
|
x = 42
|
||||||
|
if (number? x):
|
||||||
|
'is-num'
|
||||||
|
else:
|
||||||
|
'not-num'
|
||||||
|
end
|
||||||
|
`).toEvaluateTo('is-num')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('filtering by type', async () => {
|
||||||
|
await expect(`
|
||||||
|
items = [1 'hello' 2 'world' 3]
|
||||||
|
list.filter items number?
|
||||||
|
`).toEvaluateTo([1, 2, 3])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('filtering strings', async () => {
|
||||||
|
await expect(`
|
||||||
|
items = [1 'hello' 2 'world' 3]
|
||||||
|
list.filter items string?
|
||||||
|
`).toEvaluateTo(['hello', 'world'])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('checking for functions', async () => {
|
||||||
|
await expect(`
|
||||||
|
double = do x: x * 2 end
|
||||||
|
not-fn = 42
|
||||||
|
is-fn = function? double
|
||||||
|
is-not-fn = function? not-fn
|
||||||
|
is-fn and (not is-not-fn)
|
||||||
|
`).toEvaluateTo(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
22
src/prelude/types.ts
Normal file
22
src/prelude/types.ts
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
import { toValue } from 'reefvm'
|
||||||
|
|
||||||
|
export const types = {
|
||||||
|
'boolean?': (v: any) => toValue(v).type === 'boolean',
|
||||||
|
boolean: (v: any) => Boolean(v),
|
||||||
|
|
||||||
|
'number?': (v: any) => toValue(v).type === 'number',
|
||||||
|
number: (v: any) => Number(v),
|
||||||
|
|
||||||
|
'string?': (v: any) => toValue(v).type === 'string',
|
||||||
|
string: (v: any) => String(v),
|
||||||
|
|
||||||
|
|
||||||
|
'array?': (v: any) => toValue(v).type === 'array',
|
||||||
|
'list?': (v: any) => toValue(v).type === 'array',
|
||||||
|
|
||||||
|
'dict?': (v: any) => toValue(v).type === 'dict',
|
||||||
|
|
||||||
|
'function?': (v: any) => ['function', 'native'].includes(toValue(v).type),
|
||||||
|
|
||||||
|
'null?': (v: any) => toValue(v).type === 'null',
|
||||||
|
}
|
||||||
176
src/testSetup.ts
176
src/testSetup.ts
|
|
@ -1,32 +1,14 @@
|
||||||
import { expect } from 'bun:test'
|
import { expect } from 'bun:test'
|
||||||
import { parser } from '#parser/shrimp'
|
import { diffLines } from 'diff'
|
||||||
import { setGlobals } from '#parser/tokenizer'
|
import color from 'kleur'
|
||||||
import { $ } from 'bun'
|
import { Scanner, TokenType, type Token } from '#parser/tokenizer2'
|
||||||
|
import { parse, setGlobals } from '#parser/parser2'
|
||||||
|
import { Tree } from '#parser/node'
|
||||||
|
import { globals as prelude } from '#prelude'
|
||||||
import { assert, errorMessage } from '#utils/utils'
|
import { assert, errorMessage } from '#utils/utils'
|
||||||
import { Compiler } from '#compiler/compiler'
|
import { Compiler } from '#compiler/compiler'
|
||||||
import { run, VM } from 'reefvm'
|
import { run, VM } from 'reefvm'
|
||||||
import { treeToString, VMResultToValue } from '#utils/tree'
|
import { treeToString2, VMResultToValue } from '#utils/tree'
|
||||||
|
|
||||||
const regenerateParser = async () => {
|
|
||||||
let generate = true
|
|
||||||
try {
|
|
||||||
const grammarStat = await Bun.file('./src/parser/shrimp.grammar').stat()
|
|
||||||
const tokenizerStat = await Bun.file('./src/parser/tokenizer.ts').stat()
|
|
||||||
const parserStat = await Bun.file('./src/parser/shrimp.ts').stat()
|
|
||||||
|
|
||||||
if (grammarStat.mtime <= parserStat.mtime && tokenizerStat.mtime <= parserStat.mtime) {
|
|
||||||
generate = false
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
console.error('Error checking or regenerating parser:', e)
|
|
||||||
} finally {
|
|
||||||
if (generate) {
|
|
||||||
await $`bun generate-parser`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await regenerateParser()
|
|
||||||
|
|
||||||
// Type declaration for TypeScript
|
// Type declaration for TypeScript
|
||||||
declare module 'bun:test' {
|
declare module 'bun:test' {
|
||||||
|
|
@ -36,6 +18,9 @@ declare module 'bun:test' {
|
||||||
toFailParse(): T
|
toFailParse(): T
|
||||||
toEvaluateTo(expected: unknown, globals?: Record<string, any>): Promise<T>
|
toEvaluateTo(expected: unknown, globals?: Record<string, any>): Promise<T>
|
||||||
toFailEvaluation(): Promise<T>
|
toFailEvaluation(): Promise<T>
|
||||||
|
toBeToken(expected: string): T
|
||||||
|
toMatchToken(typeOrValue: string, value?: string): T
|
||||||
|
toMatchTokens(...tokens: { type: string, value?: string }[]): T
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -43,9 +28,10 @@ expect.extend({
|
||||||
toMatchTree(received: unknown, expected: string, globals?: Record<string, any>) {
|
toMatchTree(received: unknown, expected: string, globals?: Record<string, any>) {
|
||||||
assert(typeof received === 'string', 'toMatchTree can only be used with string values')
|
assert(typeof received === 'string', 'toMatchTree can only be used with string values')
|
||||||
|
|
||||||
if (globals) setGlobals(Object.keys(globals))
|
const allGlobals = { ...prelude, ...(globals || {}) }
|
||||||
const tree = parser.parse(received)
|
setGlobals(Object.keys(allGlobals))
|
||||||
const actual = treeToString(tree, received)
|
const tree = parse(received)
|
||||||
|
const actual = treeToString2(tree, received)
|
||||||
const normalizedExpected = trimWhitespace(expected)
|
const normalizedExpected = trimWhitespace(expected)
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|
@ -64,7 +50,8 @@ expect.extend({
|
||||||
assert(typeof received === 'string', 'toFailParse can only be used with string values')
|
assert(typeof received === 'string', 'toFailParse can only be used with string values')
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const tree = parser.parse(received)
|
const node = parse(received)
|
||||||
|
const tree = new Tree(node)
|
||||||
let hasErrors = false
|
let hasErrors = false
|
||||||
tree.iterate({
|
tree.iterate({
|
||||||
enter(n) {
|
enter(n) {
|
||||||
|
|
@ -81,7 +68,7 @@ expect.extend({
|
||||||
pass: true,
|
pass: true,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const actual = treeToString(tree, received)
|
const actual = treeToString2(node, received)
|
||||||
return {
|
return {
|
||||||
message: () => `Expected input to fail parsing, but it parsed successfully:\n${actual}`,
|
message: () => `Expected input to fail parsing, but it parsed successfully:\n${actual}`,
|
||||||
pass: false,
|
pass: false,
|
||||||
|
|
@ -99,9 +86,10 @@ expect.extend({
|
||||||
assert(typeof received === 'string', 'toEvaluateTo can only be used with string values')
|
assert(typeof received === 'string', 'toEvaluateTo can only be used with string values')
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (globals) setGlobals(Object.keys(globals))
|
const allGlobals = { ...prelude, ...(globals || {}) }
|
||||||
|
setGlobals(Object.keys(allGlobals))
|
||||||
const compiler = new Compiler(received)
|
const compiler = new Compiler(received)
|
||||||
const result = await run(compiler.bytecode, globals)
|
const result = await run(compiler.bytecode, allGlobals)
|
||||||
let value = VMResultToValue(result)
|
let value = VMResultToValue(result)
|
||||||
|
|
||||||
// Just treat regex as strings for comparison purposes
|
// Just treat regex as strings for comparison purposes
|
||||||
|
|
@ -141,8 +129,107 @@ expect.extend({
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
toBeToken(received: unknown, expected: string) {
|
||||||
|
assert(typeof received === 'string', 'toBeToken can only be used with string values')
|
||||||
|
|
||||||
|
try {
|
||||||
|
const tokens = tokenize(received)
|
||||||
|
const value = tokens[0] as Token
|
||||||
|
const target = TokenType[expected as keyof typeof TokenType]
|
||||||
|
|
||||||
|
if (!value) {
|
||||||
|
return {
|
||||||
|
message: () => `Expected token type to be ${expected}, but got ${value}`,
|
||||||
|
pass: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
message: () => `Expected token type to be ${expected}, but got ${TokenType[value.type]}`,
|
||||||
|
pass: value.type === target
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
message: () => `Tokenization failed: ${errorMessage(error)}`,
|
||||||
|
pass: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
toMatchToken(received: unknown, typeOrValue: string, value?: string) {
|
||||||
|
assert(typeof received === 'string', 'toMatchToken can only be used with string values')
|
||||||
|
const expectedValue = value ? value : typeOrValue
|
||||||
|
const expectedType = value ? typeOrValue : undefined
|
||||||
|
|
||||||
|
try {
|
||||||
|
const tokens = tokenize(received)
|
||||||
|
const token = tokens[0] as Token
|
||||||
|
|
||||||
|
if (!token) {
|
||||||
|
return {
|
||||||
|
message: () => `Expected token to be ${expectedValue.replaceAll('\n', '\\n')}, got ${token}`,
|
||||||
|
pass: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (expectedType && TokenType[expectedType as keyof typeof TokenType] !== token.type) {
|
||||||
|
return {
|
||||||
|
message: () => `Expected token to be ${expectedType}, but got ${TokenType[token.type]}`,
|
||||||
|
pass: false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
message: () => `Expected token to be ${expectedValue.replaceAll('\n', '\\n')}, but got ${token.value}`,
|
||||||
|
pass: token.value === expectedValue
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
message: () => `Tokenization failed: ${errorMessage(error)} `,
|
||||||
|
pass: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
toMatchTokens(received: unknown, ...tokens: { type: string, value?: string }[]) {
|
||||||
|
assert(typeof received === 'string', 'toMatchTokens can only be used with string values')
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = tokenize(received).map(t => toHumanToken(t))
|
||||||
|
|
||||||
|
if (result.length === 0 && tokens.length > 0) {
|
||||||
|
return {
|
||||||
|
message: () => `Expected tokens ${JSON.stringify(tokens)}, got nothing`,
|
||||||
|
pass: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const expected = JSON.stringify(tokens, null, 2)
|
||||||
|
const actual = JSON.stringify(result, null, 2)
|
||||||
|
|
||||||
|
return {
|
||||||
|
message: () => `Tokens don't match: \n\n${diff(actual, expected)}`,
|
||||||
|
pass: expected == actual
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
message: () => `Tokenization failed: ${errorMessage(error)} `,
|
||||||
|
pass: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
|
const tokenize = (code: string): Token[] => {
|
||||||
|
const scanner = new Scanner
|
||||||
|
return scanner.tokenize(code)
|
||||||
|
}
|
||||||
|
|
||||||
|
const toHumanToken = (tok: Token): { type: string, value?: string } => {
|
||||||
|
return {
|
||||||
|
type: TokenType[tok.type],
|
||||||
|
value: tok.value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const trimWhitespace = (str: string): string => {
|
const trimWhitespace = (str: string): string => {
|
||||||
const lines = str.split('\n').filter((line) => line.trim().length > 0)
|
const lines = str.split('\n').filter((line) => line.trim().length > 0)
|
||||||
const firstLine = lines[0]
|
const firstLine = lines[0]
|
||||||
|
|
@ -154,10 +241,33 @@ const trimWhitespace = (str: string): string => {
|
||||||
if (!line.startsWith(leadingWhitespace)) {
|
if (!line.startsWith(leadingWhitespace)) {
|
||||||
let foundWhitespace = line.match(/^(\s*)/)?.[1] || ''
|
let foundWhitespace = line.match(/^(\s*)/)?.[1] || ''
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`Line has inconsistent leading whitespace: "${line}" (found "${foundWhitespace}", expected "${leadingWhitespace}")`
|
`Line has inconsistent leading whitespace: "${line}"(found "${foundWhitespace}", expected "${leadingWhitespace}")`
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
return line.slice(leadingWhitespace.length)
|
return line.slice(leadingWhitespace.length)
|
||||||
})
|
})
|
||||||
.join('\n')
|
.join('\n')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const diff = (a: string, b: string): string => {
|
||||||
|
const expected = a.trim()
|
||||||
|
const actual = b.trim()
|
||||||
|
const lines = []
|
||||||
|
|
||||||
|
if (expected !== actual) {
|
||||||
|
const changes = diffLines(actual, expected)
|
||||||
|
for (const part of changes) {
|
||||||
|
const sign = part.added ? "+" : part.removed ? "-" : " "
|
||||||
|
let line = sign + part.value
|
||||||
|
if (part.added) {
|
||||||
|
line = color.green(line)
|
||||||
|
} else if (part.removed) {
|
||||||
|
line = color.red(line)
|
||||||
|
}
|
||||||
|
|
||||||
|
lines.push(line.endsWith("\n") || line.endsWith("\n\u001b[39m") ? line : line + "\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join('\n')
|
||||||
|
}
|
||||||
452
src/tests/shrimp.test.ts
Normal file
452
src/tests/shrimp.test.ts
Normal file
|
|
@ -0,0 +1,452 @@
|
||||||
|
import { describe } from 'bun:test'
|
||||||
|
import { expect, test } from 'bun:test'
|
||||||
|
import { Shrimp, runCode, compileCode, parseCode, bytecodeToString } from '..'
|
||||||
|
|
||||||
|
describe('Shrimp', () => {
|
||||||
|
test('allows running Shrimp code', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
expect(await shrimp.run(`1 + 5`)).toEqual(6)
|
||||||
|
expect(await shrimp.run(`type 5`)).toEqual('number')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('maintains state across runs', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run(`abc = true`)
|
||||||
|
expect(shrimp.get('abc')).toEqual(true)
|
||||||
|
|
||||||
|
await shrimp.run(`name = Bob`)
|
||||||
|
expect(shrimp.get('abc')).toEqual(true)
|
||||||
|
expect(shrimp.get('name')).toEqual('Bob')
|
||||||
|
|
||||||
|
await shrimp.run(`abc = false`)
|
||||||
|
expect(shrimp.get('abc')).toEqual(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('allows setting your own globals', async () => {
|
||||||
|
const shrimp = new Shrimp({ hiya: () => 'hey there' })
|
||||||
|
|
||||||
|
await shrimp.run('abc = hiya')
|
||||||
|
expect(shrimp.get('abc')).toEqual('hey there')
|
||||||
|
expect(await shrimp.run('type abc')).toEqual('string')
|
||||||
|
|
||||||
|
// still there
|
||||||
|
expect(await shrimp.run('hiya')).toEqual('hey there')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('allows setting your own locals', async () => {
|
||||||
|
const shrimp = new Shrimp({ 'my-global': () => 'hey there' })
|
||||||
|
|
||||||
|
await shrimp.run('abc = my-global')
|
||||||
|
expect(shrimp.get('abc')).toEqual('hey there')
|
||||||
|
|
||||||
|
await shrimp.run('abc = my-global', { 'my-global': 'now a local' })
|
||||||
|
expect(shrimp.get('abc')).toEqual('now a local')
|
||||||
|
|
||||||
|
await shrimp.run('abc = nothing')
|
||||||
|
expect(shrimp.get('abc')).toEqual('nothing')
|
||||||
|
await shrimp.run('abc = nothing', { nothing: 'something' })
|
||||||
|
expect(shrimp.get('abc')).toEqual('something')
|
||||||
|
await shrimp.run('abc = nothing')
|
||||||
|
expect(shrimp.get('abc')).toEqual('nothing')
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('set()', () => {
|
||||||
|
test('allows setting variables', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
shrimp.set('foo', 42)
|
||||||
|
expect(shrimp.get('foo')).toEqual(42)
|
||||||
|
|
||||||
|
shrimp.set('bar', 'hello')
|
||||||
|
expect(shrimp.get('bar')).toEqual('hello')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('set variables are accessible in code', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
shrimp.set('x', 10)
|
||||||
|
shrimp.set('y', 20)
|
||||||
|
|
||||||
|
const result = await shrimp.run('x + y')
|
||||||
|
expect(result).toEqual(30)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('allows setting functions', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
shrimp.set('double', (n: number) => n * 2)
|
||||||
|
|
||||||
|
const result = await shrimp.run('double 21')
|
||||||
|
expect(result).toEqual(42)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('overwrites existing variables', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run('x = 100')
|
||||||
|
expect(shrimp.get('x')).toEqual(100)
|
||||||
|
|
||||||
|
shrimp.set('x', 200)
|
||||||
|
expect(shrimp.get('x')).toEqual(200)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('has()', () => {
|
||||||
|
test('returns true for existing variables', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run('x = 5')
|
||||||
|
expect(shrimp.has('x')).toEqual(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns false for non-existing variables', () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
expect(shrimp.has('nonexistent')).toEqual(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns true for globals', () => {
|
||||||
|
const shrimp = new Shrimp({ myGlobal: 42 })
|
||||||
|
|
||||||
|
expect(shrimp.has('myGlobal')).toEqual(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns true for prelude functions', () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
expect(shrimp.has('echo')).toEqual(true)
|
||||||
|
expect(shrimp.has('type')).toEqual(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('call()', () => {
|
||||||
|
test('calls Shrimp functions with positional args', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run(`add = do x y:
|
||||||
|
x + y
|
||||||
|
end`)
|
||||||
|
|
||||||
|
const result = await shrimp.call('add', 5, 10)
|
||||||
|
expect(result).toEqual(15)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('calls Shrimp functions with named args', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run(`greet = do name:
|
||||||
|
str.join [ 'Hello ' name ] ''
|
||||||
|
end`)
|
||||||
|
|
||||||
|
const result = await shrimp.call('greet', { name: 'World' })
|
||||||
|
expect(result).toEqual('Hello World')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('calls native functions', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
shrimp.set('multiply', (a: number, b: number) => a * b)
|
||||||
|
|
||||||
|
const result = await shrimp.call('multiply', 6, 7)
|
||||||
|
expect(result).toEqual(42)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('calls prelude functions', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
const result = await shrimp.call('type', 42)
|
||||||
|
expect(result).toEqual('number')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('calls async functions', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
shrimp.set('fetchData', async () => {
|
||||||
|
return await Promise.resolve('async data')
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await shrimp.call('fetchData')
|
||||||
|
expect(result).toEqual('async data')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('compile()', () => {
|
||||||
|
test('compiles code to bytecode', () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
const bytecode = shrimp.compile('x = 5')
|
||||||
|
|
||||||
|
expect(bytecode).toHaveProperty('instructions')
|
||||||
|
expect(bytecode).toHaveProperty('constants')
|
||||||
|
expect(bytecode).toHaveProperty('labels')
|
||||||
|
expect(bytecode.instructions.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('respects globals when compiling', () => {
|
||||||
|
const shrimp = new Shrimp({ customGlobal: 42 })
|
||||||
|
|
||||||
|
const bytecode = shrimp.compile('x = customGlobal')
|
||||||
|
expect(bytecode.instructions.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('compiled bytecode can be run', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
const bytecode = shrimp.compile('2 * 21')
|
||||||
|
const result = await shrimp.run(bytecode)
|
||||||
|
|
||||||
|
expect(result).toEqual(42)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('parse()', () => {
|
||||||
|
test('parses code to syntax tree', () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
const tree = shrimp.parse('x = 5')
|
||||||
|
|
||||||
|
expect(tree).toHaveProperty('length')
|
||||||
|
expect(tree).toHaveProperty('cursor')
|
||||||
|
expect(tree.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('respects globals when parsing', () => {
|
||||||
|
const shrimp = new Shrimp({ myVar: 42 })
|
||||||
|
|
||||||
|
const tree = shrimp.parse('x = myVar + 10')
|
||||||
|
|
||||||
|
// Should parse without errors
|
||||||
|
expect(tree).toHaveProperty('length')
|
||||||
|
expect(tree.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('parses function definitions', () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
const tree = shrimp.parse(`add = do x y:
|
||||||
|
x + y
|
||||||
|
end`)
|
||||||
|
|
||||||
|
expect(tree.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('get()', () => {
|
||||||
|
test('returns null for undefined variables', () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
expect(shrimp.get('undefined')).toEqual(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns values from code execution', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run('x = 42')
|
||||||
|
expect(shrimp.get('x')).toEqual(42)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns arrays', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run('arr = [1 2 3]')
|
||||||
|
expect(shrimp.get('arr')).toEqual([1, 2, 3])
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns dicts', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run('dict = [a=1 b=2]')
|
||||||
|
expect(shrimp.get('dict')).toEqual({ a: 1, b: 2 })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('running bytecode directly', () => {
|
||||||
|
test('can run pre-compiled bytecode', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
const bytecode = shrimp.compile('x = 100')
|
||||||
|
const result = await shrimp.run(bytecode)
|
||||||
|
|
||||||
|
expect(result).toEqual(100)
|
||||||
|
expect(shrimp.get('x')).toEqual(100)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('maintains state across bytecode runs', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
const bytecode1 = shrimp.compile('x = 10')
|
||||||
|
const bytecode2 = shrimp.compile('x + 5')
|
||||||
|
|
||||||
|
await shrimp.run(bytecode1)
|
||||||
|
const result = await shrimp.run(bytecode2)
|
||||||
|
|
||||||
|
expect(result).toEqual(15)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('Functional API', () => {
|
||||||
|
describe('runCode()', () => {
|
||||||
|
test('runs code and returns result', async () => {
|
||||||
|
const result = await runCode('1 + 1')
|
||||||
|
expect(result).toEqual(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('works with globals', async () => {
|
||||||
|
const result = await runCode('greet', { greet: () => 'hello' })
|
||||||
|
expect(result).toEqual('hello')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('has access to prelude', async () => {
|
||||||
|
const result = await runCode('type 42')
|
||||||
|
expect(result).toEqual('number')
|
||||||
|
})
|
||||||
|
|
||||||
|
test('returns null for empty code', async () => {
|
||||||
|
const result = await runCode('')
|
||||||
|
expect(result).toEqual(null)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('compileCode()', () => {
|
||||||
|
test('compiles code to bytecode', () => {
|
||||||
|
const bytecode = compileCode('x = 5')
|
||||||
|
|
||||||
|
expect(bytecode).toHaveProperty('instructions')
|
||||||
|
expect(bytecode).toHaveProperty('constants')
|
||||||
|
expect(bytecode.instructions.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('respects globals', () => {
|
||||||
|
const bytecode = compileCode('x = myGlobal', { myGlobal: 42 })
|
||||||
|
|
||||||
|
expect(bytecode.instructions.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('compiled bytecode is usable', async () => {
|
||||||
|
const bytecode = compileCode('21 * 2')
|
||||||
|
const result = await runCode('21 * 2')
|
||||||
|
|
||||||
|
expect(result).toEqual(42)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('parseCode()', () => {
|
||||||
|
test('parses code to syntax tree', () => {
|
||||||
|
const tree = parseCode('x = 5')
|
||||||
|
|
||||||
|
expect(tree).toHaveProperty('length')
|
||||||
|
expect(tree.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('respects globals', () => {
|
||||||
|
const tree = parseCode('x = myGlobal', { myGlobal: 42 })
|
||||||
|
|
||||||
|
expect(tree.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('handles complex expressions', () => {
|
||||||
|
const tree = parseCode(`add = do x y:
|
||||||
|
x + y
|
||||||
|
end
|
||||||
|
result = add 5 10`)
|
||||||
|
|
||||||
|
expect(tree.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('bytecodeToString()', () => {
|
||||||
|
test('converts bytecode to human-readable format', () => {
|
||||||
|
const bytecode = compileCode('x = 42')
|
||||||
|
const str = bytecodeToString(bytecode)
|
||||||
|
|
||||||
|
expect(typeof str).toEqual('string')
|
||||||
|
expect(str.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('shows instructions', () => {
|
||||||
|
const bytecode = compileCode('1 + 1')
|
||||||
|
const str = bytecodeToString(bytecode)
|
||||||
|
|
||||||
|
// Should contain some opcodes
|
||||||
|
expect(str).toContain('PUSH')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('Integration tests', () => {
|
||||||
|
test('complex REPL-like workflow', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
// Define a function
|
||||||
|
await shrimp.run(`double = do x:
|
||||||
|
x * 2
|
||||||
|
end`)
|
||||||
|
expect(shrimp.has('double')).toEqual(true)
|
||||||
|
|
||||||
|
// Use the function
|
||||||
|
const result1 = await shrimp.run('double 21')
|
||||||
|
expect(result1).toEqual(42)
|
||||||
|
|
||||||
|
// Call it from TypeScript
|
||||||
|
const result2 = await shrimp.call('double', 50)
|
||||||
|
expect(result2).toEqual(100)
|
||||||
|
|
||||||
|
// Define another function using the first
|
||||||
|
await shrimp.run(`quadruple = do x:
|
||||||
|
double (double x)
|
||||||
|
end`)
|
||||||
|
|
||||||
|
const result3 = await shrimp.run('quadruple 5')
|
||||||
|
expect(result3).toEqual(20)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('mixing native and Shrimp functions', async () => {
|
||||||
|
const shrimp = new Shrimp({
|
||||||
|
log: (msg: string) => `Logged: ${msg}`,
|
||||||
|
multiply: (a: number, b: number) => a * b,
|
||||||
|
})
|
||||||
|
|
||||||
|
await shrimp.run(`greet = do name:
|
||||||
|
log name
|
||||||
|
end`)
|
||||||
|
|
||||||
|
const result1 = await shrimp.run('greet Alice')
|
||||||
|
expect(result1).toEqual('Logged: Alice')
|
||||||
|
|
||||||
|
await shrimp.run(`calc = do x:
|
||||||
|
multiply x 3
|
||||||
|
end`)
|
||||||
|
|
||||||
|
const result2 = await shrimp.run('calc 7')
|
||||||
|
expect(result2).toEqual(21)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('working with arrays and dicts', async () => {
|
||||||
|
const shrimp = new Shrimp()
|
||||||
|
|
||||||
|
await shrimp.run('nums = [1 2 3 4 5]')
|
||||||
|
expect(shrimp.get('nums')).toEqual([1, 2, 3, 4, 5])
|
||||||
|
|
||||||
|
await shrimp.run("config = [host='localhost' port=3000]")
|
||||||
|
expect(shrimp.get('config')).toEqual({ host: 'localhost', port: 3000 })
|
||||||
|
|
||||||
|
const result = await shrimp.run('length nums')
|
||||||
|
expect(result).toEqual(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('compile once, run multiple times', async () => {
|
||||||
|
const bytecode = compileCode('x * 2')
|
||||||
|
|
||||||
|
const shrimp1 = new Shrimp()
|
||||||
|
shrimp1.set('x', 10)
|
||||||
|
const result1 = await shrimp1.run(bytecode)
|
||||||
|
expect(result1).toEqual(20)
|
||||||
|
|
||||||
|
const shrimp2 = new Shrimp()
|
||||||
|
shrimp2.set('x', 100)
|
||||||
|
const result2 = await shrimp2.run(bytecode)
|
||||||
|
expect(result2).toEqual(200)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,5 +1,50 @@
|
||||||
import { Tree, TreeCursor } from '@lezer/common'
|
import { Tree, TreeCursor } from '@lezer/common'
|
||||||
import { type Value, fromValue } from 'reefvm'
|
import { type Value, fromValue } from 'reefvm'
|
||||||
|
import { SyntaxNode } from '#parser/node'
|
||||||
|
|
||||||
|
const nodeToString = (node: SyntaxNode, input: string, depth = 0): string => {
|
||||||
|
const indent = ' '.repeat(depth)
|
||||||
|
const text = input.slice(node.from, node.to)
|
||||||
|
const nodeName = node.name
|
||||||
|
|
||||||
|
if (node.firstChild) {
|
||||||
|
return `${indent}${nodeName}`
|
||||||
|
} else {
|
||||||
|
// Only strip quotes from whole String nodes (legacy DoubleQuote), not StringFragment/EscapeSeq/CurlyString
|
||||||
|
let cleanText = nodeName === 'String' ? text.slice(1, -1) : text
|
||||||
|
if (cleanText === ' ') cleanText = '(space)'
|
||||||
|
return cleanText ? `${indent}${nodeName} ${cleanText}` : `${indent}${nodeName}`
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const treeToString2 = (tree: SyntaxNode, input: string, depth = 0): string => {
|
||||||
|
let lines = []
|
||||||
|
let node: SyntaxNode | null = tree
|
||||||
|
|
||||||
|
if (node.name === 'Program') node = node.firstChild
|
||||||
|
|
||||||
|
while (node) {
|
||||||
|
// If this node is an error, print ⚠ instead of its content
|
||||||
|
if (node.isError && !node.firstChild) {
|
||||||
|
lines.push(' '.repeat(depth) + '⚠')
|
||||||
|
} else {
|
||||||
|
lines.push(nodeToString(node, input, depth))
|
||||||
|
|
||||||
|
if (node.firstChild) {
|
||||||
|
lines.push(treeToString2(node.firstChild, input, depth + 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
// If this node has an error, add ⚠ after its children
|
||||||
|
if (node.isError && node.firstChild) {
|
||||||
|
lines.push(' '.repeat(depth === 0 ? 0 : depth + 1) + '⚠')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node = node.nextSibling
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join('\n')
|
||||||
|
}
|
||||||
|
|
||||||
export const treeToString = (tree: Tree, input: string): string => {
|
export const treeToString = (tree: Tree, input: string): string => {
|
||||||
const lines: string[] = []
|
const lines: string[] = []
|
||||||
|
|
|
||||||
19
vscode-extension/.vscode/launch.json
vendored
Normal file
19
vscode-extension/.vscode/launch.json
vendored
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
{
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"name": "Run Extension",
|
||||||
|
"type": "extensionHost",
|
||||||
|
"request": "launch",
|
||||||
|
"args": [
|
||||||
|
"--extensionDevelopmentPath=${workspaceFolder}",
|
||||||
|
"--profile=Shrimp Dev"
|
||||||
|
],
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/client/dist/**/*.js",
|
||||||
|
"${workspaceFolder}/server/dist/**/*.js"
|
||||||
|
],
|
||||||
|
"preLaunchTask": "bun: compile"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
18
vscode-extension/.vscode/tasks.json
vendored
Normal file
18
vscode-extension/.vscode/tasks.json
vendored
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
{
|
||||||
|
"version": "2.0.0",
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"label": "bun: compile",
|
||||||
|
"command": "bun",
|
||||||
|
"args": ["run", "compile"],
|
||||||
|
"options": {
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
"problemMatcher": "$tsc",
|
||||||
|
"group": {
|
||||||
|
"kind": "build",
|
||||||
|
"isDefault": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
5
vscode-extension/.vscodeignore
Normal file
5
vscode-extension/.vscodeignore
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
.vscode/**
|
||||||
|
src/**
|
||||||
|
tsconfig.json
|
||||||
|
node_modules/**
|
||||||
|
*.map
|
||||||
49
vscode-extension/README.md
Normal file
49
vscode-extension/README.md
Normal file
|
|
@ -0,0 +1,49 @@
|
||||||
|
# Shrimp VSCode Extension
|
||||||
|
|
||||||
|
Language support for Shrimp in VSCode. This README is for probablycorey and defunkt.
|
||||||
|
|
||||||
|
**What it provides:**
|
||||||
|
|
||||||
|
- Syntax highlighting and semantic tokens
|
||||||
|
- Language server with error diagnostics
|
||||||
|
- Commands: "Show Parse Tree" (Alt+K Alt+I), "Show Bytecode" (Alt+K Alt+,), and "Run File" (Cmd+R)
|
||||||
|
- `.sh` file association
|
||||||
|
|
||||||
|
## Development Workflow
|
||||||
|
|
||||||
|
**Developing the extension:**
|
||||||
|
|
||||||
|
1. Open `vscode-extension/` in VSCode
|
||||||
|
2. Run `bun run watch` in a terminal (keeps it compiling as you make changes)
|
||||||
|
3. Use **Run > Start Debugging** to launch Extension Development Host
|
||||||
|
4. Make changes to the code
|
||||||
|
5. Press **Cmd+R** (or Ctrl+R) in the Extension Development Host window to reload
|
||||||
|
6. Repeat steps 4-5
|
||||||
|
|
||||||
|
The `.vscode/launch.json` is configured to compile before launching and use a separate "Shrimp Dev" profile. This means you can have the extension installed in your main VSCode while developing without conflicts.
|
||||||
|
|
||||||
|
**Installing for daily use:**
|
||||||
|
|
||||||
|
Run `bun run build-and-install` to build a VSIX and install it in your current VSCode profile. This lets you use the extension when working on Shrimp scripts outside of development mode.
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
The extension has two parts: a **client** (`client/src/extension.ts`) that registers commands and starts the language server, and a **server** (`server/src/`) that implements the Language Server Protocol for diagnostics and semantic highlighting.
|
||||||
|
|
||||||
|
Both compile to their respective `dist/` folders.
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
**Autocomplete:**
|
||||||
|
|
||||||
|
- [ ] Identifiers in scope
|
||||||
|
- [ ] Globals from the prelude (including native functions)
|
||||||
|
- [ ] Imports
|
||||||
|
- [ ] Dot-get properties
|
||||||
|
- [ ] Function argument completion
|
||||||
|
|
||||||
|
**Other features:**
|
||||||
|
|
||||||
|
- [ ] Better syntax coloring
|
||||||
|
- [ ] REPL integration
|
||||||
|
- [ ] Bundle shrimp binary with extension (currently uses `shrimp.binaryPath` setting)
|
||||||
47
vscode-extension/bun.lock
Normal file
47
vscode-extension/bun.lock
Normal file
|
|
@ -0,0 +1,47 @@
|
||||||
|
{
|
||||||
|
"lockfileVersion": 1,
|
||||||
|
"workspaces": {
|
||||||
|
"": {
|
||||||
|
"name": "shrimp",
|
||||||
|
"dependencies": {
|
||||||
|
"vscode-languageclient": "^9.0.1",
|
||||||
|
"vscode-languageserver": "^9.0.1",
|
||||||
|
"vscode-languageserver-textdocument": "^1.0.12",
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "22.x",
|
||||||
|
"@types/vscode": "^1.105.0",
|
||||||
|
"typescript": "^5.9.3",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"packages": {
|
||||||
|
"@types/node": ["@types/node@22.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-xpr/lmLPQEj+TUnHmR+Ab91/glhJvsqcjB+yY0Ix9GO70H6Lb4FHH5GeqdOE5btAx7eIMwuHkp4H2MSkLcqWbA=="],
|
||||||
|
|
||||||
|
"@types/vscode": ["@types/vscode@1.105.0", "", {}, "sha512-Lotk3CTFlGZN8ray4VxJE7axIyLZZETQJVWi/lYoUVQuqfRxlQhVOfoejsD2V3dVXPSbS15ov5ZyowMAzgUqcw=="],
|
||||||
|
|
||||||
|
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
|
||||||
|
|
||||||
|
"brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="],
|
||||||
|
|
||||||
|
"minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="],
|
||||||
|
|
||||||
|
"semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
|
||||||
|
|
||||||
|
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||||
|
|
||||||
|
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
|
||||||
|
|
||||||
|
"vscode-jsonrpc": ["vscode-jsonrpc@8.2.0", "", {}, "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA=="],
|
||||||
|
|
||||||
|
"vscode-languageclient": ["vscode-languageclient@9.0.1", "", { "dependencies": { "minimatch": "^5.1.0", "semver": "^7.3.7", "vscode-languageserver-protocol": "3.17.5" } }, "sha512-JZiimVdvimEuHh5olxhxkht09m3JzUGwggb5eRUkzzJhZ2KjCN0nh55VfiED9oez9DyF8/fz1g1iBV3h+0Z2EA=="],
|
||||||
|
|
||||||
|
"vscode-languageserver": ["vscode-languageserver@9.0.1", "", { "dependencies": { "vscode-languageserver-protocol": "3.17.5" }, "bin": { "installServerIntoExtension": "bin/installServerIntoExtension" } }, "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g=="],
|
||||||
|
|
||||||
|
"vscode-languageserver-protocol": ["vscode-languageserver-protocol@3.17.5", "", { "dependencies": { "vscode-jsonrpc": "8.2.0", "vscode-languageserver-types": "3.17.5" } }, "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg=="],
|
||||||
|
|
||||||
|
"vscode-languageserver-textdocument": ["vscode-languageserver-textdocument@1.0.12", "", {}, "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA=="],
|
||||||
|
|
||||||
|
"vscode-languageserver-types": ["vscode-languageserver-types@3.17.5", "", {}, "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg=="],
|
||||||
|
}
|
||||||
|
}
|
||||||
100
vscode-extension/client/src/extension.ts
Normal file
100
vscode-extension/client/src/extension.ts
Normal file
|
|
@ -0,0 +1,100 @@
|
||||||
|
import {
|
||||||
|
LanguageClient,
|
||||||
|
LanguageClientOptions,
|
||||||
|
ServerOptions,
|
||||||
|
TransportKind,
|
||||||
|
} from 'vscode-languageclient/node'
|
||||||
|
import * as vscode from 'vscode'
|
||||||
|
|
||||||
|
export function activate(context: vscode.ExtensionContext) {
|
||||||
|
const serverModule = context.asAbsolutePath('server/dist/server.js')
|
||||||
|
|
||||||
|
const serverOptions: ServerOptions = {
|
||||||
|
run: { module: serverModule, transport: TransportKind.ipc },
|
||||||
|
debug: { module: serverModule, transport: TransportKind.ipc },
|
||||||
|
}
|
||||||
|
|
||||||
|
const clientOptions: LanguageClientOptions = {
|
||||||
|
documentSelector: [{ scheme: 'file', language: 'shrimp' }],
|
||||||
|
}
|
||||||
|
|
||||||
|
const client = new LanguageClient(
|
||||||
|
'shrimpLanguageServer',
|
||||||
|
'Shrimp Language Server',
|
||||||
|
serverOptions,
|
||||||
|
clientOptions
|
||||||
|
)
|
||||||
|
|
||||||
|
client.start()
|
||||||
|
context.subscriptions.push(client)
|
||||||
|
|
||||||
|
// Command: Show Parse Tree
|
||||||
|
context.subscriptions.push(
|
||||||
|
vscode.commands.registerCommand('shrimp.showParseTree', async () => {
|
||||||
|
const editor = vscode.window.activeTextEditor
|
||||||
|
if (!editor || editor.document.languageId !== 'shrimp') {
|
||||||
|
vscode.window.showErrorMessage('No active Shrimp file')
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await client.sendRequest<string>('shrimp/parseTree', {
|
||||||
|
uri: editor.document.uri.toString(),
|
||||||
|
})
|
||||||
|
|
||||||
|
const doc = await vscode.workspace.openTextDocument({
|
||||||
|
content: result,
|
||||||
|
language: 'text',
|
||||||
|
})
|
||||||
|
await vscode.window.showTextDocument(doc, { preview: false })
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Command: Show Bytecode
|
||||||
|
context.subscriptions.push(
|
||||||
|
vscode.commands.registerCommand('shrimp.showBytecode', async () => {
|
||||||
|
const editor = vscode.window.activeTextEditor
|
||||||
|
if (!editor || editor.document.languageId !== 'shrimp') {
|
||||||
|
vscode.window.showErrorMessage('No active Shrimp file')
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await client.sendRequest<string>('shrimp/bytecode', {
|
||||||
|
uri: editor.document.uri.toString(),
|
||||||
|
})
|
||||||
|
|
||||||
|
const doc = await vscode.workspace.openTextDocument({
|
||||||
|
content: result,
|
||||||
|
language: 'text',
|
||||||
|
})
|
||||||
|
await vscode.window.showTextDocument(doc, { preview: false })
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Command: Run File
|
||||||
|
context.subscriptions.push(
|
||||||
|
vscode.commands.registerCommand('shrimp.run', async () => {
|
||||||
|
const editor = vscode.window.activeTextEditor
|
||||||
|
if (!editor || editor.document.languageId !== 'shrimp') {
|
||||||
|
vscode.window.showErrorMessage('No active Shrimp file')
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-save before running
|
||||||
|
await editor.document.save()
|
||||||
|
|
||||||
|
// Get binary path from settings
|
||||||
|
const config = vscode.workspace.getConfiguration('shrimp')
|
||||||
|
const binaryPath = config.get<string>('binaryPath', 'shrimp')
|
||||||
|
|
||||||
|
// Get the file path
|
||||||
|
const filePath = editor.document.uri.fsPath
|
||||||
|
|
||||||
|
// Create or show terminal
|
||||||
|
const terminal = vscode.window.createTerminal('Shrimp')
|
||||||
|
terminal.show()
|
||||||
|
terminal.sendText(`${binaryPath} "${filePath}"`)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Nothing to dispose manually: the LanguageClient created in activate() is
// pushed onto context.subscriptions, so VSCode stops it on shutdown.
export function deactivate() {}
|
||||||
25
vscode-extension/example.shrimp
Normal file
25
vscode-extension/example.shrimp
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
# This just has some stuff I use to make sure the extension is working!

# Function definition via a `do ... end` block with three parameters.
like-a-function = do x y z:
  echo 'This is a function with parameters: $x, $y, $z'
end

# `if` is used as an expression here; its result is assigned to `value`.
value = if true:
  'This is true!'
else:
  'This is false!'
end

echo 'value is $(value)'

# Nested `do:` blocks — presumably the prelude's HTML helpers; exercises
# block nesting and keyword arguments in the highlighter. TODO confirm.
html lang=en do:
  head do:
    meta charset='UTF-8'
    meta name='viewport' content='width=device-width, initial-scale=1.0'
  end

  body do:
    h1 'Hello, World!'
    p 'This is a sample HTML generated by the extension.'
  end
end
|
||||||
BIN
vscode-extension/icon.png
Normal file
BIN
vscode-extension/icon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 270 KiB |
28
vscode-extension/language-configuration.json
Normal file
28
vscode-extension/language-configuration.json
Normal file
|
|
@ -0,0 +1,28 @@
|
||||||
|
{
|
||||||
|
"comments": {
|
||||||
|
"lineComment": {
|
||||||
|
"comment": "#"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"brackets": [
|
||||||
|
["(", ")"],
|
||||||
|
["[", "]"]
|
||||||
|
],
|
||||||
|
"autoClosingPairs": [
|
||||||
|
{ "open": "(", "close": ")" },
|
||||||
|
{ "open": "[", "close": "]" },
|
||||||
|
{ "open": "'", "close": "'", "notIn": ["string"] },
|
||||||
|
{ "open": "\"", "close": "\"", "notIn": ["string"] }
|
||||||
|
],
|
||||||
|
"surroundingPairs": [
|
||||||
|
["(", ")"],
|
||||||
|
["[", "]"],
|
||||||
|
["'", "'"],
|
||||||
|
["\"", "\""]
|
||||||
|
],
|
||||||
|
"wordPattern": "([a-z][a-z0-9-]*)|(-?\\d+\\.?\\d*)",
|
||||||
|
"indentationRules": {
|
||||||
|
"increaseIndentPattern": ":\\s*$",
|
||||||
|
"decreaseIndentPattern": "^\\s*(end|else)\\b"
|
||||||
|
}
|
||||||
|
}
|
||||||
97
vscode-extension/package.json
Normal file
97
vscode-extension/package.json
Normal file
|
|
@ -0,0 +1,97 @@
|
||||||
|
{
|
||||||
|
"name": "shrimp",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"main": "./client/dist/extension.js",
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/vscode": "^1.105.0",
|
||||||
|
"@types/node": "22.x",
|
||||||
|
"typescript": "^5.9.3"
|
||||||
|
},
|
||||||
|
"categories": [
|
||||||
|
"Programming Languages"
|
||||||
|
],
|
||||||
|
"contributes": {
|
||||||
|
"languages": [
|
||||||
|
{
|
||||||
|
"id": "shrimp",
|
||||||
|
"aliases": [
|
||||||
|
"Shrimp",
|
||||||
|
"shrimp"
|
||||||
|
],
|
||||||
|
"extensions": [
|
||||||
|
".shrimp"
|
||||||
|
],
|
||||||
|
"configuration": "./language-configuration.json"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"configurationDefaults": {
|
||||||
|
"[shrimp]": {
|
||||||
|
"editor.semanticHighlighting.enabled": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"configuration": {
|
||||||
|
"title": "Shrimp",
|
||||||
|
"properties": {
|
||||||
|
"shrimp.binaryPath": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "shrimp",
|
||||||
|
"description": "Path to the shrimp binary"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"commands": [
|
||||||
|
{
|
||||||
|
"command": "shrimp.showParseTree",
|
||||||
|
"title": "Shrimp: Show Parse Tree"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"command": "shrimp.showBytecode",
|
||||||
|
"title": "Shrimp: Show Bytecode"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"command": "shrimp.run",
|
||||||
|
"title": "Shrimp: Run File"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"keybindings": [
|
||||||
|
{
|
||||||
|
"command": "shrimp.showParseTree",
|
||||||
|
"key": "alt+k alt+i",
|
||||||
|
"when": "editorLangId == shrimp"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"command": "shrimp.showBytecode",
|
||||||
|
"key": "alt+k alt+,",
|
||||||
|
"when": "editorLangId == shrimp"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"command": "shrimp.run",
|
||||||
|
"key": "cmd+r",
|
||||||
|
"when": "editorLangId == shrimp"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"description": "Language support for Shrimp shell scripting language",
|
||||||
|
"displayName": "Shrimp",
|
||||||
|
"engines": {
|
||||||
|
"vscode": "^1.105.0"
|
||||||
|
},
|
||||||
|
"icon": "icon.png",
|
||||||
|
"publisher": "shrimp-lang",
|
||||||
|
"scripts": {
|
||||||
|
"vscode:prepublish": "bun run package",
|
||||||
|
"generate-prelude-metadata": "bun scripts/generate-prelude-metadata.ts",
|
||||||
|
"compile": "bun run generate-prelude-metadata && bun run compile:client && bun run compile:server",
|
||||||
|
"compile:client": "bun build client/src/extension.ts --outdir client/dist --target node --format cjs --external vscode",
|
||||||
|
"compile:server": "bun build server/src/server.ts --outdir server/dist --target node --format cjs",
|
||||||
|
"watch": "bun run compile:client --watch & bun run compile:server --watch",
|
||||||
|
"package": "bun run generate-prelude-metadata && bun run compile:client --minify && bun run compile:server --minify",
|
||||||
|
"check-types": "tsc --noEmit",
|
||||||
|
"build-and-install": "bun run package && bunx @vscode/vsce package --allow-missing-repository && code --install-extension shrimp-*.vsix"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"vscode-languageclient": "^9.0.1",
|
||||||
|
"vscode-languageserver": "^9.0.1",
|
||||||
|
"vscode-languageserver-textdocument": "^1.0.12"
|
||||||
|
}
|
||||||
|
}
|
||||||
117
vscode-extension/scripts/generate-prelude-metadata.ts
Normal file
117
vscode-extension/scripts/generate-prelude-metadata.ts
Normal file
|
|
@ -0,0 +1,117 @@
|
||||||
|
#!/usr/bin/env bun
|
||||||
|
/**
|
||||||
|
* Generates prelude metadata for the VSCode extension.
|
||||||
|
* - Prelude names (for parser scope tracking)
|
||||||
|
* - Function signatures (for autocomplete)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { writeFileSync } from 'fs'
|
||||||
|
import { join } from 'path'
|
||||||
|
import { globals } from '../../src/prelude'
|
||||||
|
|
||||||
|
// Extract parameter names from a function
|
||||||
|
const extractParams = (fn: Function): string[] => {
|
||||||
|
const fnStr = fn.toString()
|
||||||
|
const match = fnStr.match(/\(([^)]*)\)/)
|
||||||
|
if (!match) return []
|
||||||
|
|
||||||
|
const paramsStr = match[1]!.trim()
|
||||||
|
if (!paramsStr) return []
|
||||||
|
|
||||||
|
// Split by comma, but be careful of default values with commas
|
||||||
|
const params: string[] = []
|
||||||
|
let current = ''
|
||||||
|
let inString = false
|
||||||
|
let stringChar = ''
|
||||||
|
|
||||||
|
for (let i = 0; i < paramsStr.length; i++) {
|
||||||
|
const char = paramsStr[i]
|
||||||
|
if ((char === '"' || char === "'") && (i === 0 || paramsStr[i - 1] !== '\\')) {
|
||||||
|
if (!inString) {
|
||||||
|
inString = true
|
||||||
|
stringChar = char
|
||||||
|
} else if (char === stringChar) {
|
||||||
|
inString = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (char === ',' && !inString) {
|
||||||
|
params.push(current.trim())
|
||||||
|
current = ''
|
||||||
|
} else {
|
||||||
|
current += char
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (current.trim()) params.push(current.trim())
|
||||||
|
|
||||||
|
return params
|
||||||
|
.map((p) => p.split(/[=:]/)[0]!.trim()) // Handle defaults and types
|
||||||
|
.filter((p) => p && p !== 'this')
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate metadata for a module
|
||||||
|
const generateModuleMetadata = (module: Record<string, any>) => {
|
||||||
|
const metadata: Record<string, { params: string[] }> = {}
|
||||||
|
|
||||||
|
for (const [name, value] of Object.entries(module)) {
|
||||||
|
if (typeof value === 'function') {
|
||||||
|
metadata[name] = { params: extractParams(value) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return metadata
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate names list
const names = Object.keys(globals).sort()

// Generate module metadata
// NOTE(review): object-valued entries of `globals` are treated as modules;
// `$` is handled separately below — confirm against the prelude's shape.
const moduleMetadata: Record<string, any> = {}
for (const [name, value] of Object.entries(globals)) {
  if (typeof value === 'object' && value !== null && name !== '$') {
    moduleMetadata[name] = generateModuleMetadata(value)
  }
}

// Generate dollar metadata
// Dollar entries are recorded by name only; no parameter info is extracted.
const dollarMetadata: Record<string, { params: string[] }> = {}
if (globals.$ && typeof globals.$ === 'object') {
  for (const key of Object.keys(globals.$)) {
    dollarMetadata[key] = { params: [] }
  }
}

// Write prelude-names.ts
const namesOutput = `// Auto-generated by scripts/generate-prelude-metadata.ts
// Do not edit manually - run 'bun run generate-prelude-metadata' to regenerate

export const PRELUDE_NAMES = ${JSON.stringify(names, null, 2)} as const
`

// Output paths are relative to this script's directory (Bun's import.meta.dir).
const namesPath = join(import.meta.dir, '../server/src/metadata/prelude-names.ts')
writeFileSync(namesPath, namesOutput)

// Write prelude-completions.ts
const completionsOutput = `// Auto-generated by scripts/generate-prelude-metadata.ts
// Do not edit manually - run 'bun run generate-prelude-metadata' to regenerate

export type CompletionMetadata = {
  params: string[]
  description?: string
}

export const completions = {
  modules: ${JSON.stringify(moduleMetadata, null, 2)},
  dollar: ${JSON.stringify(dollarMetadata, null, 2)},
} as const
`

const completionsPath = join(import.meta.dir, '../server/src/metadata/prelude-completions.ts')
writeFileSync(completionsPath, completionsOutput)

// Summary for the developer running the script.
console.log(`✓ Generated ${names.length} prelude names to server/src/metadata/prelude-names.ts`)
console.log(
  `✓ Generated completions for ${
    Object.keys(moduleMetadata).length
  } modules to server/src/metadata/prelude-completions.ts`
)
|
||||||
52
vscode-extension/server/src/completion/completionProvider.ts
Normal file
52
vscode-extension/server/src/completion/completionProvider.ts
Normal file
|
|
@ -0,0 +1,52 @@
|
||||||
|
import { CompletionItem, CompletionItemKind } from 'vscode-languageserver/node'
|
||||||
|
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||||
|
import { completions } from '../metadata/prelude-completions'
|
||||||
|
import { analyzeCompletionContext } from './contextAnalyzer'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Provides context-aware completions for Shrimp code.
|
||||||
|
* Returns module function completions (dict.*, list.*, str.*) or dollar property
|
||||||
|
* completions ($.*) based on the cursor position.
|
||||||
|
*/
|
||||||
|
export const provideCompletions = (
|
||||||
|
document: TextDocument,
|
||||||
|
position: { line: number; character: number }
|
||||||
|
): CompletionItem[] => {
|
||||||
|
const context = analyzeCompletionContext(document, position)
|
||||||
|
|
||||||
|
if (context.type === 'module') {
|
||||||
|
return buildModuleCompletions(context.moduleName)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (context.type === 'dollar') {
|
||||||
|
return buildDollarCompletions()
|
||||||
|
}
|
||||||
|
|
||||||
|
return [] // No completions for other contexts yet
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builds completion items for module functions (dict.*, list.*, str.*).
|
||||||
|
*/
|
||||||
|
const buildModuleCompletions = (moduleName: string): CompletionItem[] => {
|
||||||
|
const functions = completions.modules[moduleName as keyof typeof completions.modules]
|
||||||
|
if (!functions) return []
|
||||||
|
|
||||||
|
return Object.entries(functions).map(([name, meta]) => ({
|
||||||
|
label: name,
|
||||||
|
kind: CompletionItemKind.Method,
|
||||||
|
detail: `(${meta.params.join(', ')})`,
|
||||||
|
insertText: name,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builds completion items for dollar properties ($.*).
|
||||||
|
*/
|
||||||
|
const buildDollarCompletions = (): CompletionItem[] => {
|
||||||
|
return Object.entries(completions.dollar).map(([name, meta]) => ({
|
||||||
|
label: name,
|
||||||
|
kind: CompletionItemKind.Property,
|
||||||
|
insertText: name,
|
||||||
|
}))
|
||||||
|
}
|
||||||
66
vscode-extension/server/src/completion/contextAnalyzer.ts
Normal file
66
vscode-extension/server/src/completion/contextAnalyzer.ts
Normal file
|
|
@ -0,0 +1,66 @@
|
||||||
|
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||||
|
import { SyntaxNode } from '@lezer/common'
|
||||||
|
import { parser } from '../../../../src/parser/shrimp'
|
||||||
|
import * as Terms from '../../../../src/parser/shrimp.terms'
|
||||||
|
|
||||||
|
export type CompletionContext =
|
||||||
|
| { type: 'module'; moduleName: string }
|
||||||
|
| { type: 'dollar' }
|
||||||
|
| { type: 'none' }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Analyzes the document at the given position to determine what kind of
|
||||||
|
* completion context we're in (module member access, dollar property, or none).
|
||||||
|
*/
|
||||||
|
export const analyzeCompletionContext = (
|
||||||
|
document: TextDocument,
|
||||||
|
position: { line: number; character: number }
|
||||||
|
): CompletionContext => {
|
||||||
|
const offset = document.offsetAt(position)
|
||||||
|
const text = document.getText()
|
||||||
|
const tree = parser.parse(text)
|
||||||
|
|
||||||
|
// Find node at cursor - could be DotGet or Identifier inside DotGet
|
||||||
|
const node = tree.resolveInner(offset, -1)
|
||||||
|
|
||||||
|
console.log(`🔍 Node at cursor: ${node.name} (type: ${node.type.id})`)
|
||||||
|
console.log(`🔍 Parent: ${node.parent?.name} (type: ${node.parent?.type.id})`)
|
||||||
|
console.log(`🔍 Node text: "${text.slice(node.from, node.to)}"`)
|
||||||
|
|
||||||
|
const SUPPORTED_MODULES = ['dict', 'list', 'str', 'math', 'fs', 'json', 'load']
|
||||||
|
|
||||||
|
// Case 1: Incomplete DotGet (dict. or $.)
|
||||||
|
// resolveInner returns DotGet node directly
|
||||||
|
if (node.type.id === Terms.DotGet) {
|
||||||
|
const leftSide = extractLeftSide(node, text)
|
||||||
|
console.log(`✅ Case 1: DotGet found, left side: "${leftSide}"`)
|
||||||
|
if (leftSide === '$') return { type: 'dollar' }
|
||||||
|
if (SUPPORTED_MODULES.includes(leftSide)) {
|
||||||
|
return { type: 'module', moduleName: leftSide }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Case 2: Partial identifier (dict.g or $.e)
|
||||||
|
// resolveInner returns Identifier, parent is DotGet
|
||||||
|
if (node.type.id === Terms.Identifier && node.parent?.type.id === Terms.DotGet) {
|
||||||
|
const dotGetNode = node.parent
|
||||||
|
const leftSide = extractLeftSide(dotGetNode, text)
|
||||||
|
console.log(`✅ Case 2: Identifier in DotGet found, left side: "${leftSide}"`)
|
||||||
|
if (leftSide === '$') return { type: 'dollar' }
|
||||||
|
if (SUPPORTED_MODULES.includes(leftSide)) {
|
||||||
|
return { type: 'module', moduleName: leftSide }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`❌ No matching context found`)
|
||||||
|
return { type: 'none' }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts the text of the left side of a DotGet node (the part before the dot).
|
||||||
|
*/
|
||||||
|
const extractLeftSide = (dotGetNode: SyntaxNode, text: string): string => {
|
||||||
|
const firstChild = dotGetNode.firstChild
|
||||||
|
if (!firstChild) return ''
|
||||||
|
return text.slice(firstChild.from, firstChild.to)
|
||||||
|
}
|
||||||
91
vscode-extension/server/src/diagnostics.ts
Normal file
91
vscode-extension/server/src/diagnostics.ts
Normal file
|
|
@ -0,0 +1,91 @@
|
||||||
|
import { TextDocument, Position } from 'vscode-languageserver-textdocument'
|
||||||
|
import { Diagnostic, DiagnosticSeverity } from 'vscode-languageserver/node'
|
||||||
|
import { Tree } from '@lezer/common'
|
||||||
|
import { Compiler } from '../../../src/compiler/compiler'
|
||||||
|
import { CompilerError } from '../../../src/compiler/compilerError'
|
||||||
|
|
||||||
|
export const buildDiagnostics = (textDocument: TextDocument, tree: Tree): Diagnostic[] => {
|
||||||
|
const text = textDocument.getText()
|
||||||
|
const diagnostics = getParseErrors(textDocument, tree)
|
||||||
|
|
||||||
|
if (diagnostics.length > 0) {
|
||||||
|
return diagnostics
|
||||||
|
}
|
||||||
|
const diagnostic = getCompilerError(text)
|
||||||
|
if (diagnostic) return [diagnostic]
|
||||||
|
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
const getCompilerError = (text: string): Diagnostic | undefined => {
|
||||||
|
try {
|
||||||
|
new Compiler(text)
|
||||||
|
} catch (e) {
|
||||||
|
if (!(e instanceof CompilerError)) {
|
||||||
|
return unknownDiagnostic(getErrorMessage(e))
|
||||||
|
}
|
||||||
|
|
||||||
|
const lineInfo = e.lineAtPosition(text)!
|
||||||
|
const cause = e.cause ? ` Cause: ${e.cause}` : ''
|
||||||
|
const message = e.message
|
||||||
|
|
||||||
|
if (!lineInfo) {
|
||||||
|
return unknownDiagnostic(message + cause)
|
||||||
|
}
|
||||||
|
|
||||||
|
const diagnostic: Diagnostic = {
|
||||||
|
severity: DiagnosticSeverity.Error,
|
||||||
|
range: {
|
||||||
|
start: { line: lineInfo.lineNumber, character: lineInfo.columnStart },
|
||||||
|
end: { line: lineInfo.lineNumber, character: lineInfo.columnEnd },
|
||||||
|
},
|
||||||
|
message: `Compiler error: ${message}${cause}`,
|
||||||
|
source: 'shrimp',
|
||||||
|
}
|
||||||
|
return diagnostic
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const unknownDiagnostic = (message: string): Diagnostic => {
|
||||||
|
const diagnostic: Diagnostic = {
|
||||||
|
severity: DiagnosticSeverity.Error,
|
||||||
|
range: {
|
||||||
|
start: { line: 0, character: 0 },
|
||||||
|
end: { line: -1, character: -1 },
|
||||||
|
},
|
||||||
|
message,
|
||||||
|
source: 'shrimp',
|
||||||
|
}
|
||||||
|
return diagnostic
|
||||||
|
}
|
||||||
|
|
||||||
|
const getParseErrors = (textDocument: TextDocument, tree: Tree): Diagnostic[] => {
|
||||||
|
const ranges: { start: Position; end: Position }[] = []
|
||||||
|
tree.iterate({
|
||||||
|
enter(n) {
|
||||||
|
if (n.type.isError) {
|
||||||
|
ranges.push({
|
||||||
|
start: textDocument.positionAt(n.from),
|
||||||
|
end: textDocument.positionAt(n.to),
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
return ranges.map((range) => {
|
||||||
|
return {
|
||||||
|
range,
|
||||||
|
severity: DiagnosticSeverity.Error,
|
||||||
|
message: 'Parse error: Invalid syntax',
|
||||||
|
source: 'shrimp',
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const getErrorMessage = (error: unknown): string => {
|
||||||
|
if (error instanceof Error) {
|
||||||
|
return error.message
|
||||||
|
}
|
||||||
|
return String(error)
|
||||||
|
}
|
||||||
151
vscode-extension/server/src/editorScopeAnalyzer.test.ts
Normal file
151
vscode-extension/server/src/editorScopeAnalyzer.test.ts
Normal file
|
|
@ -0,0 +1,151 @@
|
||||||
|
import { test, expect, describe } from 'bun:test'
|
||||||
|
import { EditorScopeAnalyzer } from './editorScopeAnalyzer'
|
||||||
|
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||||
|
import { parser } from '../../../src/parser/shrimp'
|
||||||
|
import * as Terms from '../../../src/parser/shrimp.terms'
|
||||||
|
|
||||||
|
describe('EditorScopeAnalyzer', () => {
|
||||||
|
test('top-level assignment is in scope', () => {
|
||||||
|
const code = 'x = 5\necho x'
|
||||||
|
const { tree, tracker } = parseAndGetScope(code)
|
||||||
|
|
||||||
|
// Find the 'x' identifier in 'echo x'
|
||||||
|
const identifiers: any[] = []
|
||||||
|
tree.topNode.cursor().iterate((node: any) => {
|
||||||
|
if (node.type.id === Terms.Identifier) {
|
||||||
|
identifiers.push(node.node)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Second identifier should be the 'x' in 'echo x'
|
||||||
|
const xInEcho = identifiers[1]
|
||||||
|
expect(xInEcho).toBeDefined()
|
||||||
|
expect(tracker.isInScope('x', xInEcho)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('undeclared variable is not in scope', () => {
|
||||||
|
const code = 'echo x'
|
||||||
|
const { tree, tracker } = parseAndGetScope(code)
|
||||||
|
|
||||||
|
// Find the 'x' identifier
|
||||||
|
let xNode: any = null
|
||||||
|
tree.topNode.cursor().iterate((node: any) => {
|
||||||
|
if (node.type.id === Terms.Identifier) {
|
||||||
|
xNode = node.node
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(xNode).toBeDefined()
|
||||||
|
expect(tracker.isInScope('x', xNode)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('function parameter is in scope inside function', () => {
|
||||||
|
const code = `greet = do name:
|
||||||
|
echo name
|
||||||
|
end`
|
||||||
|
const { tree, tracker } = parseAndGetScope(code)
|
||||||
|
|
||||||
|
// Find all identifiers
|
||||||
|
const identifiers: any[] = []
|
||||||
|
tree.topNode.cursor().iterate((node: any) => {
|
||||||
|
if (node.type.id === Terms.Identifier) {
|
||||||
|
identifiers.push(node.node)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Find the 'name' in 'echo name' (should be last identifier)
|
||||||
|
const nameInEcho = identifiers[identifiers.length - 1]
|
||||||
|
expect(tracker.isInScope('name', nameInEcho)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('assignment before usage is in scope', () => {
|
||||||
|
const code = `x = 5
|
||||||
|
y = 10
|
||||||
|
echo x y`
|
||||||
|
const { tree, tracker } = parseAndGetScope(code)
|
||||||
|
|
||||||
|
// Find identifiers
|
||||||
|
const identifiers: any[] = []
|
||||||
|
tree.topNode.cursor().iterate((node: any) => {
|
||||||
|
if (node.type.id === Terms.Identifier) {
|
||||||
|
identifiers.push(node.node)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Last two identifiers should be 'x' and 'y' in 'echo x y'
|
||||||
|
const xInEcho = identifiers[identifiers.length - 2]
|
||||||
|
const yInEcho = identifiers[identifiers.length - 1]
|
||||||
|
|
||||||
|
expect(tracker.isInScope('x', xInEcho)).toBe(true)
|
||||||
|
expect(tracker.isInScope('y', yInEcho)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('assignment after usage is not in scope', () => {
|
||||||
|
const code = `echo x
|
||||||
|
x = 5`
|
||||||
|
const { tree, tracker } = parseAndGetScope(code)
|
||||||
|
|
||||||
|
// Find the first 'x' identifier (in echo)
|
||||||
|
let xNode: any = null
|
||||||
|
tree.topNode.cursor().iterate((node: any) => {
|
||||||
|
if (node.type.id === Terms.Identifier && !xNode) {
|
||||||
|
xNode = node.node
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(tracker.isInScope('x', xNode)).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('nested function has access to outer scope', () => {
|
||||||
|
const code = `x = 5
|
||||||
|
greet = do:
|
||||||
|
echo x
|
||||||
|
end`
|
||||||
|
const { tree, tracker } = parseAndGetScope(code)
|
||||||
|
|
||||||
|
// Find all identifiers
|
||||||
|
const identifiers: any[] = []
|
||||||
|
tree.topNode.cursor().iterate((node: any) => {
|
||||||
|
if (node.type.id === Terms.Identifier) {
|
||||||
|
identifiers.push(node.node)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Find the 'x' in 'echo x' (should be last identifier)
|
||||||
|
const xInEcho = identifiers[identifiers.length - 1]
|
||||||
|
expect(tracker.isInScope('x', xInEcho)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('inner function parameter shadows outer variable', () => {
|
||||||
|
const code = `x = 5
|
||||||
|
greet = do x:
|
||||||
|
echo x
|
||||||
|
end`
|
||||||
|
const { tree, tracker } = parseAndGetScope(code)
|
||||||
|
|
||||||
|
// Find all identifiers
|
||||||
|
const identifiers: any[] = []
|
||||||
|
tree.topNode.cursor().iterate((node: any) => {
|
||||||
|
if (node.type.id === Terms.Identifier) {
|
||||||
|
identifiers.push(node.node)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// The 'x' in 'echo x' should have 'x' in scope (from parameter)
|
||||||
|
const xInEcho = identifiers[identifiers.length - 1]
|
||||||
|
expect(tracker.isInScope('x', xInEcho)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
test('the prelude functions are always in scope', () => {
|
||||||
|
const code = `echo "Hello, World!"`
|
||||||
|
const { tree, tracker } = parseAndGetScope(code)
|
||||||
|
expect(tracker.isInScope('echo', tree.topNode)).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const parseAndGetScope = (code: string) => {
|
||||||
|
const document = TextDocument.create('test://test.sh', 'shrimp', 1, code)
|
||||||
|
const tree = parser.parse(code)
|
||||||
|
const tracker = new EditorScopeAnalyzer(document)
|
||||||
|
return { document, tree, tracker }
|
||||||
|
}
|
||||||
137
vscode-extension/server/src/editorScopeAnalyzer.ts
Normal file
137
vscode-extension/server/src/editorScopeAnalyzer.ts
Normal file
|
|
@ -0,0 +1,137 @@
|
||||||
|
import { SyntaxNode } from '@lezer/common'
|
||||||
|
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||||
|
import * as Terms from '../../../src/parser/shrimp.terms'
|
||||||
|
import { PRELUDE_NAMES } from './metadata/prelude-names'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tracks variables in scope at a given position in the parse tree.
|
||||||
|
* Used to distinguish identifiers (in scope) from words (not in scope).
|
||||||
|
*/
|
||||||
|
export class EditorScopeAnalyzer {
|
||||||
|
private document: TextDocument
|
||||||
|
private scopeCache = new Map<number, Set<string>>()
|
||||||
|
|
||||||
|
constructor(document: TextDocument) {
|
||||||
|
this.document = document
|
||||||
|
this.scopeCache.set(0, new Set(PRELUDE_NAMES))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a name is in scope at the given node's position.
|
||||||
|
*/
|
||||||
|
isInScope(name: string, node: SyntaxNode): boolean {
|
||||||
|
const scope = this.getScopeAt(node)
|
||||||
|
return scope.has(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all variables in scope at the given node's position.
|
||||||
|
*/
|
||||||
|
private getScopeAt(node: SyntaxNode): Set<string> {
|
||||||
|
const position = node.from
|
||||||
|
|
||||||
|
// Check cache first
|
||||||
|
if (this.scopeCache.has(position)) {
|
||||||
|
return this.scopeCache.get(position)!
|
||||||
|
}
|
||||||
|
|
||||||
|
const scope = new Set<string>()
|
||||||
|
|
||||||
|
// Find all containing function definitions
|
||||||
|
const containingFunctions = this.findContainingFunctions(node)
|
||||||
|
|
||||||
|
// Collect scope from each containing function (inner to outer)
|
||||||
|
for (const fnNode of containingFunctions) {
|
||||||
|
this.collectParams(fnNode, scope)
|
||||||
|
this.collectAssignments(fnNode, position, scope)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect top-level assignments
|
||||||
|
const root = this.getRoot(node)
|
||||||
|
this.collectAssignments(root, position, scope)
|
||||||
|
|
||||||
|
this.scopeCache.set(position, scope)
|
||||||
|
return scope
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find all function definitions that contain the given node.
|
||||||
|
*/
|
||||||
|
private findContainingFunctions(node: SyntaxNode): SyntaxNode[] {
|
||||||
|
const functions: SyntaxNode[] = []
|
||||||
|
let current = node.parent
|
||||||
|
|
||||||
|
while (current) {
|
||||||
|
if (current.type.id === Terms.FunctionDef) {
|
||||||
|
functions.unshift(current) // Add to beginning for outer-to-inner order
|
||||||
|
}
|
||||||
|
current = current.parent
|
||||||
|
}
|
||||||
|
|
||||||
|
return functions
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the root node of the tree.
|
||||||
|
*/
|
||||||
|
private getRoot(node: SyntaxNode): SyntaxNode {
|
||||||
|
let current = node
|
||||||
|
while (current.parent) {
|
||||||
|
current = current.parent
|
||||||
|
}
|
||||||
|
return current
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Collect parameter names from a function definition.
|
||||||
|
*/
|
||||||
|
private collectParams(fnNode: SyntaxNode, scope: Set<string>) {
|
||||||
|
let child = fnNode.firstChild
|
||||||
|
while (child) {
|
||||||
|
if (child.type.id === Terms.Params) {
|
||||||
|
let param = child.firstChild
|
||||||
|
while (param) {
|
||||||
|
if (param.type.id === Terms.Identifier) {
|
||||||
|
const text = this.document.getText({
|
||||||
|
start: this.document.positionAt(param.from),
|
||||||
|
end: this.document.positionAt(param.to),
|
||||||
|
})
|
||||||
|
scope.add(text)
|
||||||
|
}
|
||||||
|
param = param.nextSibling
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
child = child.nextSibling
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Collect assignment names from a scope node that occur before the given position.
|
||||||
|
*/
|
||||||
|
private collectAssignments(scopeNode: SyntaxNode, beforePosition: number, scope: Set<string>) {
|
||||||
|
const cursor = scopeNode.cursor()
|
||||||
|
|
||||||
|
cursor.iterate((node) => {
|
||||||
|
// Stop if we've passed the position we're checking
|
||||||
|
if (node.from >= beforePosition) return false
|
||||||
|
|
||||||
|
if (node.type.id === Terms.Assign) {
|
||||||
|
const assignNode = node.node
|
||||||
|
const child = assignNode.firstChild
|
||||||
|
if (child?.type.id === Terms.AssignableIdentifier) {
|
||||||
|
const text = this.document.getText({
|
||||||
|
start: this.document.positionAt(child.from),
|
||||||
|
end: this.document.positionAt(child.to),
|
||||||
|
})
|
||||||
|
scope.add(text)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Don't descend into nested functions unless it's the current scope
|
||||||
|
if (node.type.id === Terms.FunctionDef && node.node !== scopeNode) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
796
vscode-extension/server/src/metadata/prelude-completions.ts
Normal file
796
vscode-extension/server/src/metadata/prelude-completions.ts
Normal file
|
|
@ -0,0 +1,796 @@
|
||||||
|
// Auto-generated by scripts/generate-prelude-metadata.ts
|
||||||
|
// Do not edit manually - run 'bun run generate-prelude-metadata' to regenerate
|
||||||
|
|
||||||
|
/** Shape of one generated completion entry. */
export type CompletionMetadata = {
  // Parameter names in call order; a '...name' entry marks a rest parameter.
  params: string[]
  // Optional summary text; absent from the generated entries in this file.
  description?: string
}
|
||||||
|
|
||||||
|
export const completions = {
|
||||||
|
modules: {
|
||||||
|
"date": {
|
||||||
|
"now": {
|
||||||
|
"params": []
|
||||||
|
},
|
||||||
|
"year": {
|
||||||
|
"params": [
|
||||||
|
"time"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"month": {
|
||||||
|
"params": [
|
||||||
|
"time"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"date": {
|
||||||
|
"params": [
|
||||||
|
"time"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hour": {
|
||||||
|
"params": [
|
||||||
|
"time"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"minute": {
|
||||||
|
"params": [
|
||||||
|
"time"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"second": {
|
||||||
|
"params": [
|
||||||
|
"time"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"ms": {
|
||||||
|
"params": [
|
||||||
|
"time"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"new": {
|
||||||
|
"params": [
|
||||||
|
"year",
|
||||||
|
"month",
|
||||||
|
"day",
|
||||||
|
"hour",
|
||||||
|
"minute",
|
||||||
|
"second",
|
||||||
|
"ms"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"dict": {
|
||||||
|
"keys": {
|
||||||
|
"params": [
|
||||||
|
"dict"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"values": {
|
||||||
|
"params": [
|
||||||
|
"dict"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"entries": {
|
||||||
|
"params": [
|
||||||
|
"dict"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"has?": {
|
||||||
|
"params": [
|
||||||
|
"dict",
|
||||||
|
"key"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"get": {
|
||||||
|
"params": [
|
||||||
|
"dict",
|
||||||
|
"key",
|
||||||
|
"defaultValue"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"set": {
|
||||||
|
"params": [
|
||||||
|
"dict",
|
||||||
|
"key",
|
||||||
|
"value"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"merge": {
|
||||||
|
"params": [
|
||||||
|
"...dicts"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"empty?": {
|
||||||
|
"params": [
|
||||||
|
"dict"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"map": {
|
||||||
|
"params": [
|
||||||
|
"dict",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"filter": {
|
||||||
|
"params": [
|
||||||
|
"dict",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"from-entries": {
|
||||||
|
"params": [
|
||||||
|
"entries"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fs": {
|
||||||
|
"ls": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"mkdir": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"rmdir": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"pwd": {
|
||||||
|
"params": []
|
||||||
|
},
|
||||||
|
"cd": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"read": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"cat": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"read-bytes": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"write": {
|
||||||
|
"params": [
|
||||||
|
"path",
|
||||||
|
"content"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"append": {
|
||||||
|
"params": [
|
||||||
|
"path",
|
||||||
|
"content"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"delete": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"rm": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"copy": {
|
||||||
|
"params": [
|
||||||
|
"from",
|
||||||
|
"to"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"move": {
|
||||||
|
"params": [
|
||||||
|
"from",
|
||||||
|
"to"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"mv": {
|
||||||
|
"params": [
|
||||||
|
"from",
|
||||||
|
"to"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"basename": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"dirname": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"extname": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"join": {
|
||||||
|
"params": [
|
||||||
|
"...paths"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"resolve": {
|
||||||
|
"params": [
|
||||||
|
"...paths"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"stat": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"exists?": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"file?": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"dir?": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"symlink?": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"exec?": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"size": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"chmod": {
|
||||||
|
"params": [
|
||||||
|
"path",
|
||||||
|
"mode"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"symlink": {
|
||||||
|
"params": [
|
||||||
|
"target",
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"readlink": {
|
||||||
|
"params": [
|
||||||
|
"path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"glob": {
|
||||||
|
"params": [
|
||||||
|
"pattern"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"watch": {
|
||||||
|
"params": [
|
||||||
|
"path",
|
||||||
|
"callback"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"cp": {
|
||||||
|
"params": [
|
||||||
|
"from",
|
||||||
|
"to"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"json": {
|
||||||
|
"encode": {
|
||||||
|
"params": [
|
||||||
|
"s"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"decode": {
|
||||||
|
"params": [
|
||||||
|
"s"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"parse": {
|
||||||
|
"params": [
|
||||||
|
"s"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"stringify": {
|
||||||
|
"params": [
|
||||||
|
"s"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"list": {
|
||||||
|
"slice": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"start",
|
||||||
|
"end"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"map": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"filter": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"reject": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"reduce": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb",
|
||||||
|
"initial"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"find": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"empty?": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"contains?": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"includes?": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"has?": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"any?": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"all?": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"push": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"pop": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"shift": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"unshift": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"splice": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"start",
|
||||||
|
"deleteCount",
|
||||||
|
"...items"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"insert": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"index",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"reverse": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"sort": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"concat": {
|
||||||
|
"params": [
|
||||||
|
"...lists"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"flatten": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"depth"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"unique": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"zip": {
|
||||||
|
"params": [
|
||||||
|
"list1",
|
||||||
|
"list2"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"first": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"last": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"rest": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"take": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"drop": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"append": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"prepend": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"index-of": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"item"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"sum": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"count": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"partition": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"compact": {
|
||||||
|
"params": [
|
||||||
|
"list"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"group-by": {
|
||||||
|
"params": [
|
||||||
|
"list",
|
||||||
|
"cb"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"math": {
|
||||||
|
"abs": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"floor": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"ceil": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"round": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"min": {
|
||||||
|
"params": [
|
||||||
|
"...nums"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"max": {
|
||||||
|
"params": [
|
||||||
|
"...nums"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"pow": {
|
||||||
|
"params": [
|
||||||
|
"base",
|
||||||
|
"exp"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"sqrt": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"random": {
|
||||||
|
"params": [
|
||||||
|
"min",
|
||||||
|
"max"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"clamp": {
|
||||||
|
"params": [
|
||||||
|
"n",
|
||||||
|
"min",
|
||||||
|
"max"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"sign": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"trunc": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"even?": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"odd?": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"positive?": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"negative?": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"zero?": {
|
||||||
|
"params": [
|
||||||
|
"n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"str": {
|
||||||
|
"join": {
|
||||||
|
"params": [
|
||||||
|
"arr",
|
||||||
|
"sep"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"split": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"sep"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"to-upper": {
|
||||||
|
"params": [
|
||||||
|
"str"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"to-lower": {
|
||||||
|
"params": [
|
||||||
|
"str"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"trim": {
|
||||||
|
"params": [
|
||||||
|
"str"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"starts-with?": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"prefix"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"ends-with?": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"suffix"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"contains?": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"substr"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"empty?": {
|
||||||
|
"params": [
|
||||||
|
"str"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"index-of": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"search"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"last-index-of": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"search"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"replace": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"search",
|
||||||
|
"replacement"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"replace-all": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"search",
|
||||||
|
"replacement"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"slice": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"start",
|
||||||
|
"end"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"substring": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"start",
|
||||||
|
"end"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"repeat": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"count"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"pad-start": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"length",
|
||||||
|
"pad"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"pad-end": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"length",
|
||||||
|
"pad"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"capitalize": {
|
||||||
|
"params": [
|
||||||
|
"str"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"titlecase": {
|
||||||
|
"params": [
|
||||||
|
"s"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"lines": {
|
||||||
|
"params": [
|
||||||
|
"str"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"chars": {
|
||||||
|
"params": [
|
||||||
|
"str"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"match": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"regex"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"test?": {
|
||||||
|
"params": [
|
||||||
|
"str",
|
||||||
|
"regex"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
dollar: {
|
||||||
|
"args": {
|
||||||
|
"params": []
|
||||||
|
},
|
||||||
|
"argv": {
|
||||||
|
"params": []
|
||||||
|
},
|
||||||
|
"env": {
|
||||||
|
"params": []
|
||||||
|
},
|
||||||
|
"pid": {
|
||||||
|
"params": []
|
||||||
|
},
|
||||||
|
"cwd": {
|
||||||
|
"params": []
|
||||||
|
},
|
||||||
|
"script": {
|
||||||
|
"params": []
|
||||||
|
}
|
||||||
|
},
|
||||||
|
} as const
|
||||||
45
vscode-extension/server/src/metadata/prelude-names.ts
Normal file
45
vscode-extension/server/src/metadata/prelude-names.ts
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
// Auto-generated by scripts/generate-prelude-metadata.ts
|
||||||
|
// Do not edit manually - run 'bun run generate-prelude-metadata' to regenerate
|
||||||
|
|
||||||
|
export const PRELUDE_NAMES = [
|
||||||
|
"$",
|
||||||
|
"array?",
|
||||||
|
"at",
|
||||||
|
"bnot",
|
||||||
|
"boolean",
|
||||||
|
"boolean?",
|
||||||
|
"date",
|
||||||
|
"dec",
|
||||||
|
"describe",
|
||||||
|
"dict",
|
||||||
|
"dict?",
|
||||||
|
"each",
|
||||||
|
"echo",
|
||||||
|
"empty?",
|
||||||
|
"exit",
|
||||||
|
"fs",
|
||||||
|
"function?",
|
||||||
|
"identity",
|
||||||
|
"import",
|
||||||
|
"inc",
|
||||||
|
"inspect",
|
||||||
|
"json",
|
||||||
|
"length",
|
||||||
|
"list",
|
||||||
|
"list?",
|
||||||
|
"load",
|
||||||
|
"math",
|
||||||
|
"not",
|
||||||
|
"null?",
|
||||||
|
"number",
|
||||||
|
"number?",
|
||||||
|
"range",
|
||||||
|
"ref",
|
||||||
|
"some?",
|
||||||
|
"str",
|
||||||
|
"string",
|
||||||
|
"string?",
|
||||||
|
"type",
|
||||||
|
"var",
|
||||||
|
"var?"
|
||||||
|
] as const
|
||||||
251
vscode-extension/server/src/semanticTokens.ts
Normal file
251
vscode-extension/server/src/semanticTokens.ts
Normal file
|
|
@ -0,0 +1,251 @@
|
||||||
|
import { parser } from '../../../src/parser/shrimp'
|
||||||
|
import * as Terms from '../../../src/parser/shrimp.terms'
|
||||||
|
import { SyntaxNode, Tree } from '@lezer/common'
|
||||||
|
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||||
|
import {
|
||||||
|
SemanticTokensBuilder,
|
||||||
|
SemanticTokenTypes,
|
||||||
|
SemanticTokenModifiers,
|
||||||
|
} from 'vscode-languageserver/node'
|
||||||
|
import { EditorScopeAnalyzer } from './editorScopeAnalyzer'
|
||||||
|
|
||||||
|
// Semantic-token legend. NOTE: array order defines the numeric indices
// encoded into token data (see the TOKEN_TYPES.indexOf(...) calls below),
// so entries must not be reordered without re-issuing the legend.
export const TOKEN_TYPES = [
  SemanticTokenTypes.function,
  SemanticTokenTypes.variable,
  SemanticTokenTypes.string,
  SemanticTokenTypes.number,
  SemanticTokenTypes.operator,
  SemanticTokenTypes.keyword,
  SemanticTokenTypes.parameter,
  SemanticTokenTypes.property,
  SemanticTokenTypes.regexp,
  SemanticTokenTypes.comment,
]

// Modifier order defines the bit position used by getModifierBits
// (bit i set means TOKEN_MODIFIERS[i] applies).
export const TOKEN_MODIFIERS = [
  SemanticTokenModifiers.declaration,
  SemanticTokenModifiers.modification,
  SemanticTokenModifiers.readonly,
]
|
||||||
|
|
||||||
|
export function buildSemanticTokens(document: TextDocument, tree: Tree): number[] {
|
||||||
|
const builder = new SemanticTokensBuilder()
|
||||||
|
const scopeTracker = new EditorScopeAnalyzer(document)
|
||||||
|
|
||||||
|
walkTree(tree.topNode, document, builder, scopeTracker)
|
||||||
|
|
||||||
|
return builder.build().data
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emit split tokens for NamedArgPrefix (e.g., "color=" → "color" + "=")
|
||||||
|
function emitNamedArgPrefix(
|
||||||
|
node: SyntaxNode,
|
||||||
|
document: TextDocument,
|
||||||
|
builder: SemanticTokensBuilder
|
||||||
|
) {
|
||||||
|
const text = document.getText({
|
||||||
|
start: document.positionAt(node.from),
|
||||||
|
end: document.positionAt(node.to),
|
||||||
|
})
|
||||||
|
|
||||||
|
const nameLength = text.length - 1 // Everything except the =
|
||||||
|
const start = document.positionAt(node.from)
|
||||||
|
|
||||||
|
// Emit token for the name part (e.g., "color")
|
||||||
|
builder.push(
|
||||||
|
start.line,
|
||||||
|
start.character,
|
||||||
|
nameLength,
|
||||||
|
TOKEN_TYPES.indexOf(SemanticTokenTypes.property),
|
||||||
|
0
|
||||||
|
)
|
||||||
|
|
||||||
|
// Emit token for the "=" part
|
||||||
|
builder.push(
|
||||||
|
start.line,
|
||||||
|
start.character + nameLength,
|
||||||
|
1, // Just the = character
|
||||||
|
TOKEN_TYPES.indexOf(SemanticTokenTypes.operator),
|
||||||
|
0
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Walk the tree and collect tokens
|
||||||
|
function walkTree(
|
||||||
|
node: SyntaxNode,
|
||||||
|
document: TextDocument,
|
||||||
|
builder: SemanticTokensBuilder,
|
||||||
|
scopeTracker: EditorScopeAnalyzer
|
||||||
|
) {
|
||||||
|
// Special handling for NamedArgPrefix to split "name=" into two tokens
|
||||||
|
if (node.type.id === Terms.NamedArgPrefix) {
|
||||||
|
emitNamedArgPrefix(node, document, builder)
|
||||||
|
} else {
|
||||||
|
const tokenInfo = getTokenType(node, document, scopeTracker)
|
||||||
|
|
||||||
|
if (tokenInfo !== undefined) {
|
||||||
|
const start = document.positionAt(node.from)
|
||||||
|
const length = node.to - node.from
|
||||||
|
builder.push(start.line, start.character, length, tokenInfo.type, tokenInfo.modifiers)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let child = node.firstChild
|
||||||
|
while (child) {
|
||||||
|
walkTree(child, document, builder, scopeTracker)
|
||||||
|
child = child.nextSibling
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map Lezer node IDs to semantic token type indices and modifiers
|
||||||
|
type TokenInfo = { type: number; modifiers: number } | undefined
|
||||||
|
function getTokenType(
|
||||||
|
node: SyntaxNode,
|
||||||
|
document: TextDocument,
|
||||||
|
scopeTracker: EditorScopeAnalyzer
|
||||||
|
): TokenInfo {
|
||||||
|
const nodeTypeId = node.type.id
|
||||||
|
const parentTypeId = node.parent?.type.id
|
||||||
|
|
||||||
|
// Special case for now, eventually keywords will go away
|
||||||
|
if (node.type.name === 'keyword') {
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.keyword),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (nodeTypeId) {
|
||||||
|
case Terms.Identifier:
|
||||||
|
// Check parent to determine context
|
||||||
|
if (parentTypeId === Terms.FunctionCall) {
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.function),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (parentTypeId === Terms.FunctionDef) {
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.function),
|
||||||
|
modifiers: getModifierBits(SemanticTokenModifiers.declaration),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (parentTypeId === Terms.FunctionCallOrIdentifier) {
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.function),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (parentTypeId === Terms.Params) {
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.parameter),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (parentTypeId === Terms.DotGet) {
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.property),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Special case: Identifier in PositionalArg or NamedArg- check scope
|
||||||
|
if (parentTypeId === Terms.PositionalArg || parentTypeId === Terms.NamedArg) {
|
||||||
|
const identifierText = document.getText({
|
||||||
|
start: document.positionAt(node.from),
|
||||||
|
end: document.positionAt(node.to),
|
||||||
|
})
|
||||||
|
|
||||||
|
// If not in scope, treat as string (like a Word)
|
||||||
|
if (!scopeTracker.isInScope(identifierText, node)) {
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.string),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If in scope, fall through to treat as variable
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise it's a regular variable
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.variable),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
case Terms.IdentifierBeforeDot:
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.variable),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
case Terms.AssignableIdentifier:
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.variable),
|
||||||
|
modifiers: getModifierBits(SemanticTokenModifiers.modification),
|
||||||
|
}
|
||||||
|
|
||||||
|
case Terms.String:
|
||||||
|
case Terms.StringFragment:
|
||||||
|
case Terms.Word:
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.string),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
case Terms.Number:
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.number),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
case Terms.Plus:
|
||||||
|
case Terms.Minus:
|
||||||
|
case Terms.Star:
|
||||||
|
case Terms.Slash:
|
||||||
|
case Terms.Eq:
|
||||||
|
case Terms.EqEq:
|
||||||
|
case Terms.Neq:
|
||||||
|
case Terms.Lt:
|
||||||
|
case Terms.Lte:
|
||||||
|
case Terms.Gt:
|
||||||
|
case Terms.Gte:
|
||||||
|
case Terms.Modulo:
|
||||||
|
case Terms.And:
|
||||||
|
case Terms.Or:
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.operator),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
case Terms.Do:
|
||||||
|
case Terms.colon:
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.keyword),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
case Terms.Regex:
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.regexp),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
case Terms.Comment:
|
||||||
|
return {
|
||||||
|
type: TOKEN_TYPES.indexOf(SemanticTokenTypes.comment),
|
||||||
|
modifiers: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const getModifierBits = (...modifiers: SemanticTokenModifiers[]): number => {
|
||||||
|
let bits = 0
|
||||||
|
for (const modifier of modifiers) {
|
||||||
|
const index = TOKEN_MODIFIERS.indexOf(modifier)
|
||||||
|
if (index !== -1) bits |= 1 << index
|
||||||
|
}
|
||||||
|
return bits
|
||||||
|
}
|
||||||
227
vscode-extension/server/src/server.ts
Normal file
227
vscode-extension/server/src/server.ts
Normal file
|
|
@ -0,0 +1,227 @@
|
||||||
|
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||||
|
import { buildDiagnostics } from './diagnostics'
|
||||||
|
import { buildSemanticTokens, TOKEN_MODIFIERS, TOKEN_TYPES } from './semanticTokens'
|
||||||
|
import { provideCompletions } from './completion/completionProvider'
|
||||||
|
import { provideSignatureHelp } from './signatureHelp'
|
||||||
|
import { PRELUDE_NAMES } from './metadata/prelude-names'
|
||||||
|
import { parser } from '../../../src/parser/shrimp'
|
||||||
|
import { setGlobals } from '../../../src/parser/tokenizer'
|
||||||
|
import { Compiler } from '../../../src/compiler/compiler'
|
||||||
|
import { Tree } from '@lezer/common'
|
||||||
|
import {
|
||||||
|
InitializeResult,
|
||||||
|
TextDocuments,
|
||||||
|
TextDocumentSyncKind,
|
||||||
|
createConnection,
|
||||||
|
ProposedFeatures,
|
||||||
|
CompletionItemKind,
|
||||||
|
TextDocumentChangeEvent,
|
||||||
|
} from 'vscode-languageserver/node'
|
||||||
|
import { globals } from '../../../src/prelude'
|
||||||
|
|
||||||
|
// Initialize parser with prelude globals so it knows dict/list/str are in scope.
// NOTE(review): handleDocumentOpen later calls setGlobals(Object.keys(globals)) —
// confirm PRELUDE_NAMES and the runtime `globals` keys are meant to be the same set.
setGlobals(PRELUDE_NAMES)

const connection = createConnection(ProposedFeatures.all)
const documents = new TextDocuments(TextDocument)
documents.listen(connection)

// Most recent parse tree per open document URI; populated by the
// open/change handlers and read by the semantic-token handler.
const documentTrees = new Map<string, Tree>()

// Server capabilities
connection.onInitialize(handleInitialize)

// Language features
connection.languages.semanticTokens.on(handleSemanticTokens)
documents.onDidOpen(handleDocumentOpen)
documents.onDidChangeContent(handleDocumentChange)
documents.onDidClose(handleDocumentClose)
connection.onCompletion(handleCompletion)
connection.onSignatureHelp(handleSignatureHelp)

// Debug commands (custom LSP requests: parse tree / bytecode dumps)
connection.onRequest('shrimp/parseTree', handleParseTree)
connection.onRequest('shrimp/bytecode', handleBytecode)

// Start listening
connection.listen()
|
||||||
|
|
||||||
|
// Handler implementations
|
||||||
|
function handleInitialize(): InitializeResult {
|
||||||
|
connection.console.log('🦐 Server initialized with capabilities')
|
||||||
|
const result: InitializeResult = {
|
||||||
|
capabilities: {
|
||||||
|
textDocumentSync: TextDocumentSyncKind.Full,
|
||||||
|
completionProvider: {
|
||||||
|
triggerCharacters: ['.'],
|
||||||
|
},
|
||||||
|
signatureHelpProvider: {
|
||||||
|
triggerCharacters: [' '],
|
||||||
|
},
|
||||||
|
semanticTokensProvider: {
|
||||||
|
legend: {
|
||||||
|
tokenTypes: TOKEN_TYPES,
|
||||||
|
tokenModifiers: TOKEN_MODIFIERS,
|
||||||
|
},
|
||||||
|
full: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleDocumentOpen(event: TextDocumentChangeEvent<TextDocument>) {
|
||||||
|
const document = event.document
|
||||||
|
setGlobals(Object.keys(globals))
|
||||||
|
const tree = parser.parse(document.getText())
|
||||||
|
documentTrees.set(document.uri, tree)
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleSemanticTokens(params: any) {
|
||||||
|
const document = documents.get(params.textDocument.uri)
|
||||||
|
if (!document) return { data: [] }
|
||||||
|
|
||||||
|
const tree = documentTrees.get(params.textDocument.uri)
|
||||||
|
if (!tree) return { data: [] }
|
||||||
|
|
||||||
|
const data = buildSemanticTokens(document, tree)
|
||||||
|
return { data }
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleDocumentChange(change: TextDocumentChangeEvent<TextDocument>) {
|
||||||
|
const document = change.document
|
||||||
|
|
||||||
|
// Parse and cache
|
||||||
|
setGlobals(Object.keys(globals))
|
||||||
|
const tree = parser.parse(document.getText())
|
||||||
|
documentTrees.set(document.uri, tree)
|
||||||
|
|
||||||
|
// Build diagnostics using cached tree
|
||||||
|
const diagnostics = buildDiagnostics(document, tree)
|
||||||
|
connection.sendDiagnostics({ uri: document.uri, diagnostics })
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleDocumentClose(event: TextDocumentChangeEvent<TextDocument>) {
|
||||||
|
documentTrees.delete(event.document.uri)
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleCompletion(params: any) {
|
||||||
|
const document = documents.get(params.textDocument.uri)
|
||||||
|
if (!document) {
|
||||||
|
console.log('❌ No document found')
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
const position = params.position
|
||||||
|
const text = document.getText()
|
||||||
|
const offset = document.offsetAt(position)
|
||||||
|
console.log(`📍 Text around cursor: "${text.slice(Math.max(0, offset - 10), offset + 10)}"`)
|
||||||
|
|
||||||
|
// First try context-aware completions (module/dollar)
|
||||||
|
const contextCompletions = provideCompletions(document, position)
|
||||||
|
console.log(`🎯 Context completions count: ${contextCompletions.length}`)
|
||||||
|
if (contextCompletions.length > 0) {
|
||||||
|
console.log(
|
||||||
|
`✅ Returning ${contextCompletions.length} completions:`,
|
||||||
|
contextCompletions.map((c) => c.label).join(', ')
|
||||||
|
)
|
||||||
|
return contextCompletions
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to keywords + prelude globals (for Ctrl+Space in general context)
|
||||||
|
console.log(`⌨️ Falling back to keywords + prelude globals`)
|
||||||
|
const keywords = ['if', 'else', 'do', 'end', 'and', 'or', 'true', 'false', 'null']
|
||||||
|
const keywordCompletions = keywords.map((keyword) => ({
|
||||||
|
label: keyword,
|
||||||
|
kind: CompletionItemKind.Keyword,
|
||||||
|
}))
|
||||||
|
|
||||||
|
const preludeCompletions = PRELUDE_NAMES.map((name) => ({
|
||||||
|
label: name,
|
||||||
|
kind: CompletionItemKind.Function,
|
||||||
|
}))
|
||||||
|
|
||||||
|
return [...keywordCompletions, ...preludeCompletions]
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleSignatureHelp(params: any) {
|
||||||
|
const document = documents.get(params.textDocument.uri)
|
||||||
|
if (!document) return
|
||||||
|
return provideSignatureHelp(document, params.position)
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleParseTree(params: { uri: string }) {
|
||||||
|
connection.console.log(`🦐 Parse tree requested for: ${params.uri}`)
|
||||||
|
const document = documents.get(params.uri)
|
||||||
|
if (!document) return 'Document not found'
|
||||||
|
|
||||||
|
const tree = documentTrees.get(params.uri)
|
||||||
|
if (!tree) {
|
||||||
|
connection.console.error(`🦐 No cached tree for ${params.uri}`)
|
||||||
|
return 'No cached parse tree available'
|
||||||
|
}
|
||||||
|
|
||||||
|
const text = document.getText()
|
||||||
|
const cursor = tree.cursor()
|
||||||
|
|
||||||
|
let formatted = ''
|
||||||
|
let depth = 0
|
||||||
|
|
||||||
|
const printNode = () => {
|
||||||
|
const nodeName = cursor.name
|
||||||
|
const nodeText = text.slice(cursor.from, cursor.to)
|
||||||
|
const indent = ' '.repeat(depth)
|
||||||
|
|
||||||
|
formatted += `${indent}${nodeName}`
|
||||||
|
if (nodeText) {
|
||||||
|
const escapedText = nodeText.replace(/\n/g, '\\n').replace(/\r/g, '\\r')
|
||||||
|
formatted += ` "${escapedText}"`
|
||||||
|
}
|
||||||
|
formatted += '\n'
|
||||||
|
}
|
||||||
|
|
||||||
|
const traverse = (): void => {
|
||||||
|
printNode()
|
||||||
|
|
||||||
|
if (cursor.firstChild()) {
|
||||||
|
depth++
|
||||||
|
do {
|
||||||
|
traverse()
|
||||||
|
} while (cursor.nextSibling())
|
||||||
|
cursor.parent()
|
||||||
|
depth--
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
traverse()
|
||||||
|
return formatted
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleBytecode(params: { uri: string }) {
|
||||||
|
connection.console.log(`🦐 Bytecode requested for: ${params.uri}`)
|
||||||
|
const document = documents.get(params.uri)
|
||||||
|
if (!document) return 'Document not found'
|
||||||
|
|
||||||
|
try {
|
||||||
|
const text = document.getText()
|
||||||
|
const compiler = new Compiler(text)
|
||||||
|
|
||||||
|
// Format bytecode as readable string
|
||||||
|
let output = 'Bytecode:\n\n'
|
||||||
|
const bytecode = compiler.bytecode
|
||||||
|
|
||||||
|
output += bytecode.instructions
|
||||||
|
.map((op, i) => `${i.toString().padStart(4)}: ${JSON.stringify(op)}`)
|
||||||
|
.join('\n')
|
||||||
|
|
||||||
|
// Strip ANSI color codes
|
||||||
|
output = output.replace(/\x1b\[[0-9;]*m/g, '')
|
||||||
|
|
||||||
|
return output
|
||||||
|
} catch (error) {
|
||||||
|
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||||
|
// Strip ANSI color codes from error message too
|
||||||
|
return `Compilation failed: ${errorMsg.replace(/\x1b\[[0-9;]*m/g, '')}`
|
||||||
|
}
|
||||||
|
}
|
||||||
105
vscode-extension/server/src/signatureHelp.ts
Normal file
105
vscode-extension/server/src/signatureHelp.ts
Normal file
|
|
@ -0,0 +1,105 @@
|
||||||
|
import { SignatureHelp, SignatureInformation, ParameterInformation } from 'vscode-languageserver/node'
|
||||||
|
import { TextDocument } from 'vscode-languageserver-textdocument'
|
||||||
|
import { Tree, SyntaxNode } from '@lezer/common'
|
||||||
|
import { parser } from '../../../src/parser/shrimp'
|
||||||
|
import { completions } from './metadata/prelude-completions'
|
||||||
|
|
||||||
|
export const provideSignatureHelp = (
|
||||||
|
document: TextDocument,
|
||||||
|
position: { line: number; character: number }
|
||||||
|
): SignatureHelp | undefined => {
|
||||||
|
const text = document.getText()
|
||||||
|
const tree = parser.parse(text)
|
||||||
|
const cursorPos = document.offsetAt(position)
|
||||||
|
|
||||||
|
const context = findCallContext(tree, cursorPos, text)
|
||||||
|
if (!context) return
|
||||||
|
|
||||||
|
const params = lookupFunctionParams(context.funcName)
|
||||||
|
if (!params) return
|
||||||
|
|
||||||
|
return {
|
||||||
|
signatures: [buildSignature(context.funcName, params)],
|
||||||
|
activeParameter: Math.min(context.argCount, params.length - 1),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const findCallContext = (tree: Tree, cursorPos: number, text: string) => {
|
||||||
|
const findBestCall = (node: SyntaxNode): SyntaxNode | undefined => {
|
||||||
|
let result: SyntaxNode | undefined
|
||||||
|
|
||||||
|
const isCall = node.name === 'FunctionCall' || node.name === 'FunctionCallOrIdentifier'
|
||||||
|
|
||||||
|
// Call ends just before cursor (within 5 chars)
|
||||||
|
if (isCall && node.to <= cursorPos && cursorPos <= node.to + 5) {
|
||||||
|
result = node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cursor is inside the call's span
|
||||||
|
if (isCall && node.from < cursorPos && cursorPos < node.to) {
|
||||||
|
result = node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Recurse - prefer smaller spans (more specific)
|
||||||
|
let child = node.firstChild
|
||||||
|
while (child) {
|
||||||
|
const found = findBestCall(child)
|
||||||
|
if (found) {
|
||||||
|
const foundSpan = found.to - found.from
|
||||||
|
const resultSpan = result ? result.to - result.from : Infinity
|
||||||
|
if (foundSpan < resultSpan) {
|
||||||
|
result = found
|
||||||
|
}
|
||||||
|
}
|
||||||
|
child = child.nextSibling
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
const call = findBestCall(tree.topNode)
|
||||||
|
if (!call) return
|
||||||
|
|
||||||
|
// Count args before cursor
|
||||||
|
let argCount = 0
|
||||||
|
let child = call.firstChild
|
||||||
|
while (child) {
|
||||||
|
if ((child.name === 'PositionalArg' || child.name === 'NamedArg') && child.to <= cursorPos) {
|
||||||
|
argCount++
|
||||||
|
}
|
||||||
|
child = child.nextSibling
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract function name
|
||||||
|
const firstChild = call.firstChild
|
||||||
|
if (!firstChild) return
|
||||||
|
|
||||||
|
let funcName: string | undefined
|
||||||
|
if (firstChild.name === 'DotGet') {
|
||||||
|
funcName = text.slice(firstChild.from, firstChild.to)
|
||||||
|
} else if (firstChild.name === 'Identifier') {
|
||||||
|
funcName = text.slice(firstChild.from, firstChild.to)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!funcName) return
|
||||||
|
|
||||||
|
return { funcName, argCount }
|
||||||
|
}
|
||||||
|
|
||||||
|
const lookupFunctionParams = (funcName: string): string[] | undefined => {
|
||||||
|
// Handle module functions: "list.map" → modules.list.map
|
||||||
|
if (funcName.includes('.')) {
|
||||||
|
const [moduleName, methodName] = funcName.split('.')
|
||||||
|
const module = completions.modules[moduleName as keyof typeof completions.modules]
|
||||||
|
const method = module?.[methodName as keyof typeof module]
|
||||||
|
return method?.params as string[] | undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Handle top-level prelude functions (print, range, etc.)
|
||||||
|
}
|
||||||
|
|
||||||
|
const buildSignature = (funcName: string, params: string[]): SignatureInformation => {
|
||||||
|
const label = `${funcName}(${params.join(', ')})`
|
||||||
|
const parameters: ParameterInformation[] = params.map(p => ({ label: p }))
|
||||||
|
return { label, parameters }
|
||||||
|
}
|
||||||
16
vscode-extension/tsconfig.json
Normal file
16
vscode-extension/tsconfig.json
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES2022",
|
||||||
|
"lib": ["ES2022"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"outDir": "./dist",
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"resolveJsonModule": true
|
||||||
|
},
|
||||||
|
"include": ["client/src/**/*", "server/src/**/*", "../src/**/*"],
|
||||||
|
"exclude": ["node_modules", "client/dist", "server/dist"]
|
||||||
|
}
|
||||||
Loading…
Reference in New Issue
Block a user