Compare commits

...

20 Commits

SHA1 Message Date
b651ff9583 cool 2025-11-04 15:52:39 -08:00
f3c6f2c032 Merge remote-tracking branch 'origin/main' into vscode 2025-11-04 14:57:34 -08:00
b99394e94f neat 2025-11-04 14:57:13 -08:00
2d7f0dbe25 extension is working! 2025-11-04 13:59:22 -08:00
e0e5e82869 Merge pull request 'Risky Business: omit do when passing a 0 arg function to a function' (#22) from risky-business into main
Reviewed-on: #22
Reviewed-by: probablycorey <probablycorey@gmail.com>
2025-11-04 15:19:53 +00:00
d707ee7e6b comments 2025-11-03 21:28:00 -08:00
b31b981343 highlight Do 2025-11-03 21:26:10 -08:00
67e0db090b maybe better 2025-11-03 20:25:44 -08:00
24e0b49679 it's alive 2025-11-03 20:25:44 -08:00
70ac5544a9 old syntax 2025-11-03 20:25:44 -08:00
7756306e1d insanity 2025-11-03 20:25:44 -08:00
7bcd582dc6 what have i done 2025-11-03 20:25:42 -08:00
6f531a2ebf ./bin/shrimp parse file 2025-11-03 20:19:33 -08:00
e68624b608 elseif -> else if 2025-11-03 20:16:15 -08:00
2fab792c1a Merge branch 'while' into grammar-tweaks 2025-11-03 20:12:29 -08:00
f1eaafee19 just in case 2025-11-03 20:11:59 -08:00
950eef0e69 no more single vs multiline blocks 2025-11-03 20:07:34 -08:00
dc557deb40 Collapse all blocks into SingleLineBlock and MultiLineBlock 2025-11-03 19:55:41 -08:00
ee0e6c6c41 Add some grammar aliases for readability 2025-11-03 19:51:43 -08:00
5f4bf60062 while loops 2025-11-03 19:29:19 -08:00
32 changed files with 1432 additions and 322 deletions

View File

@ -12,7 +12,7 @@ Go to http://localhost:3000 to try out the playground.
tail log.txt lines=50 tail log.txt lines=50
name = "Shrimp" name = "Shrimp"
greet = fn person: echo "Hello" person greet = do person: echo "Hello" person
result = tail log.txt lines=10 result = tail log.txt lines=10

View File

@ -2,9 +2,11 @@
import { Compiler } from '../src/compiler/compiler' import { Compiler } from '../src/compiler/compiler'
import { colors, globals } from '../src/prelude' import { colors, globals } from '../src/prelude'
import { parser } from '../src/parser/shrimp'
import { treeToString } from '../src/utils/tree'
import { VM, fromValue, bytecodeToString } from 'reefvm' import { VM, fromValue, bytecodeToString } from 'reefvm'
import { readFileSync, writeFileSync, mkdirSync } from 'fs' import { readFileSync, writeFileSync, mkdirSync } from 'fs'
import { randomUUID } from "crypto" import { randomUUID } from 'crypto'
import { spawn } from 'child_process' import { spawn } from 'child_process'
import { join } from 'path' import { join } from 'path'
@ -32,6 +34,17 @@ async function compileFile(filePath: string) {
} }
} }
async function parseFile(filePath: string) {
try {
const code = readFileSync(filePath, 'utf-8')
const tree = parser.parse(code)
return treeToString(tree, code)
} catch (error: any) {
console.error(`${colors.red}Error:${colors.reset} ${error.message}`)
process.exit(1)
}
}
function showHelp() { function showHelp() {
console.log(`${colors.bright}${colors.magenta}🦐 Shrimp${colors.reset} is a scripting language in a shell. console.log(`${colors.bright}${colors.magenta}🦐 Shrimp${colors.reset} is a scripting language in a shell.
@ -39,6 +52,7 @@ ${colors.bright}Usage:${colors.reset} shrimp <command> [...args]
${colors.bright}Commands:${colors.reset} ${colors.bright}Commands:${colors.reset}
${colors.cyan}run ${colors.yellow}./my-file.sh${colors.reset} Execute a file with Shrimp ${colors.cyan}run ${colors.yellow}./my-file.sh${colors.reset} Execute a file with Shrimp
${colors.cyan}parse ${colors.yellow}./my-file.sh${colors.reset} Print parse tree for Shrimp file
${colors.cyan}bytecode ${colors.yellow}./my-file.sh${colors.reset} Print bytecode for Shrimp file ${colors.cyan}bytecode ${colors.yellow}./my-file.sh${colors.reset} Print bytecode for Shrimp file
${colors.cyan}eval ${colors.yellow}'some code'${colors.reset} Evaluate a line of Shrimp code ${colors.cyan}eval ${colors.yellow}'some code'${colors.reset} Evaluate a line of Shrimp code
${colors.cyan}repl${colors.reset} Start REPL ${colors.cyan}repl${colors.reset} Start REPL
@ -102,6 +116,16 @@ async function main() {
return return
} }
if (['parse', '-parse', '--parse', '-p'].includes(command)) {
const file = args[1]
if (!file) {
console.log(`${colors.bright}usage: shrimp parse <file>${colors.reset}`)
process.exit(1)
}
console.log(await parseFile(file))
return
}
if (['run', '-run', '--run', '-r'].includes(command)) { if (['run', '-run', '--run', '-r'].includes(command)) {
const file = args[1] const file = args[1]
if (!file) { if (!file) {

View File

@ -62,7 +62,7 @@
"hono": ["hono@4.9.8", "", {}, "sha512-JW8Bb4RFWD9iOKxg5PbUarBYGM99IcxFl2FPBo2gSJO11jjUDqlP1Bmfyqt8Z/dGhIQ63PMA9LdcLefXyIasyg=="], "hono": ["hono@4.9.8", "", {}, "sha512-JW8Bb4RFWD9iOKxg5PbUarBYGM99IcxFl2FPBo2gSJO11jjUDqlP1Bmfyqt8Z/dGhIQ63PMA9LdcLefXyIasyg=="],
"reefvm": ["reefvm@git+https://git.nose.space/defunkt/reefvm#c69b172c78853756ec8acba5bc33d93eb6a571c6", { "peerDependencies": { "typescript": "^5" } }, "c69b172c78853756ec8acba5bc33d93eb6a571c6"], "reefvm": ["reefvm@git+https://git.nose.space/defunkt/reefvm#0f39e9401eb7a0a7c906e150127f9829458a79b6", { "peerDependencies": { "typescript": "^5" } }, "0f39e9401eb7a0a7c906e150127f9829458a79b6"],
"style-mod": ["style-mod@4.1.2", "", {}, "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw=="], "style-mod": ["style-mod@4.1.2", "", {}, "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw=="],

View File

@ -54,6 +54,7 @@ export class Compiler {
fnLabelCount = 0 fnLabelCount = 0
ifLabelCount = 0 ifLabelCount = 0
tryLabelCount = 0 tryLabelCount = 0
loopLabelCount = 0
bytecode: Bytecode bytecode: Bytecode
pipeCounter = 0 pipeCounter = 0
@ -280,13 +281,27 @@ export class Compiler {
const opValue = input.slice(operator.from, operator.to) const opValue = input.slice(operator.from, operator.to)
switch (opValue) { switch (opValue) {
case '+=': instructions.push(['ADD']); break case '+=':
case '-=': instructions.push(['SUB']); break instructions.push(['ADD'])
case '*=': instructions.push(['MUL']); break break
case '/=': instructions.push(['DIV']); break case '-=':
case '%=': instructions.push(['MOD']); break instructions.push(['SUB'])
break
case '*=':
instructions.push(['MUL'])
break
case '/=':
instructions.push(['DIV'])
break
case '%=':
instructions.push(['MOD'])
break
default: default:
throw new CompilerError(`Unknown compound operator: ${opValue}`, operator.from, operator.to) throw new CompilerError(
`Unknown compound operator: ${opValue}`,
operator.from,
operator.to
)
} }
// DUP and store (same as regular assignment) // DUP and store (same as regular assignment)
@ -304,10 +319,8 @@ export class Compiler {
} }
case terms.FunctionDef: { case terms.FunctionDef: {
const { paramNames, bodyNodes, catchVariable, catchBody, finallyBody } = getFunctionDefParts( const { paramNames, bodyNodes, catchVariable, catchBody, finallyBody } =
node, getFunctionDefParts(node, input)
input
)
const instructions: ProgramItem[] = [] const instructions: ProgramItem[] = []
const functionLabel: Label = `.func_${this.fnLabelCount++}` const functionLabel: Label = `.func_${this.fnLabelCount++}`
const afterLabel: Label = `.after_${functionLabel}` const afterLabel: Label = `.after_${functionLabel}`
@ -330,7 +343,13 @@ export class Compiler {
if (catchVariable || finallyBody) { if (catchVariable || finallyBody) {
// If function has catch or finally, wrap body in try/catch/finally // If function has catch or finally, wrap body in try/catch/finally
instructions.push( instructions.push(
...this.#compileTryCatchFinally(compileFunctionBody, catchVariable, catchBody, finallyBody, input) ...this.#compileTryCatchFinally(
compileFunctionBody,
catchVariable,
catchBody,
finallyBody,
input
)
) )
} else { } else {
instructions.push(...compileFunctionBody()) instructions.push(...compileFunctionBody())
@ -388,9 +407,7 @@ export class Compiler {
return instructions return instructions
} }
case terms.ThenBlock: case terms.Block: {
case terms.SingleLineThenBlock:
case terms.TryBlock: {
const children = getAllChildren(node) const children = getAllChildren(node)
const instructions: ProgramItem[] = [] const instructions: ProgramItem[] = []
@ -405,6 +422,51 @@ export class Compiler {
return instructions return instructions
} }
case terms.FunctionCallWithBlock: {
const [fn, _colon, ...block] = getAllChildren(node)
let instructions: ProgramItem[] = []
const fnLabel: Label = `.func_${this.fnLabelCount++}`
const afterLabel: Label = `.after_${fnLabel}`
instructions.push(['JUMP', afterLabel])
instructions.push([`${fnLabel}:`])
instructions.push(
...block.filter(x => x.type.name !== 'keyword')
.map(x => this.#compileNode(x!, input))
.flat()
)
instructions.push(['RETURN'])
instructions.push([`${afterLabel}:`])
if (fn?.type.id === terms.FunctionCallOrIdentifier) {
instructions.push(['LOAD', input.slice(fn!.from, fn!.to)])
instructions.push(['MAKE_FUNCTION', [], fnLabel])
instructions.push(['PUSH', 1])
instructions.push(['PUSH', 0])
instructions.push(['CALL'])
} else if (fn?.type.id === terms.FunctionCall) {
let body = this.#compileNode(fn!, input)
const namedArgCount = (body[body.length - 2]![1] as number) * 2
const startSlice = body.length - namedArgCount - 3
body = [
...body.slice(0, startSlice),
['MAKE_FUNCTION', [], fnLabel],
...body.slice(startSlice)
]
// @ts-ignore
body[body.length - 3]![1] += 1
instructions.push(...body)
} else {
throw new Error(`FunctionCallWithBlock: Expected FunctionCallOrIdentifier or FunctionCall`)
}
return instructions
}
case terms.TryExpr: { case terms.TryExpr: {
const { tryBlock, catchVariable, catchBody, finallyBody } = getTryExprParts(node, input) const { tryBlock, catchVariable, catchBody, finallyBody } = getTryExprParts(node, input)
@ -629,6 +691,24 @@ export class Compiler {
return instructions return instructions
} }
case terms.WhileExpr: {
const [_while, test, _colon, block] = getAllChildren(node)
const instructions: ProgramItem[] = []
this.loopLabelCount++
const startLoop = `.loop_${this.loopLabelCount}:`
const endLoop = `.end_loop_${this.loopLabelCount}:`
instructions.push([`${startLoop}:`])
instructions.push(...this.#compileNode(test!, input))
instructions.push(['JUMP_IF_FALSE', endLoop])
instructions.push(...this.#compileNode(block!, input))
instructions.push(['JUMP', startLoop])
instructions.push([`${endLoop}:`])
return instructions
}
default: default:
throw new CompilerError( throw new CompilerError(
`Compiler doesn't know how to handle a "${node.type.name}" node.`, `Compiler doesn't know how to handle a "${node.type.name}" node.`,

View File

@ -304,9 +304,12 @@ describe('default params', () => {
}) })
test.skip('dict default', () => { test.skip('dict default', () => {
expect('make-person = do person=[name=Bob age=60]: person end; make-person') expect('make-person = do person=[name=Bob age=60]: person end; make-person').toEvaluateTo({
.toEvaluateTo({ name: 'Bob', age: 60 }) name: 'Bob',
expect('make-person = do person=[name=Bob age=60]: person end; make-person [name=Jon age=21]') age: 60,
.toEvaluateTo({ name: 'Jon', age: 21 }) })
expect(
'make-person = do person=[name=Bob age=60]: person end; make-person [name=Jon age=21]'
).toEvaluateTo({ name: 'Jon', age: 21 })
}) })
}) })

View File

@ -0,0 +1,55 @@
import { expect, describe, test } from 'bun:test'
describe('single line function blocks', () => {
test('work with no args', () => {
expect(`trap = do x: x end; trap: true end`).toEvaluateTo(true)
})
test('work with one arg', () => {
expect(`trap = do x y: [ x (y) ] end; trap EXIT: true end`).toEvaluateTo(['EXIT', true])
})
test('work with named args', () => {
expect(`attach = do signal fn: [ signal (fn) ] end; attach signal='exit': true end`).toEvaluateTo(['exit', true])
})
test('work with dot-get', () => {
expect(`signals = [trap=do x y: [x (y)] end]; signals.trap 'EXIT': true end`).toEvaluateTo(['EXIT', true])
})
})
describe('multi line function blocks', () => {
test('work with no args', () => {
expect(`
trap = do x: x end
trap:
true
end`).toEvaluateTo(true)
})
test('work with one arg', () => {
expect(`
trap = do x y: [ x (y) ] end
trap EXIT:
true
end`).toEvaluateTo(['EXIT', true])
})
test('work with named args', () => {
expect(`
attach = do signal fn: [ signal (fn) ] end
attach signal='exit':
true
end`).toEvaluateTo(['exit', true])
})
test('work with dot-get', () => {
expect(`
signals = [trap=do x y: [x (y)] end]
signals.trap 'EXIT':
true
end`).toEvaluateTo(['EXIT', true])
})
})

View File

@ -0,0 +1,115 @@
import { expect, describe, test, beforeEach } from 'bun:test'
const buffer: string[] = []
const ribbitGlobals = {
ribbit: async (cb: Function) => {
await cb()
return buffer.join("\n")
},
tag: async (tagFn: Function, atDefaults = {}) => {
return (atNamed = {}, ...args: any[]) => tagFn(Object.assign({}, atDefaults, atNamed), ...args)
},
head: (atNamed: {}, ...args: any[]) => tag('head', atNamed, ...args),
title: (atNamed: {}, ...args: any[]) => tag('title', atNamed, ...args),
meta: (atNamed: {}, ...args: any[]) => tag('meta', atNamed, ...args),
p: (atNamed: {}, ...args: any[]) => tag('p', atNamed, ...args),
h1: (atNamed: {}, ...args: any[]) => tag('h1', atNamed, ...args),
h2: (atNamed: {}, ...args: any[]) => tag('h2', atNamed, ...args),
b: (atNamed: {}, ...args: any[]) => tag('b', atNamed, ...args),
ul: (atNamed: {}, ...args: any[]) => tag('ul', atNamed, ...args),
li: (atNamed: {}, ...args: any[]) => tag('li', atNamed, ...args),
nospace: () => NOSPACE_TOKEN,
echo: (...args: any[]) => console.log(...args)
}
function raw(fn: Function) { (fn as any).raw = true }
const tagBlock = async (tagName: string, props = {}, fn: Function) => {
const attrs = Object.entries(props).map(([key, value]) => `${key}="${value}"`)
const space = attrs.length ? ' ' : ''
buffer.push(`<${tagName}${space}${attrs.join(' ')}>`)
await fn()
buffer.push(`</${tagName}>`)
}
const tagCall = (tagName: string, atNamed = {}, ...args: any[]) => {
const attrs = Object.entries(atNamed).map(([key, value]) => `${key}="${value}"`)
const space = attrs.length ? ' ' : ''
const children = args
.reverse()
.map(a => a === TAG_TOKEN ? buffer.pop() : a)
.reverse().join(' ')
.replaceAll(` ${NOSPACE_TOKEN} `, '')
if (SELF_CLOSING.includes(tagName))
buffer.push(`<${tagName}${space}${attrs.join(' ')} />`)
else
buffer.push(`<${tagName}${space}${attrs.join(' ')}>${children}</${tagName}>`)
}
const tag = async (tagName: string, atNamed = {}, ...args: any[]) => {
if (typeof args[0] === 'function') {
await tagBlock(tagName, atNamed, args[0])
} else {
tagCall(tagName, atNamed, ...args)
return TAG_TOKEN
}
}
const NOSPACE_TOKEN = '!!ribbit-nospace!!'
const TAG_TOKEN = '!!ribbit-tag!!'
const SELF_CLOSING = ["area", "base", "br", "col", "embed", "hr", "img", "input", "link", "meta", "param", "source", "track", "wbr"]
describe('ribbit', () => {
beforeEach(() => buffer.length = 0)
test('head tag', () => {
expect(`
ribbit:
head:
title What up
meta charset=UTF-8
meta name=viewport content='width=device-width, initial-scale=1, viewport-fit=cover'
end
end
`).toEvaluateTo(`<head>
<title>What up</title>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" />
</head>`, ribbitGlobals)
})
test('custom tags', () => {
expect(`
list = tag ul class=list
ribbit:
list:
li border-bottom='1px solid black' one
li two
li three
end
end`).toEvaluateTo(`<ul class="list">
<li border-bottom="1px solid black">one</li>
<li>two</li>
<li>three</li>
</ul>`, ribbitGlobals)
})
test('inline expressions', () => {
expect(`
ribbit:
p class=container:
h1 class=bright style='font-family: helvetica' Heya
h2 man that is (b wild) (nospace) !
p Double the fun.
end
end`).toEvaluateTo(
`<p class="container">
<h1 class="bright" style="font-family: helvetica">Heya</h1>
<h2>man that is <b>wild</b>!</h2>
<p>Double the fun.</p>
</p>`, ribbitGlobals)
})
})

View File

@ -0,0 +1,48 @@
import { describe } from 'bun:test'
import { expect, test } from 'bun:test'
describe('while', () => {
test('basic variable', () => {
expect(`
a = true
b = ''
while a:
a = false
b = done
end
b`)
.toEvaluateTo('done')
})
test('basic expression', () => {
expect(`
a = 0
while a < 10:
a += 1
end
a`)
.toEvaluateTo(10)
})
test('compound expression', () => {
expect(`
a = 1
b = 0
while a > 0 and b < 100:
b += 1
end
b`)
.toEvaluateTo(100)
})
test('returns value', () => {
expect(`
a = 0
ret = while a < 10:
a += 1
done
end
ret`)
.toEvaluateTo('done')
})
})

View File

@ -210,7 +210,7 @@ export const getIfExprParts = (node: SyntaxNode, input: string) => {
} }
elseThenBlock = parts.at(-1) elseThenBlock = parts.at(-1)
} else if (child.type.id === terms.ElseIfExpr) { } else if (child.type.id === terms.ElseIfExpr) {
const [_keyword, conditional, _colon, thenBlock] = parts const [_else, _if, conditional, _colon, thenBlock] = parts
if (!conditional || !thenBlock) { if (!conditional || !thenBlock) {
const names = parts.map((p) => p.type.name).join(', ') const names = parts.map((p) => p.type.name).join(', ')
const message = `ElseIfExpr expected conditional and thenBlock, got ${names}` const message = `ElseIfExpr expected conditional and thenBlock, got ${names}`
@ -309,7 +309,7 @@ export const getDotGetParts = (node: SyntaxNode, input: string) => {
export const getTryExprParts = (node: SyntaxNode, input: string) => { export const getTryExprParts = (node: SyntaxNode, input: string) => {
const children = getAllChildren(node) const children = getAllChildren(node)
// First child is always 'try' keyword, second is colon, third is TryBlock or statement // First child is always 'try' keyword, second is colon, third is Block
const [tryKeyword, _colon, tryBlock, ...rest] = children const [tryKeyword, _colon, tryBlock, ...rest] = children
if (!tryKeyword || !tryBlock) { if (!tryKeyword || !tryBlock) {

View File

@ -5,6 +5,7 @@ export const highlighting = styleTags({
Number: tags.number, Number: tags.number,
String: tags.string, String: tags.string,
Boolean: tags.bool, Boolean: tags.bool,
Do: tags.keyword,
keyword: tags.keyword, keyword: tags.keyword,
end: tags.keyword, end: tags.keyword,
':': tags.keyword, ':': tags.keyword,
@ -15,4 +16,5 @@ export const highlighting = styleTags({
Command: tags.function(tags.variableName), Command: tags.function(tags.variableName),
'Params/Identifier': tags.definition(tags.variableName), 'Params/Identifier': tags.definition(tags.variableName),
Paren: tags.paren, Paren: tags.paren,
Comment: tags.comment,
}) })

View File

@ -25,9 +25,18 @@
Underscore { "_" } Underscore { "_" }
Regex { "//" (![/\\\n[] | "\\" ![\n] | "[" (![\n\\\]] | "\\" ![\n])* "]")+ ("//" $[gimsuy]*)? } // Stolen from the lezer JavaScript grammar Regex { "//" (![/\\\n[] | "\\" ![\n] | "[" (![\n\\\]] | "\\" ![\n])* "]")+ ("//" $[gimsuy]*)? } // Stolen from the lezer JavaScript grammar
"|"[@name=operator] "|"[@name=operator]
} }
end { @specialize[@name=keyword]<Identifier, "end"> }
while { @specialize[@name=keyword]<Identifier, "while"> }
if { @specialize[@name=keyword]<Identifier, "if"> }
else { @specialize[@name=keyword]<Identifier, "else"> }
try { @specialize[@name=keyword]<Identifier, "try"> }
catch { @specialize[@name=keyword]<Identifier, "catch"> }
finally { @specialize[@name=keyword]<Identifier, "finally"> }
throw { @specialize[@name=keyword]<Identifier, "throw"> }
null { @specialize[@name=Null]<Identifier, "null"> }
@external tokens tokenizer from "./tokenizer" { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot } @external tokens tokenizer from "./tokenizer" { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot }
@external specialize {Identifier} specializeKeyword from "./tokenizer" { Do } @external specialize {Identifier} specializeKeyword from "./tokenizer" { Do }
@ -47,9 +56,10 @@ item {
newlineOrSemicolon // allow blank lines newlineOrSemicolon // allow blank lines
} }
consumeToTerminator { consumeToTerminator {
PipeExpr | PipeExpr |
WhileExpr |
FunctionCallWithBlock |
ambiguousFunctionCall | ambiguousFunctionCall |
TryExpr | TryExpr |
Throw | Throw |
@ -70,6 +80,18 @@ pipeOperand {
FunctionCall | FunctionCallOrIdentifier FunctionCall | FunctionCallOrIdentifier
} }
WhileExpr {
while (ConditionalOp | expression) colon Block end
}
Block {
consumeToTerminator | newlineOrSemicolon block
}
FunctionCallWithBlock {
ambiguousFunctionCall colon Block CatchExpr? FinallyExpr? end
}
FunctionCallOrIdentifier { FunctionCallOrIdentifier {
DotGet | Identifier DotGet | Identifier
} }
@ -86,7 +108,6 @@ arg {
PositionalArg | NamedArg PositionalArg | NamedArg
} }
PositionalArg { PositionalArg {
expression | FunctionDef | Underscore expression | FunctionDef | Underscore
} }
@ -96,71 +117,35 @@ NamedArg {
} }
FunctionDef { FunctionDef {
singleLineFunctionDef | multilineFunctionDef Do Params colon (consumeToTerminator | newlineOrSemicolon block) CatchExpr? FinallyExpr? end
}
singleLineFunctionDef {
Do Params colon consumeToTerminator CatchExpr? FinallyExpr? @specialize[@name=keyword]<Identifier, "end">
}
multilineFunctionDef {
Do Params colon newlineOrSemicolon block CatchExpr? FinallyExpr? @specialize[@name=keyword]<Identifier, "end">
} }
IfExpr { IfExpr {
singleLineIf | multilineIf if (ConditionalOp | expression) colon Block ElseIfExpr* ElseExpr? end
}
singleLineIf {
@specialize[@name=keyword]<Identifier, "if"> (ConditionalOp | expression) colon SingleLineThenBlock @specialize[@name=keyword]<Identifier, "end">
}
multilineIf {
@specialize[@name=keyword]<Identifier, "if"> (ConditionalOp | expression) colon newlineOrSemicolon ThenBlock ElseIfExpr* ElseExpr? @specialize[@name=keyword]<Identifier, "end">
} }
ElseIfExpr { ElseIfExpr {
@specialize[@name=keyword]<Identifier, "elseif"> (ConditionalOp | expression) colon newlineOrSemicolon ThenBlock else if (ConditionalOp | expression) colon Block
} }
ElseExpr { ElseExpr {
@specialize[@name=keyword]<Identifier, "else"> colon newlineOrSemicolon ThenBlock else colon Block
}
ThenBlock {
block
}
SingleLineThenBlock {
consumeToTerminator
} }
TryExpr { TryExpr {
singleLineTry | multilineTry try colon Block CatchExpr? FinallyExpr? end
}
singleLineTry {
@specialize[@name=keyword]<Identifier, "try"> colon consumeToTerminator CatchExpr? FinallyExpr? @specialize[@name=keyword]<Identifier, "end">
}
multilineTry {
@specialize[@name=keyword]<Identifier, "try"> colon newlineOrSemicolon TryBlock CatchExpr? FinallyExpr? @specialize[@name=keyword]<Identifier, "end">
} }
CatchExpr { CatchExpr {
@specialize[@name=keyword]<Identifier, "catch"> Identifier colon (newlineOrSemicolon TryBlock | consumeToTerminator) catch Identifier colon Block
} }
FinallyExpr { FinallyExpr {
@specialize[@name=keyword]<Identifier, "finally"> colon (newlineOrSemicolon TryBlock | consumeToTerminator) finally colon Block
}
TryBlock {
block
} }
Throw { Throw {
@specialize[@name=keyword]<Identifier, "throw"> (BinOp | ConditionalOp | expression) throw (BinOp | ConditionalOp | expression)
} }
ConditionalOp { ConditionalOp {
@ -179,7 +164,7 @@ Params {
} }
NamedParam { NamedParam {
NamedArgPrefix (String | Number | Boolean | @specialize[@name=Null]<Identifier, "null">) NamedArgPrefix (String | Number | Boolean | null)
} }
Assign { Assign {
@ -217,7 +202,6 @@ expression {
} }
String { "'" stringContent* "'" } String { "'" stringContent* "'" }
} }
stringContent { stringContent {
@ -253,7 +237,7 @@ Array {
// to go through ambiguousFunctionCall (which is what we want semantically). // to go through ambiguousFunctionCall (which is what we want semantically).
// Yes, it is annoying and I gave up trying to use GLR to fix it. // Yes, it is annoying and I gave up trying to use GLR to fix it.
expressionWithoutIdentifier { expressionWithoutIdentifier {
ParenExpr | Word | String | Number | Boolean | Regex | Dict | Array | @specialize[@name=Null]<Identifier, "null"> ParenExpr | Word | String | Number | Boolean | Regex | Dict | Array | null
} }
block { block {

View File

@ -47,19 +47,19 @@ export const
Null = 45, Null = 45,
colon = 46, colon = 46,
CatchExpr = 47, CatchExpr = 47,
keyword = 69, keyword = 68,
TryBlock = 49, Block = 49,
FinallyExpr = 50, FinallyExpr = 50,
Underscore = 53, Underscore = 53,
Array = 54, Array = 54,
ConditionalOp = 55, ConditionalOp = 55,
PositionalArg = 56, PositionalArg = 56,
TryExpr = 58, WhileExpr = 58,
Throw = 60, FunctionCallWithBlock = 60,
IfExpr = 62, TryExpr = 61,
SingleLineThenBlock = 64, Throw = 63,
ThenBlock = 65, IfExpr = 65,
ElseIfExpr = 66, ElseIfExpr = 67,
ElseExpr = 68, ElseExpr = 69,
CompoundAssign = 70, CompoundAssign = 70,
Assign = 71 Assign = 71

View File

@ -4,14 +4,14 @@ import {operatorTokenizer} from "./operatorTokenizer"
import {tokenizer, specializeKeyword} from "./tokenizer" import {tokenizer, specializeKeyword} from "./tokenizer"
import {trackScope} from "./scopeTracker" import {trackScope} from "./scopeTracker"
import {highlighting} from "./highlight" import {highlighting} from "./highlight"
const spec_Identifier = {__proto__:null,null:90, catch:96, finally:102, end:104, try:118, throw:122, if:126, elseif:134, else:138} const spec_Identifier = {__proto__:null,null:90, catch:96, finally:102, end:104, while:118, try:124, throw:128, if:132, else:136}
export const parser = LRParser.deserialize({ export const parser = LRParser.deserialize({
version: 14, version: 14,
states: ":QQYQbOOO#tQcO'#C{O$qOSO'#C}O%PQbO'#EhOOQ`'#DW'#DWOOQa'#DT'#DTO&VQbO'#DdO'hQcO'#E]OOQa'#E]'#E]O(kQcO'#E]O)mQcO'#E[O*QQRO'#C|O+^QcO'#EWO+nQcO'#EWO+xQbO'#CzO,pOpO'#CxOOQ`'#EX'#EXO,uQbO'#EWO,|QQO'#EnOOQ`'#Dh'#DhO-RQbO'#DjO-RQbO'#EpOOQ`'#Dl'#DlO-vQRO'#DtOOQ`'#EW'#EWO.[QQO'#EVOOQ`'#EV'#EVOOQ`'#Dv'#DvQYQbOOO.dQbO'#DUOOQa'#E['#E[OOQ`'#Df'#DfOOQ`'#Em'#EmOOQ`'#EO'#EOO.nQbO,59cO/bQbO'#DPO/jQWO'#DQOOOO'#E_'#E_OOOO'#Dw'#DwO0OOSO,59iOOQa,59i,59iOOQ`'#Dy'#DyO0^QbO'#DXO0iQbO'#DYOOQO'#Dz'#DzO0aQQO'#DXO0wQQO,5;SOOQ`'#Dx'#DxO0|QbO,5:OO1TQQO,59oOOQa,5:O,5:OO1`QbO,5:OO1jQbO,5:aO-RQbO,59hO-RQbO,59hO-RQbO,59hO-RQbO,5:PO-RQbO,5:PO-RQbO,5:PO1zQRO,59fO2RQRO,59fO2dQRO,59fO2_QQO,59fO2oQQO,59fO2wObO,59dO3SQbO'#EPO3_QbO,59bO3vQbO,5;YO4ZQcO,5:UO5PQcO,5:UO5aQcO,5:UO6VQRO,5;[O6^QRO,5;[O1jQbO,5:`OOQ`,5:q,5:qOOQ`-E7t-E7tOOQ`,59p,59pOOQ`-E7|-E7|OOOO,59k,59kOOOO,59l,59lOOOO-E7u-E7uOOQa1G/T1G/TOOQ`-E7w-E7wO6iQQO,59sOOQO,59t,59tOOQO-E7x-E7xO6qQbO1G0nOOQ`-E7v-E7vO7UQQO1G/ZOOQa1G/j1G/jO7aQbO1G/jOOQO'#D|'#D|O7UQQO1G/ZOOQa1G/Z1G/ZOOQ`'#D}'#D}O7aQbO1G/jOOQ`1G/{1G/{OOQa1G/S1G/SO8YQcO1G/SO8dQcO1G/SO8nQcO1G/SOOQa1G/k1G/kO:^QcO1G/kO:eQcO1G/kO:lQcO1G/kOOQa1G/Q1G/QOOQa1G/O1G/OO!aQbO'#C{O:sQbO'#CwOOQ`,5:k,5:kOOQ`-E7}-E7}O;QQbO1G0tO;]QbO1G0uO;yQbO1G0vOOQ`1G/z1G/zO<^QbO7+&YO;]QbO7+&[O<iQQO7+$uOOQa7+$u7+$uO<tQbO7+%UOOQa7+%U7+%UOOQO-E7z-E7zOOQ`-E7{-E7{O=OQbO'#D]O=TQQO'#D`OOQ`7+&`7+&`O=YQbO7+&`O=_QbO7+&`OOQ`'#D{'#D{O=gQQO'#D{O=lQbO'#EiOOQ`'#D_'#D_O>`QbO7+&aOOQ`'#Dn'#DnO>kQbO7+&bO>pQbO7+&cOOQ`<<It<<ItO?^QbO<<ItO?cQbO<<ItO?kQbO<<IvOOQa<<Ha<<HaOOQa<<Hp<<HpO?vQQO,59wO?{QbO,59zOOQ`<<Iz<<IzO@`QbO<<IzOOQ`,5:g,5:gOOQ`-E7y-E7yOOQ`<<I{<<I{O@eQbO<<I{O@jQbO<<I{OOQ`<<I|<<I|OOQ`'#Do'#DoO@rQbO<<I}OOQ`AN?`AN?`O@}QbOAN?`OOQ`AN?bAN?bOASQbOAN?bOAXQbOAN?bOAaQbO1G/cOAtQbO1G/fOOQ`1G/f1G/fOOQ`AN?fAN?fOOQ`AN?gAN?gOB[QbOAN?gO-RQbO'#DpOOQ`'#EQ'#EQOBaQbOAN?iOBlQQO'#DrOOQ`AN?iAN?iOBqQbOAN?iOOQ`G24zG24zOOQ`G24|G24|OBvQbOG24|OB{QbO7+$}OOQ`7+$}7+$}OOQ`7+%Q7+%QOOQ`G25RG25ROCfQRO,5:[OCmQRO,5:[OOQ`-E8O-E8OOOQ`G25TG25TOCxQbOG25TOC}QQO,5:^OOQ`LD*hLD*hOOQ`<<Hi<<HiODSQQO1G/vOOQ`LD*oLD*oOAtQbO1G/xO>pQbO7+%bOOQ`7+%d7+%dOOQ`<<H|<<H|", states: 
"9OQYQbOOO#zQcO'#C{O$zOSO'#C}OOQa'#DT'#DTO&TQbO'#DdO'iQcO'#E]OOQa'#E]'#E]O(lQcO'#E]O)nQcO'#E[O*UQRO'#C|O+eQcO'#EWO+uQcO'#EWO,PQbO'#CzO,wOpO'#CxOOQ`'#EX'#EXO,|QbO'#EWO-WQRO'#DtOOQ`'#EW'#EWO-lQQO'#EVOOQ`'#EV'#EVOOQ`'#Dv'#DvQYQbOOO-tQbO'#DWO.PQbO'#DhO.tQQO'#DkO.PQbO'#DmO.PQbO'#DoO.yQbO'#DUOOQa'#E['#E[OOQ`'#Df'#DfOOQ`'#Ek'#EkOOQ`'#EO'#EOO/TQbO,59cO/}QbO'#DPO0VQWO'#DQOOOO'#E_'#E_OOOO'#Dw'#DwO0kOSO,59iOOQa,59i,59iOOQ`'#Dx'#DxO0yQbO,5:OO1QQQO,59oOOQa,5:O,5:OO1]QbO,5:OO1gQbO,5:aO.PQbO,59hO.PQbO,59hO.PQbO,59hO.PQbO,5:PO.PQbO,5:PO.PQbO,5:PO1zQRO,59fO2RQRO,59fO2dQRO,59fO2_QQO,59fO2oQQO,59fO2wObO,59dO3SQbO'#EPO3_QbO,59bO3yQbO,5:UO1gQbO,5:`OOQ`,5:q,5:qOOQ`-E7t-E7tOOQ`'#Dy'#DyO4aQbO'#DXO4lQbO'#DYOOQO'#Dz'#DzO4dQQO'#DXO4zQQO,59rO5kQRO,5:SO5rQRO,5:SO3yQbO,5:VO5}QcO,5:XO6yQcO,5:XO7ZQcO,5:XO7eQRO,5:ZO7lQRO,5:ZOOQ`,59p,59pOOQ`-E7|-E7|OOOO,59k,59kOOOO,59l,59lOOOO-E7u-E7uOOQa1G/T1G/TOOQ`-E7v-E7vO7wQQO1G/ZOOQa1G/j1G/jO8SQbO1G/jOOQO'#D|'#D|O7wQQO1G/ZOOQa1G/Z1G/ZOOQ`'#D}'#D}O8SQbO1G/jOOQ`1G/{1G/{OOQa1G/S1G/SO9OQcO1G/SO9YQcO1G/SO9dQcO1G/SOOQa1G/k1G/kO;YQcO1G/kO;aQcO1G/kO;hQcO1G/kOOQa1G/Q1G/QOOQa1G/O1G/OO!dQbO'#C{O;oQbO'#CwOOQ`,5:k,5:kOOQ`-E7}-E7}OOQ`'#D_'#D_O;|QbO'#D_O<pQbO1G/pOOQ`1G/z1G/zOOQ`-E7w-E7wO<{QQO,59sOOQO,59t,59tOOQO-E7x-E7xO=TQbO1G/^O3yQbO1G/nO=kQbO1G/qO3yQbO1G/uO=vQQO7+$uOOQa7+$u7+$uO>RQbO7+%UOOQa7+%U7+%UOOQO-E7z-E7zOOQ`-E7{-E7{OOQ`'#D{'#D{O>]QQO'#D{O>bQbO'#EhOOQ`,59y,59yO?UQbO'#D]O?ZQQO'#D`OOQ`7+%[7+%[O?`QbO7+%[O?eQbO7+%[O?mQbO7+$xO?xQbO7+$xO@iQbO7+%YOOQ`7+%]7+%]O@nQbO7+%]O@sQbO7+%]O@{QbO7+%aOOQa<<Ha<<HaOOQa<<Hp<<HpOOQ`,5:g,5:gOOQ`-E7y-E7yOATQQO,59wO3yQbO,59zOOQ`<<Hv<<HvOAYQbO<<HvOOQ`<<Hd<<HdOA_QbO<<HdOAdQbO<<HdOAlQbO<<HdOOQ`<<Ht<<HtOOQ`<<Hw<<HwOAwQbO<<HwOOQ`'#EQ'#EQOA|QbO<<H{OBUQbO'#DsOOQ`<<H{<<H{OB^QbO<<H{O3yQbO1G/cOOQ`1G/f1G/fOOQ`AN>bAN>bOOQ`AN>OAN>OOBcQbOAN>OOBhQbOAN>OOOQ`AN>cAN>cOOQ`-E8O-E8OOOQ`AN>gAN>gOBpQbOAN>gO.PQbO,5:]O3yQbO,5:_OOQ`7+$}7+$}OOQ`G23jG23jOBuQbOG23jPBXQbO'#DqOOQ`G24RG24ROBzQRO1G/wOCRQRO1G/wOOQ`1G/y1G/yOOQ`LD)ULD)UO3yQbO7+%cOOQ`<<H}<<H}",
stateData: "D[~O!wOS!xOS~OdPOegOfWOg_OhROmWOuWOvWO}WO!]bO!_dO!aeO!}^O#QQO#XTO#YUO#ZjO~OdnOfWOg_OhROmWOuWOvWOymO}WO!VoO!}^O#QQO#XTO#YUO!ZoX#ZoX#foX#`oX!QoX!ToX!UoX~OP#OXQ#OXR#OXS#OXT#OXU#OXW#OXX#OXY#OXZ#OX[#OX]#OX^#OX~P!aOruO#QxO#SsO#TtO~OdyOy{O!O{P~OdnOfWOg_OmWOuWOvWOymO}WO!}^O#QQO#XTO#YUO#Z!PO~O#_!SO~P%[OP#PXQ#PXR#PXS#PXT#PXU#PXW#PXX#PXY#PXZ#PX[#PX]#PX^#PX#Z#PX#f#PX!Q#PX!T#PX!U#PX~OdnOfWOg_OhROmWOuWOvWOymO}WO!VoO!}^O#QQO#XTO#YUO#`#PX~P&^OV!UO~P&^OP#OXQ#OXR#OXS#OXT#OXU#OXW#OXX#OXY#OXZ#OX[#OX]#OX^#OX~O#Z!zX#f!zX!Q!zX!T!zX!U!zX~P(rOP!WOQ!WOR!XOS!XOT!ZOU![OW!YOX!YOY!YOZ!YO[!YO]!YO^!VO~O#Z!zX#f!zX!Q!zX!T!zX!U!zX~OP!WOQ!WOR!XOS!XO~P*{OT!ZOU![O~P*{OdPOfWOg_OhROmWOuWOvWO}WO!}^O#QQO#XTO#YUO~O!|!bO~O!Z!cO~P*{O!O!eO~OdnOfWOg_OmWOuWOvWO}WO!}^O#QQO#XTO#YUO~OV!UO_!kO`!kOa!kOb!kOc!kO~O#Z!lO#f!lO~OhRO!V!nO~P-ROhROymO!VoO!Zka#Zka#fka#`ka!Qka!Tka!Uka~P-ROd!pO!}^O~O#Q!qO#S!qO#T!qO#U!qO#V!qO#W!qO~OruO#Q!sO#SsO#TtO~OdyOy{O!O{X~Om!vOu!vO}!vO#QQO~O!O!xO~O#_!{O~P%[OymO#Z!}O#_#PO~O#Z#QO#_!{O~P-ROegO!]bO!_dO!aeO~P+xO#`#]O~P(rOP!WOQ!WOR!XOS!XO#`#]O~OT!ZOU![O#`#]O~O!Z!cO#`#]O~Od#^Om#^O!}^O~Od#_Og_O!}^O~O!Z!cO#Zja#fja#`ja!Qja!Tja!Uja~OegO!]bO!_dO!aeO#Z#dO~P+xO#Z!^a#f!^a!Q!^a!T!^a!U!^a~P*QO#Z!^a#f!^a!Q!^a!T!^a!U!^a~OP!WOQ!WOR!XOS!XO~P4nOT!ZOU![O~P4nOT!ZOU![OW!YOX!YOY!YOZ!YO[!YO]!YO~O!O#eO~P5kOT!ZOU![O!O#eO~Oy{O!O{a~OegO!]bO!_dO!aeO#Z#hO~P+xOymO#Z!}O#_#jO~O#Z#QO#_#lO~P-RO^!VORpiSpi#Zpi#fpi#`pi!Qpi!Tpi!Upi~OPpiQpi~P7kOP!WOQ!WO~P7kOP!WOQ!WORpiSpi#Zpi#fpi#`pi!Qpi!Tpi!Upi~OW!YOX!YOY!YOZ!YO[!YO]!YOT!Xi#Z!Xi#f!Xi#`!Xi!O!Xi!Q!Xi!T!Xi!U!Xi~OU![O~P9`OU![O~P9rOU!Xi~P9`OhROymO!VoO~P-RO!Q#oO!T#pO!U#qO~OegO!]bO!_dO!aeO#Z#tO!Q#]P!T#]P!U#]P~P+xOegO!]bO!_dO!aeO#Z#{O~P+xO!Q#oO!T#pO!U#|O~OymO#Z!}O#_$QO~O#Z#QO#_$RO~P-ROd$SO~O!O$TO~O!U$UO~O!T#pO!U$UO~O#Z$WO~OegO!]bO!_dO!aeO#Z#tO!Q#]X!T#]X!U#]X!e#]X!g#]X~P+xO!Q#oO!T#pO!U$YO~O!U$]O~OegO!]bO!_dO!aeO#Z#tO!U#]P!e#]P!g#]P~P+xO!U$`O~O!T#pO!U$`O~O!Q#oO!T#pO!U$bO~O!O$eO~OegO!]bO!_dO!aeO#Z$fO~P+xO!U$hO~O!U$iO~O!T#pO!U$iO~O!U$oO!e$kO!g$nO~O!U$qO~O!U$rO~O!T#pO!U$rO~OegO!]bO!_dO!aeO#Z$tO~P+xOegO!]bO!_dO!aeO#Z#tO!U#]P~P+xO!U$wO~O!U${O!e$kO!g$nO~O!O$}O~O!U${O~O!U%OO~OegO!]bO!_dO!aeO#Z#tO!T#]P!U#]P~P+xO!O%QO~P5kOT!ZOU![O!O%QO~O!U%RO~O#Z%SO~O#Z%TO~Omv~", stateData: 
"Ca~O!wOS!xOS~OdPOe`OfUOg]OhfOmUOuUOvUO}UO!]gO!`hO!biO!djO!}[O#QQO#XRO#YSO#ZcO~OdlOfUOg]OhfOmUOuUOvUOykO}UO!VmO!}[O#QQO#XRO#YSO!ZoX#ZoX#`oX#^oX!QoX!ToX!UoX!foX~OP#OXQ#OXR#OXS#OXT#OXU#OXW#OXX#OXY#OXZ#OX[#OX]#OX^#OX!OoX~P!dOrsO#QvO#SqO#TrO~OdlOfUOg]OmUOuUOvUOykO}UO!}[O#QQO#XRO#YSO#ZwO~O#]zO~P%YOP#PXQ#PXR#PXS#PXT#PXU#PXW#PXX#PXY#PXZ#PX[#PX]#PX^#PX#Z#PX#`#PX!Q#PX!T#PX!U#PX!f#PX~OdlOfUOg]OhfOmUOuUOvUOykO}UO!VmO!}[O#QQO#XRO#YSO#^#PX~P&[OV|O~P&[OP#OXQ#OXR#OXS#OXT#OXU#OXW#OXX#OXY#OXZ#OX[#OX]#OX^#OX~O#Z!zX#`!zX!Q!zX!T!zX!U!zX!f!zX~P(sOP!OOQ!OOR!POS!POT!ROU!SOW!QOX!QOY!QOZ!QO[!QO]!QO^}O~O#Z!zX#`!zX!Q!zX!T!zX!U!zX!f!zX~OP!OOQ!OOR!POS!PO~P+POT!ROU!SO~P+POdPOfUOg]OhfOmUOuUOvUO}UO!}[O#QQO#XRO#YSO~O!|!YO~O!O!]O!Z!ZO~P+POV|O_!^O`!^Oa!^Ob!^Oc!^O~O#Z!_O#`!_O~Od!aOy!cO!O{P~OdlOfUOg]OmUOuUOvUO}UO!}[O#QQO#XRO#YSO~O!O!iO~OhfO!V!oO~P.POhfOykO!VmO!Oka!Zka#Zka#`ka#^ka!Qka!Tka!Uka!fka~P.POd!qO!}[O~O#Q!rO#S!rO#T!rO#U!rO#V!rO#W!rO~OrsO#Q!tO#SqO#TrO~O#]!wO~P%YOykO#Z!yO#]!{O~O#Z!|O#]!wO~P.POe`O!]gO!`hO!biO!djO~P,PO#^#XO~P(sOP!OOQ!OOR!POS!PO#^#XO~OT!ROU!SO#^#XO~O!Z!ZO#^#XO~Od#YOm#YO!}[O~Od#ZOg]O!}[O~O!Z!ZO#Zja#`ja#^ja!Qja!Tja!Uja!fja~Oe`O!]gO!`hO!biO!djO#Z#`O~P,POd!aOy!cO!O{X~Om#eOu#eO}#eO#QQO~O!O#gO~OT!ROU!SOW!QOX!QOY!QOZ!QO[!QO]!QO~O!O#hO~P5POT!ROU!SO!O#hO~O#Z!aa#`!aa!Q!aa!T!aa!U!aa!f!aa~P*UO#Z!aa#`!aa!Q!aa!T!aa!U!aa!f!aa~OP!OOQ!OOR!POS!PO~P6eOT!ROU!SO~P6eO!O#jO~P5POT!ROU!SO!O#jO~OykO#Z!yO#]#lO~O#Z!|O#]#nO~P.PO^}ORpiSpi#Zpi#`pi#^pi!Qpi!Tpi!Upi!fpi~OPpiQpi~P8^OP!OOQ!OO~P8^OP!OOQ!OORpiSpi#Zpi#`pi#^pi!Qpi!Tpi!Upi!fpi~OW!QOX!QOY!QOZ!QO[!QO]!QOT!Xi#Z!Xi#`!Xi#^!Xi!O!Xi!Q!Xi!T!Xi!U!Xi!f!Xi~OU!SO~P:XOU!SO~P:kOU!Xi~P:XOhfOykO!VmO~P.POe`O!]gO!`hO!biO!djO#Z#qO!Q#[P!T#[P!U#[P!f#[P~P,PO!Q#uO!T#vO!U#wO~Oy!cO!O{a~Oe`O!]gO!`hO!biO!djO#Z#{O~P,PO!Q#uO!T#vO!U#}O~OykO#Z!yO#]$RO~O#Z!|O#]$SO~P.PO#Z$TO~Oe`O!]gO!`hO!biO!djO#Z#qO!Q#[X!T#[X!U#[X!f#[X~P,POd$VO~O!O$WO~O!U$XO~O!T#vO!U$XO~O!Q#uO!T#vO!U$ZO~Oe`O!]gO!`hO!biO!djO#Z#qO!Q#[P!T#[P!U#[P~P,PO!U$_O~O!U$`O~O!T#vO!U$`O~O!U$eO!f$dO~O!O$gO~O!U$iO~O!U$jO~O!T#vO!U$jO~O!Q#uO!T#vO!U$jO~O!U$mO~O!U$oO!f$dO~O!O$rO!d$qO~O!U$oO~O!U$tO~O!T#vO!U$tO~O!U$wO~O!U${O~O!O$|O~P5POT!ROU!SO!O$|O~Omv~",
goto: "5{#fPPPPPPPPPPPPPPPPPPPPPPPPPP#g#}$dP%d#}&j'ZP(X(XPP(])WP)k*]*`PP*fP*r*{PPP+e,b-XP-`P-`P-`P-s-v.PP.TP-`-`.Z.a.g.m.s.}/Z/e/o/x0PPPPP0V0Z1OPP1i3QP4PPPPPPPPP4T4o4TPP4|5T5T5h5hrhOl!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TR!`^w`O^l!U!c!e!k!x#d#e#h#v#{$T$e$f$t%S%TtPO^l!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TznPUVdemr!Q!T!V!W!X!Y!Z![!|#R#_#`#k$kR#_!ctVO^l!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TzWPUVdemr!Q!T!V!W!X!Y!Z![!|#R#_#`#k$kQ!psQ#^!bR#`!cr[Ol!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TQ!^^Q!gdQ#T!WR#W!X!pWOPUV^delmr!Q!T!U!V!W!X!Y!Z![!e!k!x!|#R#_#`#d#e#h#k#v#{$T$e$f$k$t%S%TR!v{TuQw!qWOPUV^delmr!Q!T!U!V!W!X!Y!Z![!e!k!x!|#R#_#`#d#e#h#k#v#{$T$e$f$k$t%S%TYpPVr#_#`Q!RUQ!z!QX!}!R!z#O#irhOl!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TYoPVr#_#`Q!`^R!nmR!ORX|Rz}!uQ#s#cQ$O#gQ$[#xR$d$PQ#x#dQ$v$fR%P$tQ#r#cQ#}#gQ$V#sQ$Z#xQ$a$OQ$c$PQ$j$[R$s$d|WPUV^demr!Q!T!V!W!X!Y!Z![!|#R#_#`#k$ksXOl!U!e!k!x#d#e#h#v#{$T$e$f$t%S%Tr]Ol!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TQ!_^Q!hdQ!jeQ#X![Q#Z!ZR$y$kZpPVr#_#`shOl!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TR#z#eQ$_#{Q%U%SR%V%TT$l$_$mQ$p$_R$|$mQlOR!mlQwQR!rwQ!QUR!y!QQzRR!tzQ}RQ!uzT!w}!u^#v#d#h#{$f$t%S%TR$X#vQ#O!RQ#i!zT#m#O#iQ#R!TQ#k!|T#n#R#kWrPV#_#`R!orS!da!aR#b!dQ$m$_R$z$mTkOlSiOlQ#S!UQ#c!eQ#f!kQ#g!x`#u#d#h#v#{$f$t%S%TQ#y#eQ$g$TR$u$eraOl!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TQ!a^R#a!ctZO^l!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TYoPVr#_#`Q!TUQ!fdQ!ieQ!nmQ!|!QW#Q!T!|#R#kQ#T!VQ#U!WQ#V!XQ#X!YQ#Y!ZQ#[![R$x$krYOl!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TznPUVdemr!Q!T!V!W!X!Y!Z![!|#R#_#`#k$kR!]^TvQw!RSOPV^lmr!U!e!k!x#_#`#d#e#h#v#{$T$e$f$t%S%TU#w#d$f$tQ$P#hV$^#{%S%TZqPVr#_#`scOl!U!e!k!x#d#e#h#v#{$T$e$f$t%S%TsfOl!U!e!k!x#d#e#h#v#{$T$e$f$t%S%T", goto: "3}#`PPPPPPPPPPPPPPPPPPPPPPPPPP#a#v$[P%[#v&b'QP(O(OPP(S(}P)b*R*UPP*[P*h+QPPP+h,e-^P-eP-e-eP-eP-eP-wP-{-e-e.R.X._.e.k.u.|/W/b/k/rPPPP/x/|0jPP1S2mP3lPPPPPPPP3pPP3vpaOe|!]!^!i#`#g#h#j#s#{$W$g$r$|R!W[u^O[e|!Z!]!^!i#`#g#h#j#s#{$W$g$r$|rPO[e|!]!^!i#`#g#h#j#s#{$W$g$r$||lPSTgijkpx{}!O!P!Q!R!S!x!}#Z#[#m$qR#Z!ZrTO[e|!]!^!i#`#g#h#j#s#{$W$g$r$||UPSTgijkpx{}!O!P!Q!R!S!x!}#Z#[#m$qQ!qqQ#Y!YR#[!ZpYOe|!]!^!i#`#g#h#j#s#{$W$g$r$|Q!U[Q!kiQ#P!OR#S!P!pUOPST[egijkpx{|}!O!P!Q!R!S!]!^!i!x!}#Z#[#`#g#h#j#m#s#{$W$g$q$r$|R#e!cTsQu!qUOPST[egijkpx{|}!O!P!Q!R!S!]!^!i!x!}#Z#[#`#g#h#j#m#s#{$W$g$q$r$|YnPTp#Z#[QySQ!vxX!yy!v!z#kpaOe|!]!^!i#`#g#h#j#s#{$W$g$r$|YmPTp#Z#[Q!W[R!okR!ffX!df!b!e#dQ#y#aQ$P#iQ$]#zR$l$^Q#a!]Q#i!iQ#|#hQ$Q#jQ$h$WQ$s$gQ$z$rR$}$|Q#x#aQ$O#iQ$Y#yQ$[#zQ$a$PS$k$]$^R$u$l!OUPST[gijkpx{}!O!P!Q!R!S!x!}#Z#[#m$qqVOe|!]!^!i#`#g#h#j#s#{$W$g$r$|pZOe|!]!^!i#`#g#h#j#s#{$W$g$r$|Q!V[Q!hgQ!liQ!njQ#T!SQ#V!RR$y$qZnPTp#Z#[qaOe|!]!^!i#`#g#h#j#s#{$W$g$r$|T$b$Q$cQ$f$QR$p$cQeOR!`eQuQR!suQxSR!uxQ!bfR#c!bQ!efQ#d!bT#f!e#dS#s#`#{R$U#sQ!zyQ#k!vT#o!z#kQ!}{Q#m!xT#p!}#mWpPT#Z#[R!ppS![_!XR#^![Q$c$QR$n$cTdOeSbOeQ#O|`#_!]!i#h#j$W$g$r$|Q#b!^U#r#`#s#{R#z#gp_Oe|!]!^!i#`#g#h#j#s#{$W$g$r$|Q!X[R#]!ZrXO[e|!]!^!i#`#g#h#j#s#{$W$g$r$|YmPTp#Z#[Q{SQ!ggQ!jiQ!mjQ!okQ!xxW!|{!x!}#mQ#P}Q#Q!OQ#R!PQ#T!QQ#U!RQ#W!SR$x$qpWOe|!]!^!i#`#g#h#j#s#{$W$g$r$||lPSTgijkpx{}!O!P!Q!R!S!x!}#Z#[#m$qR!T[TtQuQ#t#`R$^#{ZoPTp#Z#[",
nodeNames: "⚠ Star Slash Plus Minus And Or Eq EqEq Neq Lt Lte Gt Gte Modulo PlusEq MinusEq StarEq SlashEq ModuloEq Identifier AssignableIdentifier Word IdentifierBeforeDot Do Program PipeExpr FunctionCall DotGet Number ParenExpr FunctionCallOrIdentifier BinOp String StringFragment Interpolation EscapeSeq Boolean Regex Dict NamedArg NamedArgPrefix FunctionDef Params NamedParam Null colon CatchExpr keyword TryBlock FinallyExpr keyword keyword Underscore Array ConditionalOp PositionalArg operator TryExpr keyword Throw keyword IfExpr keyword SingleLineThenBlock ThenBlock ElseIfExpr keyword ElseExpr keyword CompoundAssign Assign", nodeNames: "⚠ Star Slash Plus Minus And Or Eq EqEq Neq Lt Lte Gt Gte Modulo PlusEq MinusEq StarEq SlashEq ModuloEq Identifier AssignableIdentifier Word IdentifierBeforeDot Do Program PipeExpr FunctionCall DotGet Number ParenExpr FunctionCallOrIdentifier BinOp String StringFragment Interpolation EscapeSeq Boolean Regex Dict NamedArg NamedArgPrefix FunctionDef Params NamedParam Null colon CatchExpr keyword Block FinallyExpr keyword keyword Underscore Array ConditionalOp PositionalArg operator WhileExpr keyword FunctionCallWithBlock TryExpr keyword Throw keyword IfExpr keyword ElseIfExpr keyword ElseExpr CompoundAssign Assign",
maxTerm: 114, maxTerm: 108,
context: trackScope, context: trackScope,
nodeProps: [ nodeProps: [
["closedBy", 46,"end"] ["closedBy", 46,"end"]
@ -19,9 +19,9 @@ export const parser = LRParser.deserialize({
propSources: [highlighting], propSources: [highlighting],
skippedNodes: [0], skippedNodes: [0],
repeatNodeCount: 11, repeatNodeCount: 11,
tokenData: "C_~R|OX#{XY$jYZ%TZp#{pq$jqs#{st%ntu'Vuw#{wx'[xy'ayz'zz{#{{|(e|}#{}!O+X!O!P#{!P!Q-n!Q![)S![!]6Z!]!^%T!^!}#{!}#O6t#O#P8j#P#Q8o#Q#R#{#R#S9Y#S#T#{#T#Y,Y#Y#Z9s#Z#b,Y#b#c>q#c#f,Y#f#g?n#g#h,Y#h#i@k#i#o,Y#o#p#{#p#qBo#q;'S#{;'S;=`$d<%l~#{~O#{~~CYS$QUrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{S$gP;=`<%l#{^$qUrS!wYOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U%[UrS#ZQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{^%uZrS!xYOY%nYZ#{Zt%ntu&huw%nwx&hx#O%n#O#P&h#P;'S%n;'S;=`'P<%lO%nY&mS!xYOY&hZ;'S&h;'S;=`&y<%lO&hY&|P;=`<%l&h^'SP;=`<%l%n~'[O#S~~'aO#Q~U'hUrS!}QOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U(RUrS#`QOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U(jWrSOt#{uw#{x!Q#{!Q![)S![#O#{#P;'S#{;'S;=`$d<%lO#{U)ZYrSmQOt#{uw#{x!O#{!O!P)y!P!Q#{!Q![)S![#O#{#P;'S#{;'S;=`$d<%lO#{U*OWrSOt#{uw#{x!Q#{!Q![*h![#O#{#P;'S#{;'S;=`$d<%lO#{U*oWrSmQOt#{uw#{x!Q#{!Q![*h![#O#{#P;'S#{;'S;=`$d<%lO#{U+^^rSOt#{uw#{x}#{}!O,Y!O!Q#{!Q![)S![!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{U,_[rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{U-[UyQrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U-sWrSOt#{uw#{x!P#{!P!Q.]!Q#O#{#P;'S#{;'S;=`$d<%lO#{U.b^rSOY/^YZ#{Zt/^tu0auw/^wx0ax!P/^!P!Q#{!Q!}/^!}#O5S#O#P2o#P;'S/^;'S;=`6T<%lO/^U/e^rSvQOY/^YZ#{Zt/^tu0auw/^wx0ax!P/^!P!Q3U!Q!}/^!}#O5S#O#P2o#P;'S/^;'S;=`6T<%lO/^Q0fXvQOY0aZ!P0a!P!Q1R!Q!}0a!}#O1p#O#P2o#P;'S0a;'S;=`3O<%lO0aQ1UP!P!Q1XQ1^UvQ#Z#[1X#]#^1X#a#b1X#g#h1X#i#j1X#m#n1XQ1sVOY1pZ#O1p#O#P2Y#P#Q0a#Q;'S1p;'S;=`2i<%lO1pQ2]SOY1pZ;'S1p;'S;=`2i<%lO1pQ2lP;=`<%l1pQ2rSOY0aZ;'S0a;'S;=`3O<%lO0aQ3RP;=`<%l0aU3ZWrSOt#{uw#{x!P#{!P!Q3s!Q#O#{#P;'S#{;'S;=`$d<%lO#{U3zbrSvQOt#{uw#{x#O#{#P#Z#{#Z#[3s#[#]#{#]#^3s#^#a#{#a#b3s#b#g#{#g#h3s#h#i#{#i#j3s#j#m#{#m#n3s#n;'S#{;'S;=`$d<%lO#{U5X[rSOY5SYZ#{Zt5Stu1puw5Swx1px#O5S#O#P2Y#P#Q/^#Q;'S5S;'S;=`5}<%lO5SU6QP;=`<%l5SU6WP;=`<%l/^U6bUrS!OQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U6{W#YQrSOt#{uw#{x!_#{!_!`7e!`#O#{#P;'S#{;'S;=`$d<%lO#{U7jVrSOt#{uw#{x#O#{#P#Q8P#Q;'S#{;'S;=`$d<%lO#{U8WU#XQrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{~8oO#T~U8vU#_QrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U9aUrS!VQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U9x]rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#U:q#U#o,Y#o;'S#{;'S;=`$d<%lO#{U:v^rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#`,Y#`#a;r#a#o,Y#o;'S#{;'S;=`$d<%lO#{U;w^rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#g,Y#g#h<s#h#o,Y#o;'S#{;'S;=`$d<%lO#{U<x^rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#X,Y#X#Y=t#Y#o,Y#o;'S#{;'S;=`$d<%lO#{U={[uQrSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{^>x[#UWrSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{^?u[#WWrSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{^@r^#VWrSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#f,Y#f#gAn#g#o,Y#o;'S#{;'S;=`$d<%lO#{UAs^rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#i,Y#i#j<s#j#o,Y#o;'S#{;'S;=`$d<%lO#{UBvU!ZQrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{~C_O#f~", tokenData: 
"C_~R|OX#{XY$jYZ%TZp#{pq$jqs#{st%ntu'Vuw#{wx'[xy'ayz'zz{#{{|(e|}#{}!O+X!O!P#{!P!Q-n!Q![)S![!]6Z!]!^%T!^!}#{!}#O6t#O#P8j#P#Q8o#Q#R#{#R#S9Y#S#T#{#T#Y,Y#Y#Z9s#Z#b,Y#b#c>q#c#f,Y#f#g?n#g#h,Y#h#i@k#i#o,Y#o#p#{#p#qBo#q;'S#{;'S;=`$d<%l~#{~O#{~~CYS$QUrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{S$gP;=`<%l#{^$qUrS!wYOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U%[UrS#ZQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{^%uZrS!xYOY%nYZ#{Zt%ntu&huw%nwx&hx#O%n#O#P&h#P;'S%n;'S;=`'P<%lO%nY&mS!xYOY&hZ;'S&h;'S;=`&y<%lO&hY&|P;=`<%l&h^'SP;=`<%l%n~'[O#S~~'aO#Q~U'hUrS!}QOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U(RUrS#^QOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U(jWrSOt#{uw#{x!Q#{!Q![)S![#O#{#P;'S#{;'S;=`$d<%lO#{U)ZYrSmQOt#{uw#{x!O#{!O!P)y!P!Q#{!Q![)S![#O#{#P;'S#{;'S;=`$d<%lO#{U*OWrSOt#{uw#{x!Q#{!Q![*h![#O#{#P;'S#{;'S;=`$d<%lO#{U*oWrSmQOt#{uw#{x!Q#{!Q![*h![#O#{#P;'S#{;'S;=`$d<%lO#{U+^^rSOt#{uw#{x}#{}!O,Y!O!Q#{!Q![)S![!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{U,_[rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{U-[UyQrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U-sWrSOt#{uw#{x!P#{!P!Q.]!Q#O#{#P;'S#{;'S;=`$d<%lO#{U.b^rSOY/^YZ#{Zt/^tu0auw/^wx0ax!P/^!P!Q#{!Q!}/^!}#O5S#O#P2o#P;'S/^;'S;=`6T<%lO/^U/e^rSvQOY/^YZ#{Zt/^tu0auw/^wx0ax!P/^!P!Q3U!Q!}/^!}#O5S#O#P2o#P;'S/^;'S;=`6T<%lO/^Q0fXvQOY0aZ!P0a!P!Q1R!Q!}0a!}#O1p#O#P2o#P;'S0a;'S;=`3O<%lO0aQ1UP!P!Q1XQ1^UvQ#Z#[1X#]#^1X#a#b1X#g#h1X#i#j1X#m#n1XQ1sVOY1pZ#O1p#O#P2Y#P#Q0a#Q;'S1p;'S;=`2i<%lO1pQ2]SOY1pZ;'S1p;'S;=`2i<%lO1pQ2lP;=`<%l1pQ2rSOY0aZ;'S0a;'S;=`3O<%lO0aQ3RP;=`<%l0aU3ZWrSOt#{uw#{x!P#{!P!Q3s!Q#O#{#P;'S#{;'S;=`$d<%lO#{U3zbrSvQOt#{uw#{x#O#{#P#Z#{#Z#[3s#[#]#{#]#^3s#^#a#{#a#b3s#b#g#{#g#h3s#h#i#{#i#j3s#j#m#{#m#n3s#n;'S#{;'S;=`$d<%lO#{U5X[rSOY5SYZ#{Zt5Stu1puw5Swx1px#O5S#O#P2Y#P#Q/^#Q;'S5S;'S;=`5}<%lO5SU6QP;=`<%l5SU6WP;=`<%l/^U6bUrS!OQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U6{W#YQrSOt#{uw#{x!_#{!_!`7e!`#O#{#P;'S#{;'S;=`$d<%lO#{U7jVrSOt#{uw#{x#O#{#P#Q8P#Q;'S#{;'S;=`$d<%lO#{U8WU#XQrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{~8oO#T~U8vU#]QrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U9aUrS!VQOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{U9x]rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#U:q#U#o,Y#o;'S#{;'S;=`$d<%lO#{U:v^rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#`,Y#`#a;r#a#o,Y#o;'S#{;'S;=`$d<%lO#{U;w^rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#g,Y#g#h<s#h#o,Y#o;'S#{;'S;=`$d<%lO#{U<x^rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#X,Y#X#Y=t#Y#o,Y#o;'S#{;'S;=`$d<%lO#{U={[uQrSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{^>x[#UWrSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{^?u[#WWrSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#o,Y#o;'S#{;'S;=`$d<%lO#{^@r^#VWrSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#f,Y#f#gAn#g#o,Y#o;'S#{;'S;=`$d<%lO#{UAs^rSOt#{uw#{x}#{}!O,Y!O!_#{!_!`-T!`#O#{#P#T#{#T#i,Y#i#j<s#j#o,Y#o;'S#{;'S;=`$d<%lO#{UBvU!ZQrSOt#{uw#{x#O#{#P;'S#{;'S;=`$d<%lO#{~C_O#`~",
tokenizers: [operatorTokenizer, 1, 2, 3, tokenizer, new LocalTokenGroup("[~RP!O!PU~ZO!|~~", 11)], tokenizers: [operatorTokenizer, 1, 2, 3, tokenizer, new LocalTokenGroup("[~RP!O!PU~ZO!|~~", 11)],
topRules: {"Program":[0,25]}, topRules: {"Program":[0,25]},
specialized: [{term: 20, get: (value: any, stack: any) => (specializeKeyword(value, stack) << 1), external: specializeKeyword},{term: 20, get: (value: keyof typeof spec_Identifier) => spec_Identifier[value] || -1}], specialized: [{term: 20, get: (value: any, stack: any) => (specializeKeyword(value, stack) << 1), external: specializeKeyword},{term: 20, get: (value: keyof typeof spec_Identifier) => spec_Identifier[value] || -1}],
tokenPrec: 1619 tokenPrec: 1578
}) })

View File

@ -752,7 +752,7 @@ Assign
EqEq == EqEq ==
Number 5 Number 5
colon : colon :
ThenBlock Block
Boolean true Boolean true
keyword end keyword end
keyword end keyword end
@ -794,7 +794,7 @@ Assign
EqEq == EqEq ==
Number 5 Number 5
colon : colon :
ThenBlock Block
Boolean true Boolean true
keyword end keyword end
keyword end keyword end

View File

@ -12,7 +12,7 @@ describe('if/elseif/else', () => {
EqEq == EqEq ==
Number 1 Number 1
colon : colon :
SingleLineThenBlock Block
String String
StringFragment cool StringFragment cool
keyword end keyword end
@ -26,7 +26,7 @@ describe('if/elseif/else', () => {
keyword if keyword if
Identifier x Identifier x
colon : colon :
SingleLineThenBlock Block
Number 2 Number 2
keyword end keyword end
`) `)
@ -44,7 +44,7 @@ describe('if/elseif/else', () => {
Lt < Lt <
Number 9 Number 9
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier yes Identifier yes
keyword end keyword end
@ -61,13 +61,13 @@ describe('if/elseif/else', () => {
keyword if keyword if
Identifier with-else Identifier with-else
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier x Identifier x
ElseExpr ElseExpr
keyword else keyword else
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier y Identifier y
keyword end keyword end
@ -75,23 +75,24 @@ describe('if/elseif/else', () => {
}) })
test('parses multiline if with else if', () => { test('parses multiline if with else if', () => {
expect(`if with-elseif: expect(`if with-else-if:
x x
else if another-condition: else if another-condition:
y y
end`).toMatchTree(` end`).toMatchTree(`
IfExpr IfExpr
keyword if keyword if
Identifier with-elseif Identifier with-else-if
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier x Identifier x
ElseIfExpr ElseIfExpr
keyword elseif keyword else
keyword if
Identifier another-condition Identifier another-condition
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier y Identifier y
keyword end keyword end
@ -99,7 +100,7 @@ describe('if/elseif/else', () => {
}) })
test('parses multiline if with multiple else if and else', () => { test('parses multiline if with multiple else if and else', () => {
expect(`if with-elseif-else: expect(`if with-else-if-else:
x x
else if another-condition: else if another-condition:
y y
@ -110,29 +111,31 @@ describe('if/elseif/else', () => {
end`).toMatchTree(` end`).toMatchTree(`
IfExpr IfExpr
keyword if keyword if
Identifier with-elseif-else Identifier with-else-if-else
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier x Identifier x
ElseIfExpr ElseIfExpr
keyword elseif keyword else
keyword if
Identifier another-condition Identifier another-condition
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier y Identifier y
ElseIfExpr ElseIfExpr
keyword elseif keyword else
keyword if
Identifier yet-another-condition Identifier yet-another-condition
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier z Identifier z
ElseExpr ElseExpr
keyword else keyword else
colon : colon :
ThenBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier oh-no Identifier oh-no
keyword end keyword end
@ -148,9 +151,124 @@ describe('if/elseif/else', () => {
keyword if keyword if
Boolean true Boolean true
colon : colon :
SingleLineThenBlock Block
Number 2 Number 2
keyword end keyword end
`) `)
}) })
}) })
describe('while', () => {
test('infinite loop', () => {
expect(`while true: true end`).toMatchTree(`
WhileExpr
keyword while
Boolean true
colon :
Block
Boolean true
keyword end`)
})
test('basic expression', () => {
expect(`while a > 0: true end`).toMatchTree(`
WhileExpr
keyword while
ConditionalOp
Identifier a
Gt >
Number 0
colon :
Block
Boolean true
keyword end`)
})
test('compound expression', () => {
expect(`while a > 0 and b < 100 and c < 1000: true end`).toMatchTree(`
WhileExpr
keyword while
ConditionalOp
ConditionalOp
ConditionalOp
Identifier a
Gt >
Number 0
And and
ConditionalOp
Identifier b
Lt <
Number 100
And and
ConditionalOp
Identifier c
Lt <
Number 1000
colon :
Block
Boolean true
keyword end`)
})
test('multiline infinite loop', () => {
expect(`
while true:
true
end`).toMatchTree(`
WhileExpr
keyword while
Boolean true
colon :
Block
Boolean true
keyword end`)
})
test('multiline basic expression', () => {
expect(`
while a > 0:
true
end`).toMatchTree(`
WhileExpr
keyword while
ConditionalOp
Identifier a
Gt >
Number 0
colon :
Block
Boolean true
keyword end`)
})
test('multiline compound expression', () => {
expect(`
while a > 0 and b < 100 and c < 1000:
true
end`).toMatchTree(`
WhileExpr
keyword while
ConditionalOp
ConditionalOp
ConditionalOp
Identifier a
Gt >
Number 0
And and
ConditionalOp
Identifier b
Lt <
Number 100
And and
ConditionalOp
Identifier c
Lt <
Number 1000
colon :
Block
Boolean true
keyword end`)
})
})

View File

@ -12,14 +12,14 @@ describe('try/catch/finally/throw', () => {
TryExpr TryExpr
keyword try keyword try
colon : colon :
TryBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier risky-operation Identifier risky-operation
CatchExpr CatchExpr
keyword catch keyword catch
Identifier err Identifier err
colon : colon :
TryBlock Block
FunctionCall FunctionCall
Identifier handle-error Identifier handle-error
PositionalArg PositionalArg
@ -37,13 +37,13 @@ describe('try/catch/finally/throw', () => {
TryExpr TryExpr
keyword try keyword try
colon : colon :
TryBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier do-work Identifier do-work
FinallyExpr FinallyExpr
keyword finally keyword finally
colon : colon :
TryBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier cleanup Identifier cleanup
keyword end keyword end
@ -61,14 +61,14 @@ describe('try/catch/finally/throw', () => {
TryExpr TryExpr
keyword try keyword try
colon : colon :
TryBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier risky-operation Identifier risky-operation
CatchExpr CatchExpr
keyword catch keyword catch
Identifier err Identifier err
colon : colon :
TryBlock Block
FunctionCall FunctionCall
Identifier handle-error Identifier handle-error
PositionalArg PositionalArg
@ -76,7 +76,7 @@ describe('try/catch/finally/throw', () => {
FinallyExpr FinallyExpr
keyword finally keyword finally
colon : colon :
TryBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier cleanup Identifier cleanup
keyword end keyword end
@ -91,6 +91,7 @@ describe('try/catch/finally/throw', () => {
TryExpr TryExpr
keyword try keyword try
colon : colon :
Block
FunctionCall FunctionCall
Identifier parse-number Identifier parse-number
PositionalArg PositionalArg
@ -99,6 +100,7 @@ describe('try/catch/finally/throw', () => {
keyword catch keyword catch
Identifier err Identifier err
colon : colon :
Block
Number 0 Number 0
keyword end keyword end
`) `)
@ -109,16 +111,19 @@ describe('try/catch/finally/throw', () => {
TryExpr TryExpr
keyword try keyword try
colon : colon :
Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier work Identifier work
CatchExpr CatchExpr
keyword catch keyword catch
Identifier err Identifier err
colon : colon :
Block
Number 0 Number 0
FinallyExpr FinallyExpr
keyword finally keyword finally
colon : colon :
Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier cleanup Identifier cleanup
keyword end keyword end
@ -164,12 +169,14 @@ describe('try/catch/finally/throw', () => {
TryExpr TryExpr
keyword try keyword try
colon : colon :
Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier work Identifier work
CatchExpr CatchExpr
keyword catch keyword catch
Identifier err Identifier err
colon : colon :
Block
Number 0 Number 0
keyword end keyword end
`) `)
@ -199,7 +206,7 @@ describe('function-level exception handling', () => {
keyword catch keyword catch
Identifier e Identifier e
colon : colon :
TryBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier empty-string Identifier empty-string
keyword end keyword end
@ -227,7 +234,7 @@ describe('function-level exception handling', () => {
FinallyExpr FinallyExpr
keyword finally keyword finally
colon : colon :
TryBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier close-resources Identifier close-resources
keyword end keyword end
@ -259,7 +266,7 @@ describe('function-level exception handling', () => {
keyword catch keyword catch
Identifier err Identifier err
colon : colon :
TryBlock Block
FunctionCall FunctionCall
Identifier log Identifier log
PositionalArg PositionalArg
@ -269,7 +276,7 @@ describe('function-level exception handling', () => {
FinallyExpr FinallyExpr
keyword finally keyword finally
colon : colon :
TryBlock Block
FunctionCallOrIdentifier FunctionCallOrIdentifier
Identifier cleanup Identifier cleanup
keyword end keyword end

View File

@ -0,0 +1,303 @@
import { expect, describe, test } from 'bun:test'
import '../shrimp.grammar' // Importing this so changes cause it to retest!
describe('single line function blocks', () => {
test('work with no args', () => {
expect(`trap: echo bye bye end`).toMatchTree(`
FunctionCallWithBlock
FunctionCallOrIdentifier
Identifier trap
colon :
Block
FunctionCall
Identifier echo
PositionalArg
Identifier bye
PositionalArg
Identifier bye
keyword end`
)
})
test('work with one arg', () => {
expect(`trap EXIT: echo bye bye end`).toMatchTree(`
FunctionCallWithBlock
FunctionCall
Identifier trap
PositionalArg
Word EXIT
colon :
Block
FunctionCall
Identifier echo
PositionalArg
Identifier bye
PositionalArg
Identifier bye
keyword end`
)
})
test('work with named args', () => {
expect(`attach signal='exit': echo bye bye end`).toMatchTree(`
FunctionCallWithBlock
FunctionCall
Identifier attach
NamedArg
NamedArgPrefix signal=
String
StringFragment exit
colon :
Block
FunctionCall
Identifier echo
PositionalArg
Identifier bye
PositionalArg
Identifier bye
keyword end`
)
})
test('work with dot-get', () => {
expect(`signals = [=]; signals.trap 'EXIT': echo bye bye end`).toMatchTree(`
Assign
AssignableIdentifier signals
Eq =
Dict [=]
FunctionCallWithBlock
FunctionCall
DotGet
IdentifierBeforeDot signals
Identifier trap
PositionalArg
String
StringFragment EXIT
colon :
Block
FunctionCall
Identifier echo
PositionalArg
Identifier bye
PositionalArg
Identifier bye
keyword end`
)
})
})
describe('multi line function blocks', () => {
test('work with no args', () => {
expect(`
trap:
echo bye bye
end
`).toMatchTree(`
FunctionCallWithBlock
FunctionCallOrIdentifier
Identifier trap
colon :
Block
FunctionCall
Identifier echo
PositionalArg
Identifier bye
PositionalArg
Identifier bye
keyword end`
)
})
test('work with one arg', () => {
expect(`
trap EXIT:
echo bye bye
end`).toMatchTree(`
FunctionCallWithBlock
FunctionCall
Identifier trap
PositionalArg
Word EXIT
colon :
Block
FunctionCall
Identifier echo
PositionalArg
Identifier bye
PositionalArg
Identifier bye
keyword end`
)
})
test('work with named args', () => {
expect(`
attach signal='exit' code=1:
echo bye bye
end`).toMatchTree(`
FunctionCallWithBlock
FunctionCall
Identifier attach
NamedArg
NamedArgPrefix signal=
String
StringFragment exit
NamedArg
NamedArgPrefix code=
Number 1
colon :
Block
FunctionCall
Identifier echo
PositionalArg
Identifier bye
PositionalArg
Identifier bye
keyword end`
)
})
test('work with dot-get', () => {
expect(`
signals = [=]
signals.trap 'EXIT':
echo bye bye
end`).toMatchTree(`
Assign
AssignableIdentifier signals
Eq =
Dict [=]
FunctionCallWithBlock
FunctionCall
DotGet
IdentifierBeforeDot signals
Identifier trap
PositionalArg
String
StringFragment EXIT
colon :
Block
FunctionCall
Identifier echo
PositionalArg
Identifier bye
PositionalArg
Identifier bye
keyword end`
)
})
})
describe('ribbit', () => {
test('head tag', () => {
expect(`
head:
title What up
meta charSet=UTF-8
meta name='viewport' content='width=device-width, initial-scale=1, viewport-fit=cover'
end`).toMatchTree(`
FunctionCallWithBlock
FunctionCallOrIdentifier
Identifier head
colon :
Block
FunctionCall
Identifier title
PositionalArg
Word What
PositionalArg
Identifier up
FunctionCall
Identifier meta
PositionalArg
Word charSet=UTF-8
FunctionCall
Identifier meta
NamedArg
NamedArgPrefix name=
String
StringFragment viewport
NamedArg
NamedArgPrefix content=
String
StringFragment width=device-width, initial-scale=1, viewport-fit=cover
keyword end
`)
})
test('li', () => {
expect(`
list:
li border-bottom='1px solid black' one
li two
li three
end`).toMatchTree(`
FunctionCallWithBlock
FunctionCallOrIdentifier
Identifier list
colon :
Block
FunctionCall
Identifier li
NamedArg
NamedArgPrefix border-bottom=
String
StringFragment 1px solid black
PositionalArg
Identifier one
FunctionCall
Identifier li
PositionalArg
Identifier two
FunctionCall
Identifier li
PositionalArg
Identifier three
keyword end`)
})
test('inline expressions', () => {
expect(`
p:
h1 class=bright style='font-family: helvetica' Heya
h2 man that is (b wild)!
end`)
.toMatchTree(`
FunctionCallWithBlock
FunctionCallOrIdentifier
Identifier p
colon :
Block
FunctionCall
Identifier h1
NamedArg
NamedArgPrefix class=
Identifier bright
NamedArg
NamedArgPrefix style=
String
StringFragment font-family: helvetica
PositionalArg
Word Heya
FunctionCall
Identifier h2
PositionalArg
Identifier man
PositionalArg
Identifier that
PositionalArg
Identifier is
PositionalArg
ParenExpr
FunctionCall
Identifier b
PositionalArg
Identifier wild
PositionalArg
Word !
keyword end`)
})
})

View File

@ -1,3 +1,4 @@
node_modules node_modules
dist client/dist
server/dist
*.vsix *.vsix

View File

@ -6,10 +6,12 @@
"type": "extensionHost", "type": "extensionHost",
"request": "launch", "request": "launch",
"args": [ "args": [
"--extensionDevelopmentPath=${workspaceFolder}" "--extensionDevelopmentPath=${workspaceFolder}",
"--profile=Shrimp Dev"
], ],
"outFiles": [ "outFiles": [
"${workspaceFolder}/dist/**/*.js" "${workspaceFolder}/client/dist/**/*.js",
"${workspaceFolder}/server/dist/**/*.js"
], ],
"preLaunchTask": "bun: compile" "preLaunchTask": "bun: compile"
} }

View File

@ -14,17 +14,5 @@
"kind": "build", "kind": "build",
"isDefault": true "isDefault": true
} }
},
{
"type": "shell",
"label": "bun: watch",
"command": "bun",
"args": ["run", "watch"],
"options": {
"cwd": "${workspaceFolder}"
},
"problemMatcher": "$tsc-watch",
"isBackground": true
} }
]
} }

View File

@ -0,0 +1,49 @@
# Shrimp VSCode Extension
Language support for Shrimp in VSCode. This README is for probablycorey and defunkt.
**What it provides:**
- Syntax highlighting and semantic tokens
- Language server with error diagnostics
- Commands: "Show Parse Tree" (Alt+K Alt+I) and "Show Bytecode" (Alt+K Alt+,)
- `.sh` file association
## Development Workflow
**Developing the extension:**
1. Open `vscode-extension/` in VSCode
2. Run `bun run watch` in a terminal (keeps the extension recompiling as you make changes)
3. Use **Run > Start Debugging** to launch Extension Development Host
4. Make changes to the code
5. Press **Cmd+R** (or Ctrl+R) in the Extension Development Host window to reload
6. Repeat steps 4-5
The `.vscode/launch.json` is configured to compile before launching and to use a separate "Shrimp Dev" profile, so the extension can stay installed in your main VSCode profile while you develop without conflicts.
**Installing for daily use:**
Run `bun run build-and-install` to build a VSIX and install it in your current VSCode profile. This lets you use the extension when working on Shrimp scripts outside of development mode.
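Under the hood, `build-and-install` is just a chained script defined in `package.json` (shown later in this diff); roughly, it runs:

bun run package
bunx @vscode/vsce package --allow-missing-repository
code --install-extension shrimp-*.vsix

If the script changes, the `package.json` entry is the source of truth.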
## Project Structure
The extension has two parts: a **client** (`client/src/extension.ts`) that registers commands and starts the language server, and a **server** (`server/src/`) that implements the Language Server Protocol for diagnostics and semantic highlighting.
Both compile to their respective `dist/` folders.
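As a rough sketch of the layout (paths taken from `main` and the compile scripts in `package.json`):

vscode-extension/
  client/src/extension.ts  -> client/dist/extension.js (the extension entry point)
  server/src/server.ts     -> server/dist/server.js (launched by the client over IPC)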
## Next Steps
**Autocomplete:**
- [ ] Identifiers in scope
- [ ] Globals from the prelude (including native functions)
- [ ] Imports
- [ ] Dot-get properties
- [ ] Function argument completion
**Other features:**
- [ ] Better syntax coloring
- [ ] Run shortcut: a command to execute the current Shrimp file
- [ ] REPL integration

View File

@ -3,6 +3,11 @@
"workspaces": { "workspaces": {
"": { "": {
"name": "shrimp", "name": "shrimp",
"dependencies": {
"vscode-languageclient": "^9.0.1",
"vscode-languageserver": "^9.0.1",
"vscode-languageserver-textdocument": "^1.0.12",
},
"devDependencies": { "devDependencies": {
"@types/node": "22.x", "@types/node": "22.x",
"@types/vscode": "^1.105.0", "@types/vscode": "^1.105.0",
@ -15,8 +20,28 @@
"@types/vscode": ["@types/vscode@1.105.0", "", {}, "sha512-Lotk3CTFlGZN8ray4VxJE7axIyLZZETQJVWi/lYoUVQuqfRxlQhVOfoejsD2V3dVXPSbS15ov5ZyowMAzgUqcw=="], "@types/vscode": ["@types/vscode@1.105.0", "", {}, "sha512-Lotk3CTFlGZN8ray4VxJE7axIyLZZETQJVWi/lYoUVQuqfRxlQhVOfoejsD2V3dVXPSbS15ov5ZyowMAzgUqcw=="],
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
"brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="],
"minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="],
"semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
"vscode-jsonrpc": ["vscode-jsonrpc@8.2.0", "", {}, "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA=="],
"vscode-languageclient": ["vscode-languageclient@9.0.1", "", { "dependencies": { "minimatch": "^5.1.0", "semver": "^7.3.7", "vscode-languageserver-protocol": "3.17.5" } }, "sha512-JZiimVdvimEuHh5olxhxkht09m3JzUGwggb5eRUkzzJhZ2KjCN0nh55VfiED9oez9DyF8/fz1g1iBV3h+0Z2EA=="],
"vscode-languageserver": ["vscode-languageserver@9.0.1", "", { "dependencies": { "vscode-languageserver-protocol": "3.17.5" }, "bin": { "installServerIntoExtension": "bin/installServerIntoExtension" } }, "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g=="],
"vscode-languageserver-protocol": ["vscode-languageserver-protocol@3.17.5", "", { "dependencies": { "vscode-jsonrpc": "8.2.0", "vscode-languageserver-types": "3.17.5" } }, "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg=="],
"vscode-languageserver-textdocument": ["vscode-languageserver-textdocument@1.0.12", "", {}, "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA=="],
"vscode-languageserver-types": ["vscode-languageserver-types@3.17.5", "", {}, "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg=="],
} }
} }

View File

@ -0,0 +1,74 @@
import {
LanguageClient,
LanguageClientOptions,
ServerOptions,
TransportKind,
} from 'vscode-languageclient/node'
import * as vscode from 'vscode'
export function activate(context: vscode.ExtensionContext) {
const serverModule = context.asAbsolutePath('server/dist/server.js')
const serverOptions: ServerOptions = {
run: { module: serverModule, transport: TransportKind.ipc },
debug: { module: serverModule, transport: TransportKind.ipc },
}
const clientOptions: LanguageClientOptions = {
documentSelector: [{ scheme: 'file', language: 'shrimp' }],
}
const client = new LanguageClient(
'shrimpLanguageServer',
'Shrimp Language Server',
serverOptions,
clientOptions
)
client.start()
context.subscriptions.push(client)
// Command: Show Parse Tree
context.subscriptions.push(
vscode.commands.registerCommand('shrimp.showParseTree', async () => {
const editor = vscode.window.activeTextEditor
if (!editor || editor.document.languageId !== 'shrimp') {
vscode.window.showErrorMessage('No active Shrimp file')
return
}
const result = await client.sendRequest<string>('shrimp/parseTree', {
uri: editor.document.uri.toString(),
})
const doc = await vscode.workspace.openTextDocument({
content: result,
language: 'text',
})
await vscode.window.showTextDocument(doc, { preview: false })
})
)
// Command: Show Bytecode
context.subscriptions.push(
vscode.commands.registerCommand('shrimp.showBytecode', async () => {
const editor = vscode.window.activeTextEditor
if (!editor || editor.document.languageId !== 'shrimp') {
vscode.window.showErrorMessage('No active Shrimp file')
return
}
const result = await client.sendRequest<string>('shrimp/bytecode', {
uri: editor.document.uri.toString(),
})
const doc = await vscode.workspace.openTextDocument({
content: result,
language: 'text',
})
await vscode.window.showTextDocument(doc, { preview: false })
})
)
}
export function deactivate() {}

View File

@ -0,0 +1,13 @@
# This just has some stuff I use to make sure the extension is working!
like-a-function = do x y z:
echo 'This is a function with parameters: $x, $y, $z'
end
value = if true:
'This is true!'
else:
'This is false!'
end
echo 'value is $(value)'

View File

@ -20,5 +20,9 @@
["'", "'"], ["'", "'"],
["\"", "\""] ["\"", "\""]
], ],
"wordPattern": "([a-z][a-z0-9-]*)|(-?\\d+\\.?\\d*)" "wordPattern": "([a-z][a-z0-9-]*)|(-?\\d+\\.?\\d*)",
"indentationRules": {
"increaseIndentPattern": ":\\s*$",
"decreaseIndentPattern": "^\\s*(end|else)\\b"
}
} }
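Taken together, these rules indent after any line ending in `:` and outdent lines starting with `end` or `else`. An illustrative Shrimp snippet (not from the repo) that they would apply to:

if ready:
  echo 'getting started'
else:
  echo 'not yet'
end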

View File

@ -1,7 +1,7 @@
{ {
"name": "shrimp", "name": "shrimp",
"version": "0.0.1", "version": "0.0.1",
"main": "./dist/extension.js", "main": "./client/dist/extension.js",
"devDependencies": { "devDependencies": {
"@types/vscode": "^1.105.0", "@types/vscode": "^1.105.0",
"@types/node": "22.x", "@types/node": "22.x",
@ -28,7 +28,29 @@
"[shrimp]": { "[shrimp]": {
"editor.semanticHighlighting.enabled": true "editor.semanticHighlighting.enabled": true
} }
},
"commands": [
{
"command": "shrimp.showParseTree",
"title": "Shrimp: Show Parse Tree"
},
{
"command": "shrimp.showBytecode",
"title": "Shrimp: Show Bytecode"
} }
],
"keybindings": [
{
"command": "shrimp.showParseTree",
"key": "alt+k alt+i",
"when": "editorLangId == shrimp"
},
{
"command": "shrimp.showBytecode",
"key": "alt+k alt+,",
"when": "editorLangId == shrimp"
}
]
}, },
"description": "Language support for Shrimp shell scripting language", "description": "Language support for Shrimp shell scripting language",
"displayName": "Shrimp", "displayName": "Shrimp",
@ -39,9 +61,17 @@
"publisher": "shrimp-lang", "publisher": "shrimp-lang",
"scripts": { "scripts": {
"vscode:prepublish": "bun run package", "vscode:prepublish": "bun run package",
"compile": "bun build src/extension.ts --outdir dist --target node --format cjs --external vscode", "compile": "bun run compile:client && bun run compile:server",
"watch": "bun build src/extension.ts --outdir dist --target node --format cjs --external vscode --watch", "compile:client": "bun build client/src/extension.ts --outdir client/dist --target node --format cjs --external vscode",
"package": "bun build src/extension.ts --outdir dist --target node --format cjs --external vscode --minify", "compile:server": "bun build server/src/server.ts --outdir server/dist --target node --format cjs",
"check-types": "tsc --noEmit" "watch": "bun run compile:client --watch",
"package": "bun run compile:client --minify && bun run compile:server --minify",
"check-types": "tsc --noEmit",
"build-and-install": "bun run package && bunx @vscode/vsce package --allow-missing-repository && code --install-extension shrimp-*.vsix"
},
"dependencies": {
"vscode-languageclient": "^9.0.1",
"vscode-languageserver": "^9.0.1",
"vscode-languageserver-textdocument": "^1.0.12"
} }
} }

View File

@ -0,0 +1,93 @@
import { TextDocument, Position } from 'vscode-languageserver-textdocument'
import { Diagnostic, DiagnosticSeverity } from 'vscode-languageserver/node'
import { parser } from '../../../src/parser/shrimp'
import { Compiler } from '../../../src/compiler/compiler'
import { CompilerError } from '../../../src/compiler/compilerError'
export const buildDiagnostics = (textDocument: TextDocument): Diagnostic[] => {
const text = textDocument.getText()
const diagnostics = getParseErrors(textDocument)
if (diagnostics.length > 0) {
return diagnostics
}
const diagnostic = getCompilerError(text)
if (diagnostic) return [diagnostic]
return []
}
const getCompilerError = (text: string): Diagnostic | undefined => {
try {
new Compiler(text)
} catch (e) {
if (!(e instanceof CompilerError)) {
return unknownDiagnostic(getErrorMessage(e))
}
const lineInfo = e.lineAtPosition(text)
const cause = e.cause ? ` Cause: ${e.cause}` : ''
const message = e.message
if (!lineInfo) {
return unknownDiagnostic(message + cause)
}
const diagnostic: Diagnostic = {
severity: DiagnosticSeverity.Error,
range: {
start: { line: lineInfo.lineNumber, character: lineInfo.columnStart },
end: { line: lineInfo.lineNumber, character: lineInfo.columnEnd },
},
message: `Compiler error: ${message}${cause}`,
source: 'shrimp',
}
return diagnostic
}
}
const unknownDiagnostic = (message: string): Diagnostic => {
const diagnostic: Diagnostic = {
severity: DiagnosticSeverity.Error,
range: {
start: { line: 0, character: 0 },
end: { line: -1, character: -1 },
},
message,
source: 'shrimp',
}
return diagnostic
}
const getParseErrors = (textDocument: TextDocument): Diagnostic[] => {
const tree = parser.parse(textDocument.getText())
const ranges: { start: Position; end: Position }[] = []
tree.iterate({
enter(n) {
if (n.type.isError) {
ranges.push({
start: textDocument.positionAt(n.from),
end: textDocument.positionAt(n.to),
})
return false
}
},
})
return ranges.map((range) => {
return {
range,
severity: DiagnosticSeverity.Error,
message: 'Parse error: Invalid syntax',
source: 'shrimp',
}
})
}
const getErrorMessage = (error: unknown): string => {
if (error instanceof Error) {
return error.message
}
return String(error)
}

View File

@ -0,0 +1,101 @@
import { parser } from '../../../src/parser/shrimp'
import * as Terms from '../../../src/parser/shrimp.terms'
import { SyntaxNode } from '@lezer/common'
import { TextDocument } from 'vscode-languageserver-textdocument'
import { SemanticTokensBuilder, SemanticTokenTypes } from 'vscode-languageserver/node'
export const TOKEN_TYPES = [
SemanticTokenTypes.function,
SemanticTokenTypes.variable,
SemanticTokenTypes.string,
SemanticTokenTypes.number,
SemanticTokenTypes.operator,
SemanticTokenTypes.keyword,
SemanticTokenTypes.parameter,
SemanticTokenTypes.property,
SemanticTokenTypes.regexp,
]
export const TOKEN_MODIFIERS: string[] = []
export function buildSemanticTokens(document: TextDocument): number[] {
const text = document.getText()
const tree = parser.parse(text)
const builder = new SemanticTokensBuilder()
walkTree(tree.topNode, document, builder)
return builder.build().data
}
// Walk the tree and collect tokens
function walkTree(node: SyntaxNode, document: TextDocument, builder: SemanticTokensBuilder) {
const tokenType = getTokenType(node.type.id)
if (tokenType !== undefined) {
const start = document.positionAt(node.from)
const length = node.to - node.from
builder.push(start.line, start.character, length, tokenType, 0)
}
let child = node.firstChild
while (child) {
walkTree(child, document, builder)
child = child.nextSibling
}
}
// Map Lezer node IDs to semantic token type indices
function getTokenType(nodeTypeId: number): number | undefined {
switch (nodeTypeId) {
case Terms.FunctionCall:
case Terms.FunctionDef:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.function)
case Terms.Identifier:
case Terms.AssignableIdentifier:
case Terms.FunctionCallOrIdentifier:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.variable)
case Terms.String:
case Terms.StringFragment:
case Terms.Word:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.string)
case Terms.Number:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.number)
case Terms.Plus:
case Terms.Minus:
case Terms.Star:
case Terms.Slash:
case Terms.Eq:
case Terms.EqEq:
case Terms.Neq:
case Terms.Lt:
case Terms.Lte:
case Terms.Gt:
case Terms.Gte:
case Terms.Modulo:
case Terms.And:
case Terms.Or:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.operator)
case Terms.keyword:
case Terms.Do:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.keyword)
case Terms.Params:
case Terms.NamedParam:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.parameter)
case Terms.DotGet:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.property)
case Terms.Regex:
return TOKEN_TYPES.indexOf(SemanticTokenTypes.regexp)
default:
return undefined
}
}

View File

@ -0,0 +1,139 @@
import { TextDocument } from 'vscode-languageserver-textdocument'
import { buildDiagnostics } from './diagnostics'
import { buildSemanticTokens, TOKEN_MODIFIERS, TOKEN_TYPES } from './semanticTokens'
import { parser } from '../../../src/parser/shrimp'
import { Compiler } from '../../../src/compiler/compiler'
import {
InitializeResult,
TextDocuments,
TextDocumentSyncKind,
createConnection,
ProposedFeatures,
CompletionItemKind,
} from 'vscode-languageserver/node'
const connection = createConnection(ProposedFeatures.all)
const documents = new TextDocuments(TextDocument)
documents.listen(connection)
// Server capabilities
connection.onInitialize(handleInitialize)
// Language features
connection.languages.semanticTokens.on(handleSemanticTokens)
documents.onDidChangeContent(handleDocumentChange)
connection.onCompletion(handleCompletion)
// Debug commands
connection.onRequest('shrimp/parseTree', handleParseTree)
connection.onRequest('shrimp/bytecode', handleBytecode)
// Start listening
connection.listen()
// ============================================================================
// Handler implementations
// ============================================================================
function handleInitialize(): InitializeResult {
connection.console.log('🦐 Server initialized with capabilities')
const result: InitializeResult = {
capabilities: {
textDocumentSync: TextDocumentSyncKind.Full,
completionProvider: {
triggerCharacters: ['.'],
},
semanticTokensProvider: {
legend: {
tokenTypes: TOKEN_TYPES,
tokenModifiers: TOKEN_MODIFIERS,
},
full: true,
},
},
}
return result
}
function handleSemanticTokens(params: any) {
const document = documents.get(params.textDocument.uri)
if (!document) return { data: [] }
const data = buildSemanticTokens(document)
return { data }
}
function handleDocumentChange(change: any) {
const textDocument = change.document
const diagnostics = buildDiagnostics(textDocument)
connection.sendDiagnostics({ uri: textDocument.uri, diagnostics })
}
function handleCompletion(params: any) {
const keywords = ['if', 'else', 'do', 'end', 'and', 'or', 'true', 'false', 'null']
return keywords.map((keyword) => ({
label: keyword,
kind: CompletionItemKind.Keyword,
}))
}
function handleParseTree(params: { uri: string }) {
connection.console.log(`🦐 Parse tree requested for: ${params.uri}`)
const document = documents.get(params.uri)
if (!document) return 'Document not found'
const text = document.getText()
const tree = parser.parse(text)
const treeString = tree.toString()
// Format with indentation, without parentheses
let formatted = ''
let indent = 0
for (let i = 0; i < treeString.length; i++) {
const char = treeString[i]
if (char === '(') {
formatted += '\n'
indent++
formatted += ' '.repeat(indent)
} else if (char === ')') {
indent--
} else if (char === ',') {
formatted += '\n'
formatted += ' '.repeat(indent)
} else {
formatted += char
}
}
return formatted
}
function handleBytecode(params: { uri: string }) {
connection.console.log(`🦐 Bytecode requested for: ${params.uri}`)
const document = documents.get(params.uri)
if (!document) return 'Document not found'
try {
const text = document.getText()
const compiler = new Compiler(text)
// Format bytecode as readable string
let output = 'Bytecode:\n\n'
const bytecode = compiler.bytecode
output += bytecode.instructions
.map((op, i) => `${i.toString().padStart(4)}: ${JSON.stringify(op)}`)
.join('\n')
// Strip ANSI color codes
output = output.replace(/\x1b\[[0-9;]*m/g, '')
return output
} catch (error) {
const errorMsg = error instanceof Error ? error.message : String(error)
// Strip ANSI color codes from error message too
return `Compilation failed: ${errorMsg.replace(/\x1b\[[0-9;]*m/g, '')}`
}
}

View File

@ -1,30 +0,0 @@
import * as vscode from 'vscode'
import { ShrimpSemanticTokensProvider, legend } from './semanticTokens'
import { parser } from '../../src/parser/shrimp'
// This method is called when your extension is activated
export function activate(context: vscode.ExtensionContext) {
console.log('Shrimp extension is now active!')
console.log('Parser loaded:', typeof parser, parser)
// Test the parser
try {
const testTree = parser.parse('x = 42')
console.log('Parser test successful:', testTree.topNode.toString())
} catch (error) {
console.error('Parser test failed:', error)
}
// Register semantic tokens provider for Shrimp language
const provider = new ShrimpSemanticTokensProvider()
const selector: vscode.DocumentSelector = { language: 'shrimp', scheme: 'file' }
const disposable = vscode.languages.registerDocumentSemanticTokensProvider(selector, provider, legend)
console.log('Registered semantic tokens provider:', disposable)
context.subscriptions.push(disposable)
console.log('Legend token types:', legend.tokenTypes)
}
// This method is called when your extension is deactivated
export function deactivate() {}

View File

@ -1,118 +0,0 @@
import * as vscode from 'vscode'
import { parser } from '../../src/parser/shrimp'
import { Tree, SyntaxNode } from '@lezer/common'
// Define the token types we'll use
const tokenTypes = [
'function',
'variable',
'string',
'number',
'operator',
'keyword',
'parameter',
'property',
'regexp',
]
const tokenModifiers: string[] = []
export const legend = new vscode.SemanticTokensLegend(tokenTypes, tokenModifiers)
export class ShrimpSemanticTokensProvider implements vscode.DocumentSemanticTokensProvider {
async provideDocumentSemanticTokens(
document: vscode.TextDocument,
_token: vscode.CancellationToken
): Promise<vscode.SemanticTokens> {
try {
console.log('provideDocumentSemanticTokens called for:', document.fileName)
const tokensBuilder = new vscode.SemanticTokensBuilder(legend)
const text = document.getText()
console.log('Document text:', text)
console.log('About to parse with parser:', typeof parser)
const tree: Tree = parser.parse(text)
console.log('Parsed tree:', tree.topNode.toString())
this.walkTree(tree.topNode, document, tokensBuilder)
const result = tokensBuilder.build()
console.log('Built tokens, data length:', result.data.length)
return result
} catch (error) {
console.error('Error in provideDocumentSemanticTokens:', error)
throw error
}
}
// Map Lezer node types to semantic token types
walkTree(node: SyntaxNode, document: vscode.TextDocument, builder: vscode.SemanticTokensBuilder) {
const tokenType = this.getTokenType(node.type.name)
if (tokenType !== undefined) {
const start = document.positionAt(node.from)
const length = node.to - node.from
builder.push(start.line, start.character, length, tokenType, 0)
}
// Recursively walk children
let child = node.firstChild
while (child) {
this.walkTree(child, document, builder)
child = child.nextSibling
}
}
getTokenType(nodeTypeName: string): number | undefined {
// Map Lezer node names to VSCode semantic token types
switch (nodeTypeName) {
case 'FunctionCall':
case 'FunctionDef':
return tokenTypes.indexOf('function')
case 'Identifier':
case 'AssignableIdentifier':
case 'FunctionCallOrIdentifier':
return tokenTypes.indexOf('variable')
case 'String':
case 'StringFragment':
case 'Word':
return tokenTypes.indexOf('string')
case 'Number':
return tokenTypes.indexOf('number')
case 'Plus':
case 'Minus':
case 'Star':
case 'Slash':
case 'Eq':
case 'EqEq':
case 'Neq':
case 'Lt':
case 'Lte':
case 'Gt':
case 'Gte':
case 'Modulo':
case 'And':
case 'Or':
return tokenTypes.indexOf('operator')
case 'keyword':
case 'Do':
return tokenTypes.indexOf('keyword')
case 'Params':
case 'NamedParam':
return tokenTypes.indexOf('parameter')
case 'DotGet':
return tokenTypes.indexOf('property')
case 'Regex':
return tokenTypes.indexOf('regexp')
default:
return undefined
}
}
}

View File

@ -3,7 +3,7 @@
"target": "ES2022", "target": "ES2022",
"lib": ["ES2022"], "lib": ["ES2022"],
"module": "commonjs", "module": "commonjs",
"moduleResolution": "node", "moduleResolution": "bundler",
"outDir": "./dist", "outDir": "./dist",
"strict": true, "strict": true,
"esModuleInterop": true, "esModuleInterop": true,
@ -11,6 +11,6 @@
"forceConsistentCasingInFileNames": true, "forceConsistentCasingInFileNames": true,
"resolveJsonModule": true "resolveJsonModule": true
}, },
"include": ["src/**/*"], "include": ["client/src/**/*", "server/src/**/*", "../src/**/*"],
"exclude": ["node_modules", "dist"] "exclude": ["node_modules", "client/dist", "server/dist"]
} }