import { expect } from 'bun:test'
import { diffLines } from 'diff'
import color from 'kleur'
import { Scanner, TokenType, type Token } from '#parser/tokenizer2'
import { parser } from '#parser/shrimp'
import { setGlobals } from '#parser/tokenizer'
import { parse } from '#parser/parser2'
import { globals as prelude } from '#prelude'
import { $ } from 'bun'
import { assert, errorMessage } from '#utils/utils'
import { Compiler } from '#compiler/compiler'
import { run, VM } from 'reefvm'
import { treeToString2, treeToString, VMResultToValue } from '#utils/tree'

// Regenerate the parser whenever the grammar or tokenizer source is newer than
// the generated parser (or when the stat check fails, e.g. no generated parser yet).
const regenerateParser = async () => {
  let generate = true

  try {
    const grammarStat = await Bun.file('./src/parser/shrimp.grammar').stat()
    const tokenizerStat = await Bun.file('./src/parser/tokenizer.ts').stat()
    const parserStat = await Bun.file('./src/parser/shrimp.ts').stat()

    if (grammarStat.mtime <= parserStat.mtime && tokenizerStat.mtime <= parserStat.mtime) {
      generate = false
    }
  } catch (e) {
    console.error('Error checking or regenerating parser:', e)
  } finally {
    if (generate) {
      await $`bun generate-parser`
    }
  }
}

await regenerateParser()

// Type declaration for TypeScript
declare module 'bun:test' {
  interface Matchers<T> {
    toMatchTree(expected: string, globals?: Record<string, unknown>): T
    toMatchExpression(expected: string): T
    toFailParse(): T
    toEvaluateTo(expected: unknown, globals?: Record<string, unknown>): Promise<T>
    toFailEvaluation(): Promise<T>
    toBeToken(expected: string): T
    toMatchToken(typeOrValue: string, value?: string): T
    toMatchTokens(...tokens: { type: string, value?: string }[]): T
  }
}

// Custom matchers for parser, tokenizer, and evaluation tests.
expect.extend({
  toMatchTree(received: unknown, expected: string, globals?: Record<string, unknown>) {
    assert(typeof received === 'string', 'toMatchTree can only be used with string values')

    const allGlobals = { ...prelude, ...(globals || {}) }
    setGlobals(Object.keys(allGlobals))

    const tree = parse(received)
    const actual = treeToString2(tree, received)
    const normalizedExpected = trimWhitespace(expected)

    try {
      // A hacky way to show the colorized diff in the test output
      expect(actual).toEqual(normalizedExpected)
      return { pass: true, message: () => '' }
    } catch (error) {
      return {
        message: () => (error as Error).message,
        pass: false,
      }
    }
  },

  toFailParse(received) {
    assert(typeof received === 'string', 'toFailParse can only be used with string values')

    try {
      const tree = parser.parse(received)
      let hasErrors = false

      tree.iterate({
        enter(n) {
          if (n.type.isError) {
            hasErrors = true
            return false
          }
        },
      })

      if (hasErrors) {
        return {
          message: () => `Expected input to fail parsing, and it did.`,
          pass: true,
        }
      } else {
        const actual = treeToString(tree, received)
        return {
          message: () => `Expected input to fail parsing, but it parsed successfully:\n${actual}`,
          pass: false,
        }
      }
    } catch (error) {
      return {
        message: () => `Parsing threw an error: ${(error as Error).message}`,
        pass: false,
      }
    }
  },

  async toEvaluateTo(received: unknown, expected: unknown, globals: Record<string, unknown> = {}) {
    assert(typeof received === 'string', 'toEvaluateTo can only be used with string values')

    try {
      const allGlobals = { ...prelude, ...(globals || {}) }
      setGlobals(Object.keys(allGlobals))

      const compiler = new Compiler(received)
      const result = await run(compiler.bytecode, allGlobals)
      let value = VMResultToValue(result)

      // Just treat regex as strings for comparison purposes
      if (expected instanceof RegExp) expected = String(expected)
      if (value instanceof RegExp) value = String(value)

      expect(value).toEqual(expected)
      return {
        message: () => `Expected evaluation to be ${expected}, but got ${value}`,
        pass: true,
      }
    } catch (error) {
      return {
        message: () =>
          `Evaluation threw an error:\n${(error as Error).message}`,
        pass: false,
      }
    }
  },

  async toFailEvaluation(received) {
    assert(typeof received === 'string', 'toFailEvaluation can only be used with string values')

    try {
      const compiler = new Compiler(received)
      const vm = new VM(compiler.bytecode)
      const value = await vm.run()

      return {
        message: () => `Expected evaluation to fail, but it succeeded with ${JSON.stringify(value)}`,
        pass: false,
      }
    } catch (error) {
      return {
        message: () => `Evaluation failed as expected: ${errorMessage(error)}`,
        pass: true,
      }
    }
  },

  toBeToken(received: unknown, expected: string) {
    assert(typeof received === 'string', 'toBeToken can only be used with string values')

    try {
      const tokens = tokenize(received)
      const value = tokens[0] as Token
      const target = TokenType[expected as keyof typeof TokenType]

      if (!value) {
        return {
          message: () => `Expected token type to be ${expected}, but got ${value}`,
          pass: false,
        }
      }

      return {
        message: () => `Expected token type to be ${expected}, but got ${TokenType[value.type]}`,
        pass: value.type === target,
      }
    } catch (error) {
      return {
        message: () => `Tokenization failed: ${errorMessage(error)}`,
        pass: false,
      }
    }
  },

  toMatchToken(received: unknown, typeOrValue: string, value?: string) {
    assert(typeof received === 'string', 'toMatchToken can only be used with string values')

    const expectedValue = value ? value : typeOrValue
    const expectedType = value ? typeOrValue : undefined

    try {
      const tokens = tokenize(received)
      const token = tokens[0] as Token

      if (!token) {
        return {
          message: () => `Expected token to be ${expectedValue.replaceAll('\n', '\\n')}, got ${token}`,
          pass: false,
        }
      }

      if (expectedType && TokenType[expectedType as keyof typeof TokenType] !== token.type) {
        return {
          message: () => `Expected token to be ${expectedType}, but got ${TokenType[token.type]}`,
          pass: false,
        }
      }

      return {
        message: () => `Expected token to be ${expectedValue.replaceAll('\n', '\\n')}, but got ${token.value}`,
        pass: token.value === expectedValue,
      }
    } catch (error) {
      return {
        message: () => `Tokenization failed: ${errorMessage(error)}`,
        pass: false,
      }
    }
  },

  toMatchTokens(received: unknown, ...tokens: { type: string, value?: string }[]) {
    assert(typeof received === 'string', 'toMatchTokens can only be used with string values')

    try {
      const result = tokenize(received).map(t => toHumanToken(t))

      if (result.length === 0 && tokens.length > 0) {
        return {
          message: () => `Expected tokens ${JSON.stringify(tokens)}, got nothing`,
          pass: false,
        }
      }

      const expected = JSON.stringify(tokens, null, 2)
      const actual = JSON.stringify(result, null, 2)

      return {
        message: () => `Tokens don't match: \n\n${diff(actual, expected)}`,
        pass: expected === actual,
      }
    } catch (error) {
      return {
        message: () => `Tokenization failed: ${errorMessage(error)}`,
        pass: false,
      }
    }
  },
})

const tokenize = (code: string): Token[] => {
  const scanner = new Scanner()
  return scanner.tokenize(code)
}

const toHumanToken = (tok: Token): { type: string, value?: string } => {
  return { type: TokenType[tok.type], value: tok.value }
}

const trimWhitespace = (str: string): string => {
  const lines = str.split('\n').filter((line) => line.trim().length > 0)
  const firstLine = lines[0]
  if (!firstLine) return ''

  const leadingWhitespace = firstLine.match(/^(\s*)/)?.[1] || ''

  return lines
    .map((line) => {
      if (!line.startsWith(leadingWhitespace)) {
        const foundWhitespace = line.match(/^(\s*)/)?.[1] || ''
        throw new Error(
          `Line has inconsistent leading whitespace: "${line}"(found "${foundWhitespace}", expected "${leadingWhitespace}")`
        )
      }
      return line.slice(leadingWhitespace.length)
    })
    .join('\n')
}

const diff = (a: string, b: string): string => {
  const expected = a.trim()
  const actual = b.trim()
  const lines: string[] = []

  if (expected !== actual) {
    const changes = diffLines(actual, expected)

    for (const part of changes) {
      const sign = part.added ? "+" : part.removed ? "-" : " "
      let line = sign + part.value

      if (part.added) {
        line = color.green(line)
      } else if (part.removed) {
        line = color.red(line)
      }

      lines.push(line.endsWith("\n") || line.endsWith("\n\u001b[39m") ? line : line + "\n")
    }
  }

  return lines.join('\n')
}
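
// Usage sketch (illustrative only): how these matchers might read in a spec file.
// The token type names ('Num', 'Op'), the evaluated result, and the import path
// are assumptions for illustration, not verified against the Shrimp grammar.
//
//   import { test, expect } from 'bun:test'
//   import './matchers'   // hypothetical path to this file
//
//   test('addition', async () => {
//     expect('1 + 2').toMatchTokens(
//       { type: 'Num', value: '1' },
//       { type: 'Op', value: '+' },
//       { type: 'Num', value: '2' },
//     )
//     await expect('1 + 2').toEvaluateTo(3)
//     expect('1 +').toFailParse()
//   })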