diff --git a/src/parser/curlyTokenizer.ts b/src/parser/curlyTokenizer.ts
index 00e3ce1..9495bc0 100644
--- a/src/parser/curlyTokenizer.ts
+++ b/src/parser/curlyTokenizer.ts
@@ -1,5 +1,6 @@
 import { parser } from '#parser/shrimp.ts'
-import type { SyntaxNode } from '@lezer/common'
+import { parse } from '#parser/parser2'
+import type { SyntaxNode } from '#parser/node'
 import { isIdentStart, isIdentChar } from './tokenizer'
 
 // Turns a { curly string } into strings and nodes for interpolation
@@ -37,7 +38,7 @@ export const tokenizeCurlyString = (value: string): (string | [string, SyntaxNod
     }
 
     const input = value.slice(start + 2, pos) // skip '$('
-    tokens.push([input, parser.parse(input).topNode])
+    tokens.push([input, parse(input)])
     start = ++pos // skip ')'
   } else {
     char = value[++pos]
@@ -48,7 +49,7 @@ export const tokenizeCurlyString = (value: string): (string | [string, SyntaxNod
     char = value[++pos]
 
     const input = value.slice(start + 1, pos) // skip '$'
-    tokens.push([input, parser.parse(input).topNode])
+    tokens.push([input, parse(input)])
     start = pos-- // backtrack and start over
   }
 }
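
Note: `#parser/parser2` itself is not part of this diff, so the sketch below is only an illustration of the contract the updated call sites assume: `parse` takes the interpolation source text and returns the root `SyntaxNode` directly, so callers no longer reach through `.topNode`. The shim body and the cast are assumptions for illustration; the real module presumably builds its own tree rather than delegating to the Lezer parser.

```ts
// Hypothetical shim for #parser/parser2 (not included in this diff).
// It only demonstrates the expected signature used by tokenizeCurlyString.
import { parser } from '#parser/shrimp.ts'
import type { SyntaxNode } from '#parser/node'

export const parse = (input: string): SyntaxNode =>
  // Assumption: the Lezer tree's topNode is compatible with (or convertible
  // to) the new SyntaxNode shape; the real parser2 likely differs.
  parser.parse(input).topNode as unknown as SyntaxNode
```

With that shape, `tokenizeCurlyString` keeps returning `[input, node]` pairs for each `$(...)` and `$ident` interpolation, just without the extra `.topNode` hop at the call sites.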