From aee9fa0747811b0c8085febb1c40bbad6eb393a9 Mon Sep 17 00:00:00 2001
From: Corey Johnson
Date: Fri, 17 Oct 2025 18:43:11 -0700
Subject: [PATCH] refactor(scope): simplify trackScope to only track
 AssignableIdentifier
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Update trackScope ContextTracker to use ScopeContext wrapper
- Simplify shift() to only capture AssignableIdentifier tokens
- Simplify reduce() to handle only Assign, Params, and FunctionDef
- Update hash function to use hashScope helper
- Export ScopeContext class for use in tokenizer
- Update tokenizer to access scope via ScopeContext.scope

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 src/parser/scopeTracker.ts | 59 +++++++++++++++++---------------------
 src/parser/tokenizer.ts    |  5 ++--
 2 files changed, 29 insertions(+), 35 deletions(-)

diff --git a/src/parser/scopeTracker.ts b/src/parser/scopeTracker.ts
index 42e9af4..d1dd15d 100644
--- a/src/parser/scopeTracker.ts
+++ b/src/parser/scopeTracker.ts
@@ -42,7 +42,7 @@ export class Scope {
 }
 
 // Wrapper that adds temporary state for identifier capture
-class ScopeContext {
+export class ScopeContext {
   constructor(
     public scope: Scope,
     public pendingIds: string[] = []
@@ -54,17 +54,12 @@ const hashScope = (context: ScopeContext): number => {
   return context.scope.hash()
 }
 
-export const trackScope = new ContextTracker<Scope>({
-  start: new Scope(null, new Set(), [], false),
+export const trackScope = new ContextTracker<ScopeContext>({
+  start: new ScopeContext(new Scope(null, new Set())),
 
   shift(context, term, stack, input) {
-    // Track fn keyword to enter param capture mode
-    if (term === terms.Fn) {
-      return context.withIsInParams(true).withPendingIdentifiers([])
-    }
-
-    // Capture identifiers
-    if (term === terms.Identifier) {
+    // Only capture AssignableIdentifier tokens
+    if (term === terms.AssignableIdentifier) {
       // Build text by peeking backwards from stack.pos to input.pos
       let text = ''
       const start = input.pos
@@ -76,14 +71,10 @@
         text += String.fromCharCode(ch)
       }
 
-      // Capture ALL identifiers when in params
-      if (context.isInParams) {
-        return context.withPendingIdentifiers([...context.pendingIdentifiers, text])
-      }
-      // Capture FIRST identifier for assignments
-      else if (context.pendingIdentifiers.length === 0) {
-        return context.withPendingIdentifiers([text])
-      }
+      return new ScopeContext(
+        context.scope,
+        [...context.pendingIds, text]
+      )
     }
 
     return context
@@ -91,31 +82,33 @@
 
   reduce(context, term, stack, input) {
     // Add assignment variable to scope
-    if (term === terms.Assign && context.pendingIdentifiers.length > 0) {
-      return context.add(context.pendingIdentifiers[0]!)
+    if (term === terms.Assign && context.pendingIds.length > 0) {
+      // Pop the last identifier (most recent AssignableIdentifier)
+      const varName = context.pendingIds[context.pendingIds.length - 1]!
+      return new ScopeContext(
+        context.scope.add(varName),
+        context.pendingIds.slice(0, -1)
+      )
     }
 
-    // Push new scope and add parameters
+    // Push new scope and add all parameters
     if (term === terms.Params) {
-      const newScope = context.push()
-      if (context.pendingIdentifiers.length > 0) {
-        return newScope.add(...context.pendingIdentifiers).withIsInParams(false)
-      }
-      return newScope.withIsInParams(false)
+      const newScope = context.scope.push()
+      return new ScopeContext(
+        context.pendingIds.length > 0
+          ? newScope.add(...context.pendingIds)
+          : newScope,
+        [] // Clear all pending after consuming
+      )
     }
 
     // Pop scope when exiting function
     if (term === terms.FunctionDef) {
-      return context.pop()
-    }
-
-    // Clear stale identifiers after non-assignment statements
-    if (term === terms.DotGet || term === terms.FunctionCallOrIdentifier || term === terms.FunctionCall) {
-      return context.clearPending()
+      return new ScopeContext(context.scope.pop(), [])
     }
 
     return context
   },
 
-  hash: (context) => context.hash(),
+  hash: hashScope,
 })
diff --git a/src/parser/tokenizer.ts b/src/parser/tokenizer.ts
index 9bed8b0..a862e04 100644
--- a/src/parser/tokenizer.ts
+++ b/src/parser/tokenizer.ts
@@ -1,6 +1,6 @@
 import { ExternalTokenizer, InputStream, Stack } from '@lezer/lr'
 import { Identifier, AssignableIdentifier, Word, IdentifierBeforeDot } from './shrimp.terms'
-import type { Scope } from './scopeTracker'
+import type { ScopeContext } from './scopeTracker'
 
 // The only chars that can't be words are whitespace, apostrophes, closing parens, and EOF.
 
@@ -36,7 +36,8 @@
     identifierText += String.fromCharCode(charCode)
   }
 
-  const scope = stack.context as Scope | undefined
+  const scopeContext = stack.context as ScopeContext | undefined
+  const scope = scopeContext?.scope
 
   if (scope?.has(identifierText)) {
     // In scope - stop here, let grammar parse property access
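
Reviewer note (illustrative only, not part of the patch): a minimal sketch of how the exported
trackScope and ScopeContext are meant to be consumed. It assumes the generated parser is
importable from './shrimp.parser' and that the grammar does not already attach the tracker via
an @context declaration; both of those names are assumptions, not code from this change.

  // usage-sketch.ts (hypothetical file)
  import { parser } from './shrimp.parser'      // assumed generated parser module
  import { trackScope } from './scopeTracker'

  // Attach the context tracker; during tokenization stack.context is then a
  // ScopeContext, which is why tokenizer.ts reads stack.context.scope.
  const scopedParser = parser.configure({ contextTracker: trackScope })

  // After `x = 1` reduces Assign, `x` is in the current scope, so the tokenizer's
  // scope?.has('x') check succeeds on the second line (input assumed to be valid Shrimp).
  const tree = scopedParser.parse('x = 1\nx')
  console.log(tree.toString())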