Compare commits
No commits in common. "0574629904bf9e1481b464579b44b958195d419b" and "ab12212df2afe7decd4277d192bf36d253ff1c9c" have entirely different histories.
0574629904...ab12212df2
.gitignore (vendored, 2 changes)

@@ -35,5 +35,3 @@ report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
 
 /tmp
 /docs
-
-*.vsix
(file name not shown)

@@ -200,7 +200,7 @@ function parseExpression(input: string) {
 - **Not in scope** → Parses as `Word("obj.prop")` → compiles to `PUSH 'obj.prop'` (treated as file path/string)
 
 Implementation files:
-- **src/parser/parserScopeContext.ts**: ContextTracker that maintains immutable scope chain
+- **src/parser/scopeTracker.ts**: ContextTracker that maintains immutable scope chain
 - **src/parser/tokenizer.ts**: External tokenizer checks `stack.context` to decide if dot creates DotGet or Word
 - Scope tracking: Captures variables from assignments (`x = 5`) and function parameters (`fn x:`)
 - See `src/parser/tests/dot-get.test.ts` for comprehensive examples
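The bullets in this hunk describe the scope rule: `obj.prop` only becomes a DotGet when `obj` is a known variable. A rough illustration of that rule follows; `Scope`, `inScope`, and `classify` are invented names for this sketch, not the project's API.

```typescript
// Hypothetical sketch of the scope rule described above; not the repo's code.
type Scope = { vars: Set<string>; parent?: Scope }

const inScope = (scope: Scope | undefined, name: string): boolean => {
  // Walk the (immutable) scope chain outward.
  for (let s = scope; s; s = s.parent) {
    if (s.vars.has(name)) return true
  }
  return false
}

// 'obj.prop' becomes a DotGet only when 'obj' is a known variable; otherwise
// the whole token stays a Word and compiles to PUSH 'obj.prop'.
const classify = (scope: Scope, token: string): 'DotGet' | 'Word' => {
  const head = token.split('.')[0] ?? ''
  return head !== '' && inScope(scope, head) ? 'DotGet' : 'Word'
}

// x = 5 puts 'x' in scope, so 'x.prop' classifies as DotGet; 'obj.prop' does not.
const top: Scope = { vars: new Set(['x']) }
console.log(classify(top, 'x.prop'))   // DotGet
console.log(classify(top, 'obj.prop')) // Word
```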
(file name not shown)

@@ -1,6 +1,6 @@
 @external propSource highlighting from "./highlight"
 
-@context trackScope from "./parserScopeContext"
+@context trackScope from "./scopeTracker"
 
 @skip { space | Comment }
 
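The `@context` directive wires a Lezer ContextTracker into the grammar. A minimal sketch of the shape such a tracker takes is below; the `ScopeCtx` type and hook bodies are assumptions for illustration, since the repo's scopeTracker implementation is not shown in this diff.

```typescript
// Minimal @lezer/lr ContextTracker sketch. The external tokenizer reads the
// current context via stack.context while the parse advances.
import { ContextTracker } from '@lezer/lr'

type ScopeCtx = { vars: ReadonlySet<string>; parent: ScopeCtx | null }

const topScope: ScopeCtx = { vars: new Set<string>(), parent: null }

export const trackScope = new ContextTracker<ScopeCtx>({
  start: topScope,
  // shift/reduce hooks (omitted here) would return new immutable contexts,
  // pushing a scope for `fn x:` parameters and adding names on `x = 5`.
  hash: (ctx) => ctx.vars.size, // coarse; a real tracker hashes the contents
})
```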
(file name not shown)

@@ -2,7 +2,7 @@
 import {LRParser, LocalTokenGroup} from "@lezer/lr"
 import {operatorTokenizer} from "./operatorTokenizer"
 import {tokenizer, specializeKeyword} from "./tokenizer"
-import {trackScope} from "./parserScopeContext"
+import {trackScope} from "./scopeTracker"
 import {highlighting} from "./highlight"
 const spec_Identifier = {__proto__:null,if:66, null:94, catch:100, finally:106, end:108, else:116, while:130, try:136, throw:140}
 export const parser = LRParser.deserialize({
vscode-extension/.gitignore (vendored, new file, 4 additions)

@@ -0,0 +1,4 @@
+node_modules
+client/dist
+server/dist
+*.vsix
(file name not shown)

@@ -1,12 +1,12 @@
 import { TextDocument, Position } from 'vscode-languageserver-textdocument'
 import { Diagnostic, DiagnosticSeverity } from 'vscode-languageserver/node'
-import { Tree } from '@lezer/common'
+import { parser } from '../../../src/parser/shrimp'
 import { Compiler } from '../../../src/compiler/compiler'
 import { CompilerError } from '../../../src/compiler/compilerError'
 
-export const buildDiagnostics = (textDocument: TextDocument, tree: Tree): Diagnostic[] => {
+export const buildDiagnostics = (textDocument: TextDocument): Diagnostic[] => {
   const text = textDocument.getText()
-  const diagnostics = getParseErrors(textDocument, tree)
+  const diagnostics = getParseErrors(textDocument)
 
   if (diagnostics.length > 0) {
     return diagnostics
@@ -59,7 +59,9 @@ const unknownDiagnostic = (message: string): Diagnostic => {
   return diagnostic
 }
 
-const getParseErrors = (textDocument: TextDocument, tree: Tree): Diagnostic[] => {
+const getParseErrors = (textDocument: TextDocument): Diagnostic[] => {
+  const tree = parser.parse(textDocument.getText())
+
   const ranges: { start: Position; end: Position }[] = []
   tree.iterate({
     enter(n) {
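With this change `buildDiagnostics` parses internally, so callers supply only the document. A call-shape sketch, where the URI and source text are made up for illustration:

```typescript
// Sketch of the new signature; buildDiagnostics now runs parser.parse itself.
import { TextDocument } from 'vscode-languageserver-textdocument'
import { buildDiagnostics } from './diagnostics'

const doc = TextDocument.create('file:///example.shrimp', 'shrimp', 1, 'echo hello')
const diagnostics = buildDiagnostics(doc) // no Tree argument any more
console.log(diagnostics.length)
```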
(file name not shown)

@@ -1,10 +1,10 @@
 import { test, expect, describe } from 'bun:test'
-import { EditorScopeAnalyzer } from './editorScopeAnalyzer'
+import { ScopeTracker } from './scopeTracker'
 import { TextDocument } from 'vscode-languageserver-textdocument'
 import { parser } from '../../../src/parser/shrimp'
 import * as Terms from '../../../src/parser/shrimp.terms'
 
-describe('EditorScopeAnalyzer', () => {
+describe('ScopeTracker', () => {
   test('top-level assignment is in scope', () => {
     const code = 'x = 5\necho x'
     const { tree, tracker } = parseAndGetScope(code)
@@ -146,6 +146,6 @@ end`
 const parseAndGetScope = (code: string) => {
   const document = TextDocument.create('test://test.sh', 'shrimp', 1, code)
   const tree = parser.parse(code)
-  const tracker = new EditorScopeAnalyzer(document)
+  const tracker = new ScopeTracker(document)
   return { document, tree, tracker }
 }
(file name not shown)

@@ -7,7 +7,7 @@ import { globals } from '../../../src/prelude'
  * Tracks variables in scope at a given position in the parse tree.
  * Used to distinguish identifiers (in scope) from words (not in scope).
  */
-export class EditorScopeAnalyzer {
+export class ScopeTracker {
   private document: TextDocument
   private scopeCache = new Map<number, Set<string>>()
 
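The doc comment explains the class's job, and the `scopeCache` field suggests scope sets are memoized per tree position. A hypothetical sketch of what such a cache enables; `scopeAt` is an invented method name, since the class body is not shown in this diff.

```typescript
// Hypothetical position-keyed scope cache, so repeated token-classification
// queries at the same position don't re-walk the tree.
class ScopeCacheSketch {
  private scopeCache = new Map<number, Set<string>>()

  scopeAt(pos: number, compute: () => Set<string>): Set<string> {
    let vars = this.scopeCache.get(pos)
    if (!vars) {
      vars = compute() // e.g. walk the tree once to collect names at pos
      this.scopeCache.set(pos, vars)
    }
    return vars
  }
}
```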
(file name not shown)

@@ -1,13 +1,13 @@
 import { parser } from '../../../src/parser/shrimp'
 import * as Terms from '../../../src/parser/shrimp.terms'
-import { SyntaxNode, Tree } from '@lezer/common'
+import { SyntaxNode } from '@lezer/common'
 import { TextDocument } from 'vscode-languageserver-textdocument'
 import {
   SemanticTokensBuilder,
   SemanticTokenTypes,
   SemanticTokenModifiers,
 } from 'vscode-languageserver/node'
-import { EditorScopeAnalyzer } from './editorScopeAnalyzer'
+import { ScopeTracker } from './scopeTracker'
 
 export const TOKEN_TYPES = [
   SemanticTokenTypes.function,
@@ -28,9 +28,11 @@ export const TOKEN_MODIFIERS = [
   SemanticTokenModifiers.readonly,
 ]
 
-export function buildSemanticTokens(document: TextDocument, tree: Tree): number[] {
+export function buildSemanticTokens(document: TextDocument): number[] {
+  const text = document.getText()
+  const tree = parser.parse(text)
   const builder = new SemanticTokensBuilder()
-  const scopeTracker = new EditorScopeAnalyzer(document)
+  const scopeTracker = new ScopeTracker(document)
 
   walkTree(tree.topNode, document, builder, scopeTracker)
 
@@ -75,7 +77,7 @@ function walkTree(
   node: SyntaxNode,
   document: TextDocument,
   builder: SemanticTokensBuilder,
-  scopeTracker: EditorScopeAnalyzer
+  scopeTracker: ScopeTracker
 ) {
   // Special handling for NamedArgPrefix to split "name=" into two tokens
   if (node.type.id === Terms.NamedArgPrefix) {
@@ -102,7 +104,7 @@ type TokenInfo = { type: number; modifiers: number } | undefined
 function getTokenType(
   node: SyntaxNode,
   document: TextDocument,
-  scopeTracker: EditorScopeAnalyzer
+  scopeTracker: ScopeTracker
 ): TokenInfo {
   const nodeTypeId = node.type.id
   const parentTypeId = node.parent?.type.id
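Same pattern as diagnostics: `buildSemanticTokens` now takes only the document and re-parses on every call. A call-shape sketch with sample content assumed:

```typescript
// Each call re-runs parser.parse internally; no shared Tree parameter remains.
import { TextDocument } from 'vscode-languageserver-textdocument'
import { buildSemanticTokens } from './semanticTokens'

const doc = TextDocument.create('file:///example.shrimp', 'shrimp', 1, 'x = 5\necho x')
const data = buildSemanticTokens(doc) // delta-encoded LSP semantic token array
```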
(file name not shown)

@@ -3,7 +3,6 @@ import { buildDiagnostics } from './diagnostics'
 import { buildSemanticTokens, TOKEN_MODIFIERS, TOKEN_TYPES } from './semanticTokens'
 import { parser } from '../../../src/parser/shrimp'
 import { Compiler } from '../../../src/compiler/compiler'
-import { Tree } from '@lezer/common'
 import {
   InitializeResult,
   TextDocuments,
@@ -11,23 +10,18 @@ import {
   createConnection,
   ProposedFeatures,
   CompletionItemKind,
-  TextDocumentChangeEvent,
 } from 'vscode-languageserver/node'
 
 const connection = createConnection(ProposedFeatures.all)
 const documents = new TextDocuments(TextDocument)
 documents.listen(connection)
 
-const documentTrees = new Map<string, Tree>()
-
 // Server capabilities
 connection.onInitialize(handleInitialize)
 
 // Language features
 connection.languages.semanticTokens.on(handleSemanticTokens)
-documents.onDidOpen(handleDocumentOpen)
 documents.onDidChangeContent(handleDocumentChange)
-documents.onDidClose(handleDocumentClose)
 connection.onCompletion(handleCompletion)
 
 // Debug commands
@@ -37,7 +31,10 @@ connection.onRequest('shrimp/bytecode', handleBytecode)
 // Start listening
 connection.listen()
 
+// ============================================================================
 // Handler implementations
+// ============================================================================
+
 function handleInitialize(): InitializeResult {
   connection.console.log('🦐 Server initialized with capabilities')
   const result: InitializeResult = {
@@ -59,40 +56,21 @@ function handleInitialize(): InitializeResult {
   return result
 }
 
-function handleDocumentOpen(event: TextDocumentChangeEvent<TextDocument>) {
-  const document = event.document
-  const tree = parser.parse(document.getText())
-  documentTrees.set(document.uri, tree)
-}
-
 function handleSemanticTokens(params: any) {
   const document = documents.get(params.textDocument.uri)
   if (!document) return { data: [] }
 
-  const tree = documentTrees.get(params.textDocument.uri)
-  if (!tree) return { data: [] }
-
-  const data = buildSemanticTokens(document, tree)
+  const data = buildSemanticTokens(document)
   return { data }
 }
 
-function handleDocumentChange(change: TextDocumentChangeEvent<TextDocument>) {
-  const document = change.document
-
-  // Parse and cache
-  const tree = parser.parse(document.getText())
-  documentTrees.set(document.uri, tree)
-
-  // Build diagnostics using cached tree
-  const diagnostics = buildDiagnostics(document, tree)
-  connection.sendDiagnostics({ uri: document.uri, diagnostics })
+function handleDocumentChange(change: any) {
+  const textDocument = change.document
+  const diagnostics = buildDiagnostics(textDocument)
+  connection.sendDiagnostics({ uri: textDocument.uri, diagnostics })
 }
 
-function handleDocumentClose(event: TextDocumentChangeEvent<TextDocument>) {
-  documentTrees.delete(event.document.uri)
-}
-
-function handleCompletion() {
+function handleCompletion(params: any) {
   const keywords = ['if', 'else', 'do', 'end', 'and', 'or', 'true', 'false', 'null']
 
   return keywords.map((keyword) => ({
@@ -106,13 +84,8 @@ function handleParseTree(params: { uri: string }) {
   const document = documents.get(params.uri)
   if (!document) return 'Document not found'
 
-  const tree = documentTrees.get(params.uri)
-  if (!tree) {
-    connection.console.error(`🦐 No cached tree for ${params.uri}`)
-    return 'No cached parse tree available'
-  }
-
   const text = document.getText()
+  const tree = parser.parse(text)
   const cursor = tree.cursor()
 
   let formatted = ''
BIN vscode-extension/shrimp-0.0.1.vsix (new file)
Binary file not shown.