// This file was generated by lezer-generator. You probably shouldn't edit it.
import {LRParser} from "@lezer/lr"
import {tokenizer} from "./tokenizers"
import {highlighting} from "./highlight.js"
export const parser = LRParser.deserialize({
  version: 14,
  states: "'UQVQTOOOnQPO'#DSO!tQUO'#DSO#OQPOOOOQO'#DR'#DRO#oQTO'#CbOOQS'#DP'#DPO#vQTO'#DUOOQO'#Cn'#CnOOQO'#C|'#C|O$OQPO'#CuQVQTOOOOQS'#DO'#DOOOQS'#Ca'#CaO$TQTO'#ClOOQS'#C}'#C}OOQS'#Cv'#CvO$[QUO,58zOVQTO,59_O$lQTO,58}O$lQTO,58}O$sQPO,58|O%UQUO'#DSO%]QPO,58|OOQS'#Cw'#CwO%bQTO'#CpO%jQPO,59pOOQS,59a,59aOOQS-E6s-E6sOOQS,59W,59WOOQS-E6t-E6tOOQO1G.y1G.yOOQO'#DS'#DSOOQO1G.i1G.iO%oQPO1G.iOOQS1G.h1G.hOOQS-E6u-E6uO&WQTO1G/[O&bQPO7+$vO&sQTO7+$wOOQO<