// This file was generated by lezer-generator. You probably shouldn't edit it.
import {LRParser} from "@lezer/lr"
export const parser = LRParser.deserialize({
  version: 14,
  states: "#rOQOPOOOOOO'#Cf'#CfOYOPO'#CfQ_OQOOOOOO,59Q,59QOOOO'#Cb'#CbOmOQO'#ChO{OSO'#CcOpOQO'#ChOOOO'#Ch'#ChOOOO'#Ca'#CaQ_OQOOOOOO-E6`-E6`O!ZOQO,59SOOOO,59S,59SOOOO'#Cd'#CdO!fOSO,58}OOOO-E6a-E6aOOOO-E6_-E6_OOOO1G.n1G.nOOOO-E6b-E6b",
  stateData: "!w~OQQOZPO~OZSO~ORTOSVOZXO]VO~ORTOSVOZ^O]VO~O^_OSVXZVX]VX~OSVOZcO]VO~O^_OSVaZVa]Va~OS]~",
  goto: "!T]PPPPP^dkvP|P!PQZRRbZSURZR[USWRZQ]UTaW]Q`VRd`RROTYRZ",
  nodeNames: "⚠ Document Heading Indent AutoLink",
  maxTerm: 14,
  skippedNodes: [0],
  repeatNodeCount: 4,
  tokenData: "(U~RWOXkXY!^YZ!mZpkpq!^q#[k#[#]!r#]~kVrTQP]UOXkXY!RZpkpq!Rq~kP!WQQPOY!RZ~!RV!gQQPRQ^SOY!RZ~!R~!rOZ~V!yVQP]UOXkXY!RZpkpq!Rq#hk#h#i#`#i~kV#gVQP]UOXkXY!RZpkpq!Rq#hk#h#i#|#i~kV$TVQP]UOXkXY!RZpkpq!Rq#dk#d#e$j#e~kV$qXQP]UOXkXY!RZpkpq!Rq![k![!]%^!]#gk#g#h'h#h~kV%eVQP]UOXkXY!RZpkpq!Rq!Pk!P!Q%z!Q~kV&RVQP]UOXkXY!RZpkpq!Rq!Pk!P!Q&h!Q~kV&oTQP]UOX'OXY!RZp'Opq!Rq~'OV'XTQPSU]UOX'OXY!RZp'Opq!Rq~'OV'oVQP]UOXkXY!RZpkpq!Rq![k![!]%^!]~k",
  tokenizers: [0, 1, 2],
  topRules: {"Document":[0,1]},
  tokenPrec: 82
})
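
// Usage sketch, not part of lezer-generator's output above: a small helper that
// runs the deserialized parser over a string and returns the syntax tree's debug
// representation. parser.parse() and Tree.toString() are standard @lezer/lr /
// @lezer/common API; the helper name and its export are illustrative assumptions.
export function debugParse(input) {
  // Parse the input into a Lezer Tree and render its node structure, built from
  // the node types declared in nodeNames above (Document, Heading, Indent, AutoLink).
  return parser.parse(input).toString()
}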