path: root/node_modules/sucrase/dist/esm/parser/tokenizer
author     Philipp Tanlak <philipp.tanlak@gmail.com>  2025-11-24 20:54:57 +0100
committer  Philipp Tanlak <philipp.tanlak@gmail.com>  2025-11-24 20:57:48 +0100
commit     b1e2c8fd5cb5dfa46bc440a12eafaf56cd844b1c (patch)
tree       49d360fd6cbc6a2754efe93524ac47ff0fbe0f7d /node_modules/sucrase/dist/esm/parser/tokenizer
Docs
Diffstat (limited to 'node_modules/sucrase/dist/esm/parser/tokenizer')
-rw-r--r--  node_modules/sucrase/dist/esm/parser/tokenizer/index.js         1004
-rw-r--r--  node_modules/sucrase/dist/esm/parser/tokenizer/keywords.js        43
-rw-r--r--  node_modules/sucrase/dist/esm/parser/tokenizer/readWord.js        64
-rw-r--r--  node_modules/sucrase/dist/esm/parser/tokenizer/readWordTree.js   671
-rw-r--r--  node_modules/sucrase/dist/esm/parser/tokenizer/state.js          106
-rw-r--r--  node_modules/sucrase/dist/esm/parser/tokenizer/types.js          361
6 files changed, 2249 insertions, 0 deletions
diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/index.js b/node_modules/sucrase/dist/esm/parser/tokenizer/index.js
new file mode 100644
index 0000000..69f286a
--- /dev/null
+++ b/node_modules/sucrase/dist/esm/parser/tokenizer/index.js
@@ -0,0 +1,1004 @@
+/* eslint max-len: 0 */
+
+import {input, isFlowEnabled, state} from "../traverser/base";
+import {unexpected} from "../traverser/util";
+import {charCodes} from "../util/charcodes";
+import {IS_IDENTIFIER_CHAR, IS_IDENTIFIER_START} from "../util/identifier";
+import {IS_WHITESPACE, skipWhiteSpace} from "../util/whitespace";
+import {ContextualKeyword} from "./keywords";
+import readWord from "./readWord";
+import { TokenType as tt} from "./types";
+
+export var IdentifierRole; (function (IdentifierRole) {
+ const Access = 0; IdentifierRole[IdentifierRole["Access"] = Access] = "Access";
+ const ExportAccess = Access + 1; IdentifierRole[IdentifierRole["ExportAccess"] = ExportAccess] = "ExportAccess";
+ const TopLevelDeclaration = ExportAccess + 1; IdentifierRole[IdentifierRole["TopLevelDeclaration"] = TopLevelDeclaration] = "TopLevelDeclaration";
+ const FunctionScopedDeclaration = TopLevelDeclaration + 1; IdentifierRole[IdentifierRole["FunctionScopedDeclaration"] = FunctionScopedDeclaration] = "FunctionScopedDeclaration";
+ const BlockScopedDeclaration = FunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["BlockScopedDeclaration"] = BlockScopedDeclaration] = "BlockScopedDeclaration";
+ const ObjectShorthandTopLevelDeclaration = BlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandTopLevelDeclaration"] = ObjectShorthandTopLevelDeclaration] = "ObjectShorthandTopLevelDeclaration";
+ const ObjectShorthandFunctionScopedDeclaration = ObjectShorthandTopLevelDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandFunctionScopedDeclaration"] = ObjectShorthandFunctionScopedDeclaration] = "ObjectShorthandFunctionScopedDeclaration";
+ const ObjectShorthandBlockScopedDeclaration = ObjectShorthandFunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandBlockScopedDeclaration"] = ObjectShorthandBlockScopedDeclaration] = "ObjectShorthandBlockScopedDeclaration";
+ const ObjectShorthand = ObjectShorthandBlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthand"] = ObjectShorthand] = "ObjectShorthand";
+ // Any identifier bound in an import statement, e.g. both A and b from
+ // `import A, * as b from 'A';`
+ const ImportDeclaration = ObjectShorthand + 1; IdentifierRole[IdentifierRole["ImportDeclaration"] = ImportDeclaration] = "ImportDeclaration";
+ const ObjectKey = ImportDeclaration + 1; IdentifierRole[IdentifierRole["ObjectKey"] = ObjectKey] = "ObjectKey";
+ // The `foo` in `import {foo as bar} from "./abc";`.
+ const ImportAccess = ObjectKey + 1; IdentifierRole[IdentifierRole["ImportAccess"] = ImportAccess] = "ImportAccess";
+})(IdentifierRole || (IdentifierRole = {}));
+
+/**
+ * Extra information on jsxTagStart tokens, used to determine which of the three
+ * jsx functions are called in the automatic transform.
+ */
+export var JSXRole; (function (JSXRole) {
+ // The element is self-closing or has a body that resolves to empty. We
+ // shouldn't emit children at all in this case.
+ const NoChildren = 0; JSXRole[JSXRole["NoChildren"] = NoChildren] = "NoChildren";
+ // The element has a single explicit child, which might still be an arbitrary
+ // expression like an array. We should emit that expression as the children.
+ const OneChild = NoChildren + 1; JSXRole[JSXRole["OneChild"] = OneChild] = "OneChild";
+ // The element has at least two explicitly-specified children or has spread
+ // children, so child positions are assumed to be "static". We should wrap
+ // these children in an array.
+ const StaticChildren = OneChild + 1; JSXRole[JSXRole["StaticChildren"] = StaticChildren] = "StaticChildren";
+ // The element has a prop named "key" after a prop spread, so we should fall
+ // back to the createElement function.
+ const KeyAfterPropSpread = StaticChildren + 1; JSXRole[JSXRole["KeyAfterPropSpread"] = KeyAfterPropSpread] = "KeyAfterPropSpread";
+})(JSXRole || (JSXRole = {}));
+
+export function isDeclaration(token) {
+ const role = token.identifierRole;
+ return (
+ role === IdentifierRole.TopLevelDeclaration ||
+ role === IdentifierRole.FunctionScopedDeclaration ||
+ role === IdentifierRole.BlockScopedDeclaration ||
+ role === IdentifierRole.ObjectShorthandTopLevelDeclaration ||
+ role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration ||
+ role === IdentifierRole.ObjectShorthandBlockScopedDeclaration
+ );
+}
+
+export function isNonTopLevelDeclaration(token) {
+ const role = token.identifierRole;
+ return (
+ role === IdentifierRole.FunctionScopedDeclaration ||
+ role === IdentifierRole.BlockScopedDeclaration ||
+ role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration ||
+ role === IdentifierRole.ObjectShorthandBlockScopedDeclaration
+ );
+}
+
+export function isTopLevelDeclaration(token) {
+ const role = token.identifierRole;
+ return (
+ role === IdentifierRole.TopLevelDeclaration ||
+ role === IdentifierRole.ObjectShorthandTopLevelDeclaration ||
+ role === IdentifierRole.ImportDeclaration
+ );
+}
+
+export function isBlockScopedDeclaration(token) {
+ const role = token.identifierRole;
+ // Treat top-level declarations as block scope since the distinction doesn't matter here.
+ return (
+ role === IdentifierRole.TopLevelDeclaration ||
+ role === IdentifierRole.BlockScopedDeclaration ||
+ role === IdentifierRole.ObjectShorthandTopLevelDeclaration ||
+ role === IdentifierRole.ObjectShorthandBlockScopedDeclaration
+ );
+}
+
+export function isFunctionScopedDeclaration(token) {
+ const role = token.identifierRole;
+ return (
+ role === IdentifierRole.FunctionScopedDeclaration ||
+ role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration
+ );
+}
+
+export function isObjectShorthandDeclaration(token) {
+ return (
+ token.identifierRole === IdentifierRole.ObjectShorthandTopLevelDeclaration ||
+ token.identifierRole === IdentifierRole.ObjectShorthandBlockScopedDeclaration ||
+ token.identifierRole === IdentifierRole.ObjectShorthandFunctionScopedDeclaration
+ );
+}
+
+// Object type used to represent tokens. Note that normally, tokens
+// simply exist as properties on the parser object. This is only
+// used for the onToken callback and the external tokenizer.
+export class Token {
+ constructor() {
+ this.type = state.type;
+ this.contextualKeyword = state.contextualKeyword;
+ this.start = state.start;
+ this.end = state.end;
+ this.scopeDepth = state.scopeDepth;
+ this.isType = state.isType;
+ this.identifierRole = null;
+ this.jsxRole = null;
+ this.shadowsGlobal = false;
+ this.isAsyncOperation = false;
+ this.contextId = null;
+ this.rhsEndIndex = null;
+ this.isExpression = false;
+ this.numNullishCoalesceStarts = 0;
+ this.numNullishCoalesceEnds = 0;
+ this.isOptionalChainStart = false;
+ this.isOptionalChainEnd = false;
+ this.subscriptStartIndex = null;
+ this.nullishStartIndex = null;
+ }
+
+
+
+
+
+
+
+
+
+ // Initially false for all tokens, then may be computed in a follow-up step that does scope
+ // analysis.
+
+ // Initially false for all tokens, but may be set during transform to mark it as containing an
+ // await operation.
+
+
+ // For assignments, the index of the RHS. For export tokens, the end of the export.
+
+ // For class tokens, records if the class is a class expression or a class statement.
+
+ // Number of times to insert a `nullishCoalesce(` snippet before this token.
+
+ // Number of times to insert a `)` snippet after this token.
+
+ // If true, insert an `optionalChain([` snippet before this token.
+
+ // If true, insert a `])` snippet after this token.
+
+ // Tag for `.`, `?.`, `[`, `?.[`, `(`, and `?.(` to denote the "root" token for this
+ // subscript chain. This can be used to determine if this chain is an optional chain.
+
+ // Tag for `??` operators to denote the root token for this nullish coalescing call.
+
+}
+
+// ## Tokenizer
+
+// Move to the next token
+export function next() {
+ state.tokens.push(new Token());
+ nextToken();
+}
+
+// Call instead of next when inside a template, since that needs to be handled differently.
+export function nextTemplateToken() {
+ state.tokens.push(new Token());
+ state.start = state.pos;
+ readTmplToken();
+}
+
+// The tokenizer never parses regexes by default. Instead, the parser is responsible for
+// instructing it to parse a regex when we see a slash at the start of an expression.
+export function retokenizeSlashAsRegex() {
+ if (state.type === tt.assign) {
+ --state.pos;
+ }
+ readRegexp();
+}
+
+export function pushTypeContext(existingTokensInType) {
+ for (let i = state.tokens.length - existingTokensInType; i < state.tokens.length; i++) {
+ state.tokens[i].isType = true;
+ }
+ const oldIsType = state.isType;
+ state.isType = true;
+ return oldIsType;
+}
+
+export function popTypeContext(oldIsType) {
+ state.isType = oldIsType;
+}
+
+export function eat(type) {
+ if (match(type)) {
+ next();
+ return true;
+ } else {
+ return false;
+ }
+}
+
+export function eatTypeToken(tokenType) {
+ const oldIsType = state.isType;
+ state.isType = true;
+ eat(tokenType);
+ state.isType = oldIsType;
+}
+
+export function match(type) {
+ return state.type === type;
+}
+
+export function lookaheadType() {
+ const snapshot = state.snapshot();
+ next();
+ const type = state.type;
+ state.restoreFromSnapshot(snapshot);
+ return type;
+}
+
+export class TypeAndKeyword {
+
+
+ constructor(type, contextualKeyword) {
+ this.type = type;
+ this.contextualKeyword = contextualKeyword;
+ }
+}
+
+export function lookaheadTypeAndKeyword() {
+ const snapshot = state.snapshot();
+ next();
+ const type = state.type;
+ const contextualKeyword = state.contextualKeyword;
+ state.restoreFromSnapshot(snapshot);
+ return new TypeAndKeyword(type, contextualKeyword);
+}
+
+export function nextTokenStart() {
+ return nextTokenStartSince(state.pos);
+}
+
+export function nextTokenStartSince(pos) {
+ skipWhiteSpace.lastIndex = pos;
+ const skip = skipWhiteSpace.exec(input);
+ return pos + skip[0].length;
+}
+
+export function lookaheadCharCode() {
+ return input.charCodeAt(nextTokenStart());
+}
+
+// Read a single token, updating the parser object's token-related
+// properties.
+export function nextToken() {
+ skipSpace();
+ state.start = state.pos;
+ if (state.pos >= input.length) {
+ const tokens = state.tokens;
+ // We normally run past the end a bit, but if we're way past the end, avoid an infinite loop.
+ // Also check the token positions rather than the types since sometimes we rewrite the token
+ // type to something else.
+ if (
+ tokens.length >= 2 &&
+ tokens[tokens.length - 1].start >= input.length &&
+ tokens[tokens.length - 2].start >= input.length
+ ) {
+ unexpected("Unexpectedly reached the end of input.");
+ }
+ finishToken(tt.eof);
+ return;
+ }
+ readToken(input.charCodeAt(state.pos));
+}
+
+function readToken(code) {
+ // Identifier or keyword. '\uXXXX' sequences are allowed in
+ // identifiers, so '\' also dispatches to that.
+ if (
+ IS_IDENTIFIER_START[code] ||
+ code === charCodes.backslash ||
+ (code === charCodes.atSign && input.charCodeAt(state.pos + 1) === charCodes.atSign)
+ ) {
+ readWord();
+ } else {
+ getTokenFromCode(code);
+ }
+}
+
+function skipBlockComment() {
+ while (
+ input.charCodeAt(state.pos) !== charCodes.asterisk ||
+ input.charCodeAt(state.pos + 1) !== charCodes.slash
+ ) {
+ state.pos++;
+ if (state.pos > input.length) {
+ unexpected("Unterminated comment", state.pos - 2);
+ return;
+ }
+ }
+ state.pos += 2;
+}
+
+export function skipLineComment(startSkip) {
+ let ch = input.charCodeAt((state.pos += startSkip));
+ if (state.pos < input.length) {
+ while (
+ ch !== charCodes.lineFeed &&
+ ch !== charCodes.carriageReturn &&
+ ch !== charCodes.lineSeparator &&
+ ch !== charCodes.paragraphSeparator &&
+ ++state.pos < input.length
+ ) {
+ ch = input.charCodeAt(state.pos);
+ }
+ }
+}
+
+// Called at the start of the parse and after every token. Skips
+// whitespace and comments.
+export function skipSpace() {
+ while (state.pos < input.length) {
+ const ch = input.charCodeAt(state.pos);
+ switch (ch) {
+ case charCodes.carriageReturn:
+ if (input.charCodeAt(state.pos + 1) === charCodes.lineFeed) {
+ ++state.pos;
+ }
+
+ case charCodes.lineFeed:
+ case charCodes.lineSeparator:
+ case charCodes.paragraphSeparator:
+ ++state.pos;
+ break;
+
+ case charCodes.slash:
+ switch (input.charCodeAt(state.pos + 1)) {
+ case charCodes.asterisk:
+ state.pos += 2;
+ skipBlockComment();
+ break;
+
+ case charCodes.slash:
+ skipLineComment(2);
+ break;
+
+ default:
+ return;
+ }
+ break;
+
+ default:
+ if (IS_WHITESPACE[ch]) {
+ ++state.pos;
+ } else {
+ return;
+ }
+ }
+ }
+}
+
+// Called at the end of every token. Sets various fields, and skips the space after the token, so
+// that the next one's `start` will point at the right position.
+export function finishToken(
+ type,
+ contextualKeyword = ContextualKeyword.NONE,
+) {
+ state.end = state.pos;
+ state.type = type;
+ state.contextualKeyword = contextualKeyword;
+}
+
+// ### Token reading
+
+// This is the function that is called to fetch the next token. It
+// is somewhat obscure, because it works in character codes rather
+// than characters, and because operator parsing has been inlined
+// into it.
+//
+// All in the name of speed.
+function readToken_dot() {
+ const nextChar = input.charCodeAt(state.pos + 1);
+ if (nextChar >= charCodes.digit0 && nextChar <= charCodes.digit9) {
+ readNumber(true);
+ return;
+ }
+
+ if (nextChar === charCodes.dot && input.charCodeAt(state.pos + 2) === charCodes.dot) {
+ state.pos += 3;
+ finishToken(tt.ellipsis);
+ } else {
+ ++state.pos;
+ finishToken(tt.dot);
+ }
+}
+
+function readToken_slash() {
+ const nextChar = input.charCodeAt(state.pos + 1);
+ if (nextChar === charCodes.equalsTo) {
+ finishOp(tt.assign, 2);
+ } else {
+ finishOp(tt.slash, 1);
+ }
+}
+
+function readToken_mult_modulo(code) {
+ // '%*'
+ let tokenType = code === charCodes.asterisk ? tt.star : tt.modulo;
+ let width = 1;
+ let nextChar = input.charCodeAt(state.pos + 1);
+
+ // Exponentiation operator **
+ if (code === charCodes.asterisk && nextChar === charCodes.asterisk) {
+ width++;
+ nextChar = input.charCodeAt(state.pos + 2);
+ tokenType = tt.exponent;
+ }
+
+ // Match *= or %=, disallowing *=> which can be valid in flow.
+ if (
+ nextChar === charCodes.equalsTo &&
+ input.charCodeAt(state.pos + 2) !== charCodes.greaterThan
+ ) {
+ width++;
+ tokenType = tt.assign;
+ }
+
+ finishOp(tokenType, width);
+}
+
+function readToken_pipe_amp(code) {
+ // '|&'
+ const nextChar = input.charCodeAt(state.pos + 1);
+
+ if (nextChar === code) {
+ if (input.charCodeAt(state.pos + 2) === charCodes.equalsTo) {
+ // ||= or &&=
+ finishOp(tt.assign, 3);
+ } else {
+ // || or &&
+ finishOp(code === charCodes.verticalBar ? tt.logicalOR : tt.logicalAND, 2);
+ }
+ return;
+ }
+
+ if (code === charCodes.verticalBar) {
+ // '|>'
+ if (nextChar === charCodes.greaterThan) {
+ finishOp(tt.pipeline, 2);
+ return;
+ } else if (nextChar === charCodes.rightCurlyBrace && isFlowEnabled) {
+ // '|}'
+ finishOp(tt.braceBarR, 2);
+ return;
+ }
+ }
+
+ if (nextChar === charCodes.equalsTo) {
+ finishOp(tt.assign, 2);
+ return;
+ }
+
+ finishOp(code === charCodes.verticalBar ? tt.bitwiseOR : tt.bitwiseAND, 1);
+}
+
+function readToken_caret() {
+ // '^'
+ const nextChar = input.charCodeAt(state.pos + 1);
+ if (nextChar === charCodes.equalsTo) {
+ finishOp(tt.assign, 2);
+ } else {
+ finishOp(tt.bitwiseXOR, 1);
+ }
+}
+
+function readToken_plus_min(code) {
+ // '+-'
+ const nextChar = input.charCodeAt(state.pos + 1);
+
+ if (nextChar === code) {
+ // Tentatively call this a prefix operator, but it might be changed to postfix later.
+ finishOp(tt.preIncDec, 2);
+ return;
+ }
+
+ if (nextChar === charCodes.equalsTo) {
+ finishOp(tt.assign, 2);
+ } else if (code === charCodes.plusSign) {
+ finishOp(tt.plus, 1);
+ } else {
+ finishOp(tt.minus, 1);
+ }
+}
+
+function readToken_lt() {
+ const nextChar = input.charCodeAt(state.pos + 1);
+
+ if (nextChar === charCodes.lessThan) {
+ if (input.charCodeAt(state.pos + 2) === charCodes.equalsTo) {
+ finishOp(tt.assign, 3);
+ return;
+ }
+ // We see <<, but need to be really careful about whether to treat it as a
+ // true left-shift or as two < tokens.
+ if (state.isType) {
+ // Within a type, << might come up in a snippet like `Array<<T>() => void>`,
+ // so treat it as two < tokens. Importantly, this should only override <<
+ // rather than other tokens like <= . If we treated <= as < in a type
+ // context, then the snippet `a as T <= 1` would incorrectly start parsing
+ // a type argument on T. We don't need to worry about `a as T << 1`
+ // because TypeScript disallows that syntax.
+ finishOp(tt.lessThan, 1);
+ } else {
+ // Outside a type, this might be a true left-shift operator, or it might
+ // still be two open-type-arg tokens, such as in `f<<T>() => void>()`. We
+ // look at the token while considering the `f`, so we don't yet know that
+ // we're in a type context. In this case, we initially tokenize as a
+ // left-shift and correct after-the-fact as necessary in
+ // tsParseTypeArgumentsWithPossibleBitshift .
+ finishOp(tt.bitShiftL, 2);
+ }
+ return;
+ }
+
+ if (nextChar === charCodes.equalsTo) {
+ // <=
+ finishOp(tt.relationalOrEqual, 2);
+ } else {
+ finishOp(tt.lessThan, 1);
+ }
+}
+
+function readToken_gt() {
+ if (state.isType) {
+ // Avoid right-shift for things like `Array<Array<string>>` and
+ // greater-than-or-equal for things like `const a: Array<number>=[];`.
+ finishOp(tt.greaterThan, 1);
+ return;
+ }
+
+ const nextChar = input.charCodeAt(state.pos + 1);
+
+ if (nextChar === charCodes.greaterThan) {
+ const size = input.charCodeAt(state.pos + 2) === charCodes.greaterThan ? 3 : 2;
+ if (input.charCodeAt(state.pos + size) === charCodes.equalsTo) {
+ finishOp(tt.assign, size + 1);
+ return;
+ }
+ finishOp(tt.bitShiftR, size);
+ return;
+ }
+
+ if (nextChar === charCodes.equalsTo) {
+ // >=
+ finishOp(tt.relationalOrEqual, 2);
+ } else {
+ finishOp(tt.greaterThan, 1);
+ }
+}
+
+/**
+ * Reinterpret a possible > token when transitioning from a type to a non-type
+ * context.
+ *
+ * This comes up in two situations where >= needs to be treated as one token:
+ * - After an `as` expression, like in the code `a as T >= 1`.
+ * - In a type argument in an expression context, e.g. `f(a < b, c >= d)`, we
+ * need to see the token as >= so that we get an error and backtrack to
+ * normal expression parsing.
+ *
+ * Other situations require >= to be seen as two tokens, e.g.
+ * `const x: Array<T>=[];`, so it's important to treat > as its own token in
+ * typical type parsing situations.
+ */
+export function rescan_gt() {
+ if (state.type === tt.greaterThan) {
+ state.pos -= 1;
+ readToken_gt();
+ }
+}
+
+function readToken_eq_excl(code) {
+ // '=!'
+ const nextChar = input.charCodeAt(state.pos + 1);
+ if (nextChar === charCodes.equalsTo) {
+ finishOp(tt.equality, input.charCodeAt(state.pos + 2) === charCodes.equalsTo ? 3 : 2);
+ return;
+ }
+ if (code === charCodes.equalsTo && nextChar === charCodes.greaterThan) {
+ // '=>'
+ state.pos += 2;
+ finishToken(tt.arrow);
+ return;
+ }
+ finishOp(code === charCodes.equalsTo ? tt.eq : tt.bang, 1);
+}
+
+function readToken_question() {
+ // '?'
+ const nextChar = input.charCodeAt(state.pos + 1);
+ const nextChar2 = input.charCodeAt(state.pos + 2);
+ if (
+ nextChar === charCodes.questionMark &&
+ // In Flow (but not TypeScript), ??string is a valid type that should be
+ // tokenized as two individual ? tokens.
+ !(isFlowEnabled && state.isType)
+ ) {
+ if (nextChar2 === charCodes.equalsTo) {
+ // '??='
+ finishOp(tt.assign, 3);
+ } else {
+ // '??'
+ finishOp(tt.nullishCoalescing, 2);
+ }
+ } else if (
+ nextChar === charCodes.dot &&
+ !(nextChar2 >= charCodes.digit0 && nextChar2 <= charCodes.digit9)
+ ) {
+ // '.' not followed by a number
+ state.pos += 2;
+ finishToken(tt.questionDot);
+ } else {
+ ++state.pos;
+ finishToken(tt.question);
+ }
+}
+
+export function getTokenFromCode(code) {
+ switch (code) {
+ case charCodes.numberSign:
+ ++state.pos;
+ finishToken(tt.hash);
+ return;
+
+ // The interpretation of a dot depends on whether it is followed
+ // by a digit or another two dots.
+
+ case charCodes.dot:
+ readToken_dot();
+ return;
+
+ // Punctuation tokens.
+ case charCodes.leftParenthesis:
+ ++state.pos;
+ finishToken(tt.parenL);
+ return;
+ case charCodes.rightParenthesis:
+ ++state.pos;
+ finishToken(tt.parenR);
+ return;
+ case charCodes.semicolon:
+ ++state.pos;
+ finishToken(tt.semi);
+ return;
+ case charCodes.comma:
+ ++state.pos;
+ finishToken(tt.comma);
+ return;
+ case charCodes.leftSquareBracket:
+ ++state.pos;
+ finishToken(tt.bracketL);
+ return;
+ case charCodes.rightSquareBracket:
+ ++state.pos;
+ finishToken(tt.bracketR);
+ return;
+
+ case charCodes.leftCurlyBrace:
+ if (isFlowEnabled && input.charCodeAt(state.pos + 1) === charCodes.verticalBar) {
+ finishOp(tt.braceBarL, 2);
+ } else {
+ ++state.pos;
+ finishToken(tt.braceL);
+ }
+ return;
+
+ case charCodes.rightCurlyBrace:
+ ++state.pos;
+ finishToken(tt.braceR);
+ return;
+
+ case charCodes.colon:
+ if (input.charCodeAt(state.pos + 1) === charCodes.colon) {
+ finishOp(tt.doubleColon, 2);
+ } else {
+ ++state.pos;
+ finishToken(tt.colon);
+ }
+ return;
+
+ case charCodes.questionMark:
+ readToken_question();
+ return;
+ case charCodes.atSign:
+ ++state.pos;
+ finishToken(tt.at);
+ return;
+
+ case charCodes.graveAccent:
+ ++state.pos;
+ finishToken(tt.backQuote);
+ return;
+
+ case charCodes.digit0: {
+ const nextChar = input.charCodeAt(state.pos + 1);
+ // '0x', '0X', '0o', '0O', '0b', '0B'
+ if (
+ nextChar === charCodes.lowercaseX ||
+ nextChar === charCodes.uppercaseX ||
+ nextChar === charCodes.lowercaseO ||
+ nextChar === charCodes.uppercaseO ||
+ nextChar === charCodes.lowercaseB ||
+ nextChar === charCodes.uppercaseB
+ ) {
+ readRadixNumber();
+ return;
+ }
+ }
+ // Anything else beginning with a digit is an integer, octal
+ // number, or float.
+ case charCodes.digit1:
+ case charCodes.digit2:
+ case charCodes.digit3:
+ case charCodes.digit4:
+ case charCodes.digit5:
+ case charCodes.digit6:
+ case charCodes.digit7:
+ case charCodes.digit8:
+ case charCodes.digit9:
+ readNumber(false);
+ return;
+
+ // Quotes produce strings.
+ case charCodes.quotationMark:
+ case charCodes.apostrophe:
+ readString(code);
+ return;
+
+ // Operators are parsed inline in tiny state machines. '=' (charCodes.equalsTo) is
+ // often referred to. `finishOp` simply skips the amount of
+ // characters it is given as second argument, and returns a token
+ // of the type given by its first argument.
+
+ case charCodes.slash:
+ readToken_slash();
+ return;
+
+ case charCodes.percentSign:
+ case charCodes.asterisk:
+ readToken_mult_modulo(code);
+ return;
+
+ case charCodes.verticalBar:
+ case charCodes.ampersand:
+ readToken_pipe_amp(code);
+ return;
+
+ case charCodes.caret:
+ readToken_caret();
+ return;
+
+ case charCodes.plusSign:
+ case charCodes.dash:
+ readToken_plus_min(code);
+ return;
+
+ case charCodes.lessThan:
+ readToken_lt();
+ return;
+
+ case charCodes.greaterThan:
+ readToken_gt();
+ return;
+
+ case charCodes.equalsTo:
+ case charCodes.exclamationMark:
+ readToken_eq_excl(code);
+ return;
+
+ case charCodes.tilde:
+ finishOp(tt.tilde, 1);
+ return;
+
+ default:
+ break;
+ }
+
+ unexpected(`Unexpected character '${String.fromCharCode(code)}'`, state.pos);
+}
+
+function finishOp(type, size) {
+ state.pos += size;
+ finishToken(type);
+}
+
+function readRegexp() {
+ const start = state.pos;
+ let escaped = false;
+ let inClass = false;
+ for (;;) {
+ if (state.pos >= input.length) {
+ unexpected("Unterminated regular expression", start);
+ return;
+ }
+ const code = input.charCodeAt(state.pos);
+ if (escaped) {
+ escaped = false;
+ } else {
+ if (code === charCodes.leftSquareBracket) {
+ inClass = true;
+ } else if (code === charCodes.rightSquareBracket && inClass) {
+ inClass = false;
+ } else if (code === charCodes.slash && !inClass) {
+ break;
+ }
+ escaped = code === charCodes.backslash;
+ }
+ ++state.pos;
+ }
+ ++state.pos;
+ // Need to use `skipWord` because '\uXXXX' sequences are allowed here (don't ask).
+ skipWord();
+
+ finishToken(tt.regexp);
+}
+
+/**
+ * Read a decimal integer. Note that this can't be unified with the similar code
+ * in readRadixNumber (which also handles hex digits) because "e" needs to be
+ * the end of the integer so that we can properly handle scientific notation.
+ */
+function readInt() {
+ while (true) {
+ const code = input.charCodeAt(state.pos);
+ if ((code >= charCodes.digit0 && code <= charCodes.digit9) || code === charCodes.underscore) {
+ state.pos++;
+ } else {
+ break;
+ }
+ }
+}
+
+function readRadixNumber() {
+ state.pos += 2; // 0x
+
+ // Walk to the end of the number, allowing hex digits.
+ while (true) {
+ const code = input.charCodeAt(state.pos);
+ if (
+ (code >= charCodes.digit0 && code <= charCodes.digit9) ||
+ (code >= charCodes.lowercaseA && code <= charCodes.lowercaseF) ||
+ (code >= charCodes.uppercaseA && code <= charCodes.uppercaseF) ||
+ code === charCodes.underscore
+ ) {
+ state.pos++;
+ } else {
+ break;
+ }
+ }
+
+ const nextChar = input.charCodeAt(state.pos);
+ if (nextChar === charCodes.lowercaseN) {
+ ++state.pos;
+ finishToken(tt.bigint);
+ } else {
+ finishToken(tt.num);
+ }
+}
+
+// Read an integer, octal integer, or floating-point number.
+function readNumber(startsWithDot) {
+ let isBigInt = false;
+ let isDecimal = false;
+
+ if (!startsWithDot) {
+ readInt();
+ }
+
+ let nextChar = input.charCodeAt(state.pos);
+ if (nextChar === charCodes.dot) {
+ ++state.pos;
+ readInt();
+ nextChar = input.charCodeAt(state.pos);
+ }
+
+ if (nextChar === charCodes.uppercaseE || nextChar === charCodes.lowercaseE) {
+ nextChar = input.charCodeAt(++state.pos);
+ if (nextChar === charCodes.plusSign || nextChar === charCodes.dash) {
+ ++state.pos;
+ }
+ readInt();
+ nextChar = input.charCodeAt(state.pos);
+ }
+
+ if (nextChar === charCodes.lowercaseN) {
+ ++state.pos;
+ isBigInt = true;
+ } else if (nextChar === charCodes.lowercaseM) {
+ ++state.pos;
+ isDecimal = true;
+ }
+
+ if (isBigInt) {
+ finishToken(tt.bigint);
+ return;
+ }
+
+ if (isDecimal) {
+ finishToken(tt.decimal);
+ return;
+ }
+
+ finishToken(tt.num);
+}
+
+function readString(quote) {
+ state.pos++;
+ for (;;) {
+ if (state.pos >= input.length) {
+ unexpected("Unterminated string constant");
+ return;
+ }
+ const ch = input.charCodeAt(state.pos);
+ if (ch === charCodes.backslash) {
+ state.pos++;
+ } else if (ch === quote) {
+ break;
+ }
+ state.pos++;
+ }
+ state.pos++;
+ finishToken(tt.string);
+}
+
+// Reads template string tokens.
+function readTmplToken() {
+ for (;;) {
+ if (state.pos >= input.length) {
+ unexpected("Unterminated template");
+ return;
+ }
+ const ch = input.charCodeAt(state.pos);
+ if (
+ ch === charCodes.graveAccent ||
+ (ch === charCodes.dollarSign && input.charCodeAt(state.pos + 1) === charCodes.leftCurlyBrace)
+ ) {
+ if (state.pos === state.start && match(tt.template)) {
+ if (ch === charCodes.dollarSign) {
+ state.pos += 2;
+ finishToken(tt.dollarBraceL);
+ return;
+ } else {
+ ++state.pos;
+ finishToken(tt.backQuote);
+ return;
+ }
+ }
+ finishToken(tt.template);
+ return;
+ }
+ if (ch === charCodes.backslash) {
+ state.pos++;
+ }
+ state.pos++;
+ }
+}
+
+// Skip to the end of the current word. Note that this is the same as the snippet at the end of
+// readWord, but calling skipWord from readWord seems to slightly hurt performance from some rough
+// measurements.
+export function skipWord() {
+ while (state.pos < input.length) {
+ const ch = input.charCodeAt(state.pos);
+ if (IS_IDENTIFIER_CHAR[ch]) {
+ state.pos++;
+ } else if (ch === charCodes.backslash) {
+ // \u
+ state.pos += 2;
+ if (input.charCodeAt(state.pos) === charCodes.leftCurlyBrace) {
+ while (
+ state.pos < input.length &&
+ input.charCodeAt(state.pos) !== charCodes.rightCurlyBrace
+ ) {
+ state.pos++;
+ }
+ state.pos++;
+ }
+ } else {
+ break;
+ }
+ }
+}
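
Note on the lookahead helpers above: lookaheadType() and lookaheadTypeAndKeyword() work by taking a snapshot of the shared tokenizer state, running next() to tokenize one token ahead, reading state.type (and state.contextualKeyword), and then rolling back with state.restoreFromSnapshot(). A minimal standalone sketch of that save/restore idea; MiniState is an illustrative stand-in, not the real State object shipped in state.js:

class MiniState {
  constructor() {
    this.pos = 0;
    this.type = "start";
  }
  snapshot() {
    return {pos: this.pos, type: this.type};
  }
  restoreFromSnapshot(snapshot) {
    this.pos = snapshot.pos;
    this.type = snapshot.type;
  }
}

const miniState = new MiniState();
const saved = miniState.snapshot();
miniState.pos = 5;                 // pretend next() consumed a token...
miniState.type = "parenL";         // ...and classified it
const lookahead = miniState.type;  // this is what lookaheadType() would return
miniState.restoreFromSnapshot(saved);
console.log(lookahead, miniState.type, miniState.pos); // "parenL" "start" 0
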
diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/keywords.js b/node_modules/sucrase/dist/esm/parser/tokenizer/keywords.js
new file mode 100644
index 0000000..0dcf1b0
--- /dev/null
+++ b/node_modules/sucrase/dist/esm/parser/tokenizer/keywords.js
@@ -0,0 +1,43 @@
+export var ContextualKeyword; (function (ContextualKeyword) {
+ const NONE = 0; ContextualKeyword[ContextualKeyword["NONE"] = NONE] = "NONE";
+ const _abstract = NONE + 1; ContextualKeyword[ContextualKeyword["_abstract"] = _abstract] = "_abstract";
+ const _accessor = _abstract + 1; ContextualKeyword[ContextualKeyword["_accessor"] = _accessor] = "_accessor";
+ const _as = _accessor + 1; ContextualKeyword[ContextualKeyword["_as"] = _as] = "_as";
+ const _assert = _as + 1; ContextualKeyword[ContextualKeyword["_assert"] = _assert] = "_assert";
+ const _asserts = _assert + 1; ContextualKeyword[ContextualKeyword["_asserts"] = _asserts] = "_asserts";
+ const _async = _asserts + 1; ContextualKeyword[ContextualKeyword["_async"] = _async] = "_async";
+ const _await = _async + 1; ContextualKeyword[ContextualKeyword["_await"] = _await] = "_await";
+ const _checks = _await + 1; ContextualKeyword[ContextualKeyword["_checks"] = _checks] = "_checks";
+ const _constructor = _checks + 1; ContextualKeyword[ContextualKeyword["_constructor"] = _constructor] = "_constructor";
+ const _declare = _constructor + 1; ContextualKeyword[ContextualKeyword["_declare"] = _declare] = "_declare";
+ const _enum = _declare + 1; ContextualKeyword[ContextualKeyword["_enum"] = _enum] = "_enum";
+ const _exports = _enum + 1; ContextualKeyword[ContextualKeyword["_exports"] = _exports] = "_exports";
+ const _from = _exports + 1; ContextualKeyword[ContextualKeyword["_from"] = _from] = "_from";
+ const _get = _from + 1; ContextualKeyword[ContextualKeyword["_get"] = _get] = "_get";
+ const _global = _get + 1; ContextualKeyword[ContextualKeyword["_global"] = _global] = "_global";
+ const _implements = _global + 1; ContextualKeyword[ContextualKeyword["_implements"] = _implements] = "_implements";
+ const _infer = _implements + 1; ContextualKeyword[ContextualKeyword["_infer"] = _infer] = "_infer";
+ const _interface = _infer + 1; ContextualKeyword[ContextualKeyword["_interface"] = _interface] = "_interface";
+ const _is = _interface + 1; ContextualKeyword[ContextualKeyword["_is"] = _is] = "_is";
+ const _keyof = _is + 1; ContextualKeyword[ContextualKeyword["_keyof"] = _keyof] = "_keyof";
+ const _mixins = _keyof + 1; ContextualKeyword[ContextualKeyword["_mixins"] = _mixins] = "_mixins";
+ const _module = _mixins + 1; ContextualKeyword[ContextualKeyword["_module"] = _module] = "_module";
+ const _namespace = _module + 1; ContextualKeyword[ContextualKeyword["_namespace"] = _namespace] = "_namespace";
+ const _of = _namespace + 1; ContextualKeyword[ContextualKeyword["_of"] = _of] = "_of";
+ const _opaque = _of + 1; ContextualKeyword[ContextualKeyword["_opaque"] = _opaque] = "_opaque";
+ const _out = _opaque + 1; ContextualKeyword[ContextualKeyword["_out"] = _out] = "_out";
+ const _override = _out + 1; ContextualKeyword[ContextualKeyword["_override"] = _override] = "_override";
+ const _private = _override + 1; ContextualKeyword[ContextualKeyword["_private"] = _private] = "_private";
+ const _protected = _private + 1; ContextualKeyword[ContextualKeyword["_protected"] = _protected] = "_protected";
+ const _proto = _protected + 1; ContextualKeyword[ContextualKeyword["_proto"] = _proto] = "_proto";
+ const _public = _proto + 1; ContextualKeyword[ContextualKeyword["_public"] = _public] = "_public";
+ const _readonly = _public + 1; ContextualKeyword[ContextualKeyword["_readonly"] = _readonly] = "_readonly";
+ const _require = _readonly + 1; ContextualKeyword[ContextualKeyword["_require"] = _require] = "_require";
+ const _satisfies = _require + 1; ContextualKeyword[ContextualKeyword["_satisfies"] = _satisfies] = "_satisfies";
+ const _set = _satisfies + 1; ContextualKeyword[ContextualKeyword["_set"] = _set] = "_set";
+ const _static = _set + 1; ContextualKeyword[ContextualKeyword["_static"] = _static] = "_static";
+ const _symbol = _static + 1; ContextualKeyword[ContextualKeyword["_symbol"] = _symbol] = "_symbol";
+ const _type = _symbol + 1; ContextualKeyword[ContextualKeyword["_type"] = _type] = "_type";
+ const _unique = _type + 1; ContextualKeyword[ContextualKeyword["_unique"] = _unique] = "_unique";
+ const _using = _unique + 1; ContextualKeyword[ContextualKeyword["_using"] = _using] = "_using";
+})(ContextualKeyword || (ContextualKeyword = {}));
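
This file, like the IdentifierRole and JSXRole blocks in index.js above, is the compiled form of a TypeScript enum: the IIFE assigns each member an incrementing integer and the nested assignment also builds the reverse (number-to-name) mapping. A minimal sketch of what the pattern produces, using illustrative member names rather than the real ones:

// Enum[Enum["NONE"] = 0] = "NONE" writes both directions:
// Enum.NONE === 0 and Enum[0] === "NONE".
var Example; (function (Example) {
  const NONE = 0; Example[Example["NONE"] = NONE] = "NONE";
  const _as = NONE + 1; Example[Example["_as"] = _as] = "_as";
})(Example || (Example = {}));

console.log(Example._as); // 1
console.log(Example[1]);  // "_as"
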
diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/readWord.js b/node_modules/sucrase/dist/esm/parser/tokenizer/readWord.js
new file mode 100644
index 0000000..cf3df89
--- /dev/null
+++ b/node_modules/sucrase/dist/esm/parser/tokenizer/readWord.js
@@ -0,0 +1,64 @@
+import {input, state} from "../traverser/base";
+import {charCodes} from "../util/charcodes";
+import {IS_IDENTIFIER_CHAR} from "../util/identifier";
+import {finishToken} from "./index";
+import {READ_WORD_TREE} from "./readWordTree";
+import {TokenType as tt} from "./types";
+
+/**
+ * Read an identifier, producing either a name token or matching on one of the existing keywords.
+ * For performance, we pre-generate big decision tree that we traverse. Each node represents a
+ * prefix and has 27 values, where the first value is the token or contextual token, if any (-1 if
+ * not), and the other 26 values are the transitions to other nodes, or -1 to stop.
+ */
+export default function readWord() {
+ let treePos = 0;
+ let code = 0;
+ let pos = state.pos;
+ while (pos < input.length) {
+ code = input.charCodeAt(pos);
+ if (code < charCodes.lowercaseA || code > charCodes.lowercaseZ) {
+ break;
+ }
+ const next = READ_WORD_TREE[treePos + (code - charCodes.lowercaseA) + 1];
+ if (next === -1) {
+ break;
+ } else {
+ treePos = next;
+ pos++;
+ }
+ }
+
+ const keywordValue = READ_WORD_TREE[treePos];
+ if (keywordValue > -1 && !IS_IDENTIFIER_CHAR[code]) {
+ state.pos = pos;
+ if (keywordValue & 1) {
+ finishToken(keywordValue >>> 1);
+ } else {
+ finishToken(tt.name, keywordValue >>> 1);
+ }
+ return;
+ }
+
+ while (pos < input.length) {
+ const ch = input.charCodeAt(pos);
+ if (IS_IDENTIFIER_CHAR[ch]) {
+ pos++;
+ } else if (ch === charCodes.backslash) {
+ // \u
+ pos += 2;
+ if (input.charCodeAt(pos) === charCodes.leftCurlyBrace) {
+ while (pos < input.length && input.charCodeAt(pos) !== charCodes.rightCurlyBrace) {
+ pos++;
+ }
+ pos++;
+ }
+ } else if (ch === charCodes.atSign && input.charCodeAt(pos + 1) === charCodes.atSign) {
+ pos += 2;
+ } else {
+ break;
+ }
+ }
+ state.pos = pos;
+ finishToken(tt.name);
+}
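
The generated tree in readWordTree.js (next file) packs each node's payload into one integer: real keyword token types are stored as (tt._keyword << 1) + 1 and contextual keywords as ContextualKeyword._name << 1, which is exactly what the keywordValue & 1 branch and the >>> 1 shifts in readWord() above unpack. A small standalone sketch of that encoding; the numeric constants are illustrative stand-ins, not the real TokenType or ContextualKeyword values:

const FAKE_BREAK_TOKEN_TYPE = 42;  // stand-in for TokenType._break
const FAKE_ASYNC_KEYWORD = 6;      // stand-in for ContextualKeyword._async

const breakEntry = (FAKE_BREAK_TOKEN_TYPE << 1) + 1; // low bit set: real keyword token
const asyncEntry = FAKE_ASYNC_KEYWORD << 1;          // low bit clear: contextual keyword

function decode(keywordValue) {
  return keywordValue & 1
    ? {finishWith: "keyword token type", value: keywordValue >>> 1}
    : {finishWith: "tt.name", contextualKeyword: keywordValue >>> 1};
}

console.log(decode(breakEntry)); // { finishWith: 'keyword token type', value: 42 }
console.log(decode(asyncEntry)); // { finishWith: 'tt.name', contextualKeyword: 6 }
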
diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/readWordTree.js b/node_modules/sucrase/dist/esm/parser/tokenizer/readWordTree.js
new file mode 100644
index 0000000..ffb8cac
--- /dev/null
+++ b/node_modules/sucrase/dist/esm/parser/tokenizer/readWordTree.js
@@ -0,0 +1,671 @@
+// Generated file, do not edit! Run "yarn generate" to re-generate this file.
+import {ContextualKeyword} from "./keywords";
+import {TokenType as tt} from "./types";
+
+// prettier-ignore
+export const READ_WORD_TREE = new Int32Array([
+ // ""
+ -1, 27, 783, 918, 1755, 2376, 2862, 3483, -1, 3699, -1, 4617, 4752, 4833, 5130, 5508, 5940, -1, 6480, 6939, 7749, 8181, 8451, 8613, -1, 8829, -1,
+ // "a"
+ -1, -1, 54, 243, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 432, -1, -1, -1, 675, -1, -1, -1,
+ // "ab"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 81, -1, -1, -1, -1, -1, -1, -1,
+ // "abs"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 108, -1, -1, -1, -1, -1, -1,
+ // "abst"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 135, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "abstr"
+ -1, 162, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "abstra"
+ -1, -1, -1, 189, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "abstrac"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 216, -1, -1, -1, -1, -1, -1,
+ // "abstract"
+ ContextualKeyword._abstract << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ac"
+ -1, -1, -1, 270, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "acc"
+ -1, -1, -1, -1, -1, 297, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "acce"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 324, -1, -1, -1, -1, -1, -1, -1,
+ // "acces"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 351, -1, -1, -1, -1, -1, -1, -1,
+ // "access"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 378, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "accesso"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 405, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "accessor"
+ ContextualKeyword._accessor << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "as"
+ ContextualKeyword._as << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 459, -1, -1, -1, -1, -1, 594, -1,
+ // "ass"
+ -1, -1, -1, -1, -1, 486, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "asse"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 513, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "asser"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 540, -1, -1, -1, -1, -1, -1,
+ // "assert"
+ ContextualKeyword._assert << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 567, -1, -1, -1, -1, -1, -1, -1,
+ // "asserts"
+ ContextualKeyword._asserts << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "asy"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 621, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "asyn"
+ -1, -1, -1, 648, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "async"
+ ContextualKeyword._async << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "aw"
+ -1, 702, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "awa"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 729, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "awai"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 756, -1, -1, -1, -1, -1, -1,
+ // "await"
+ ContextualKeyword._await << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "b"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 810, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "br"
+ -1, -1, -1, -1, -1, 837, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "bre"
+ -1, 864, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "brea"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 891, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "break"
+ (tt._break << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "c"
+ -1, 945, -1, -1, -1, -1, -1, -1, 1107, -1, -1, -1, 1242, -1, -1, 1350, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ca"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 972, 1026, -1, -1, -1, -1, -1, -1,
+ // "cas"
+ -1, -1, -1, -1, -1, 999, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "case"
+ (tt._case << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "cat"
+ -1, -1, -1, 1053, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "catc"
+ -1, -1, -1, -1, -1, -1, -1, -1, 1080, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "catch"
+ (tt._catch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ch"
+ -1, -1, -1, -1, -1, 1134, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "che"
+ -1, -1, -1, 1161, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "chec"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1188, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "check"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1215, -1, -1, -1, -1, -1, -1, -1,
+ // "checks"
+ ContextualKeyword._checks << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "cl"
+ -1, 1269, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "cla"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1296, -1, -1, -1, -1, -1, -1, -1,
+ // "clas"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1323, -1, -1, -1, -1, -1, -1, -1,
+ // "class"
+ (tt._class << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "co"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1377, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "con"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1404, 1620, -1, -1, -1, -1, -1, -1,
+ // "cons"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1431, -1, -1, -1, -1, -1, -1,
+ // "const"
+ (tt._const << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1458, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "constr"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1485, -1, -1, -1, -1, -1,
+ // "constru"
+ -1, -1, -1, 1512, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "construc"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1539, -1, -1, -1, -1, -1, -1,
+ // "construct"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1566, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "constructo"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1593, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "constructor"
+ ContextualKeyword._constructor << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "cont"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 1647, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "conti"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1674, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "contin"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1701, -1, -1, -1, -1, -1,
+ // "continu"
+ -1, -1, -1, -1, -1, 1728, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "continue"
+ (tt._continue << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "d"
+ -1, -1, -1, -1, -1, 1782, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2349, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "de"
+ -1, -1, 1809, 1971, -1, -1, 2106, -1, -1, -1, -1, -1, 2241, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "deb"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1836, -1, -1, -1, -1, -1,
+ // "debu"
+ -1, -1, -1, -1, -1, -1, -1, 1863, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "debug"
+ -1, -1, -1, -1, -1, -1, -1, 1890, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "debugg"
+ -1, -1, -1, -1, -1, 1917, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "debugge"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1944, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "debugger"
+ (tt._debugger << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "dec"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1998, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "decl"
+ -1, 2025, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "decla"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2052, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "declar"
+ -1, -1, -1, -1, -1, 2079, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "declare"
+ ContextualKeyword._declare << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "def"
+ -1, 2133, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "defa"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2160, -1, -1, -1, -1, -1,
+ // "defau"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2187, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "defaul"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2214, -1, -1, -1, -1, -1, -1,
+ // "default"
+ (tt._default << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "del"
+ -1, -1, -1, -1, -1, 2268, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "dele"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2295, -1, -1, -1, -1, -1, -1,
+ // "delet"
+ -1, -1, -1, -1, -1, 2322, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "delete"
+ (tt._delete << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "do"
+ (tt._do << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "e"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2403, -1, 2484, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2565, -1, -1,
+ // "el"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2430, -1, -1, -1, -1, -1, -1, -1,
+ // "els"
+ -1, -1, -1, -1, -1, 2457, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "else"
+ (tt._else << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "en"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2511, -1, -1, -1, -1, -1,
+ // "enu"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2538, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "enum"
+ ContextualKeyword._enum << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ex"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2592, -1, -1, -1, 2727, -1, -1, -1, -1, -1, -1,
+ // "exp"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2619, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "expo"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2646, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "expor"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2673, -1, -1, -1, -1, -1, -1,
+ // "export"
+ (tt._export << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2700, -1, -1, -1, -1, -1, -1, -1,
+ // "exports"
+ ContextualKeyword._exports << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ext"
+ -1, -1, -1, -1, -1, 2754, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "exte"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2781, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "exten"
+ -1, -1, -1, -1, 2808, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "extend"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2835, -1, -1, -1, -1, -1, -1, -1,
+ // "extends"
+ (tt._extends << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "f"
+ -1, 2889, -1, -1, -1, -1, -1, -1, -1, 2997, -1, -1, -1, -1, -1, 3159, -1, -1, 3213, -1, -1, 3294, -1, -1, -1, -1, -1,
+ // "fa"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2916, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fal"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2943, -1, -1, -1, -1, -1, -1, -1,
+ // "fals"
+ -1, -1, -1, -1, -1, 2970, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "false"
+ (tt._false << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3024, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fin"
+ -1, 3051, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fina"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3078, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "final"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3105, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "finall"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3132, -1,
+ // "finally"
+ (tt._finally << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fo"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3186, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "for"
+ (tt._for << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fr"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3240, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fro"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3267, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "from"
+ ContextualKeyword._from << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fu"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3321, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "fun"
+ -1, -1, -1, 3348, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "func"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3375, -1, -1, -1, -1, -1, -1,
+ // "funct"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 3402, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "functi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3429, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "functio"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3456, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "function"
+ (tt._function << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "g"
+ -1, -1, -1, -1, -1, 3510, -1, -1, -1, -1, -1, -1, 3564, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ge"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3537, -1, -1, -1, -1, -1, -1,
+ // "get"
+ ContextualKeyword._get << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "gl"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3591, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "glo"
+ -1, -1, 3618, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "glob"
+ -1, 3645, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "globa"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3672, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "global"
+ ContextualKeyword._global << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "i"
+ -1, -1, -1, -1, -1, -1, 3726, -1, -1, -1, -1, -1, -1, 3753, 4077, -1, -1, -1, -1, 4590, -1, -1, -1, -1, -1, -1, -1,
+ // "if"
+ (tt._if << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "im"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3780, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "imp"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3807, -1, -1, 3996, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "impl"
+ -1, -1, -1, -1, -1, 3834, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "imple"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3861, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "implem"
+ -1, -1, -1, -1, -1, 3888, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "impleme"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3915, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "implemen"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3942, -1, -1, -1, -1, -1, -1,
+ // "implement"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3969, -1, -1, -1, -1, -1, -1, -1,
+ // "implements"
+ ContextualKeyword._implements << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "impo"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4023, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "impor"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4050, -1, -1, -1, -1, -1, -1,
+ // "import"
+ (tt._import << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "in"
+ (tt._in << 1) + 1, -1, -1, -1, -1, -1, 4104, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4185, 4401, -1, -1, -1, -1, -1, -1,
+ // "inf"
+ -1, -1, -1, -1, -1, 4131, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "infe"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4158, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "infer"
+ ContextualKeyword._infer << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ins"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4212, -1, -1, -1, -1, -1, -1,
+ // "inst"
+ -1, 4239, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "insta"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4266, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "instan"
+ -1, -1, -1, 4293, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "instanc"
+ -1, -1, -1, -1, -1, 4320, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "instance"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4347, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "instanceo"
+ -1, -1, -1, -1, -1, -1, 4374, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "instanceof"
+ (tt._instanceof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "int"
+ -1, -1, -1, -1, -1, 4428, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "inte"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4455, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "inter"
+ -1, -1, -1, -1, -1, -1, 4482, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "interf"
+ -1, 4509, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "interfa"
+ -1, -1, -1, 4536, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "interfac"
+ -1, -1, -1, -1, -1, 4563, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "interface"
+ ContextualKeyword._interface << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "is"
+ ContextualKeyword._is << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "k"
+ -1, -1, -1, -1, -1, 4644, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ke"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4671, -1,
+ // "key"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4698, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "keyo"
+ -1, -1, -1, -1, -1, -1, 4725, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "keyof"
+ ContextualKeyword._keyof << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "l"
+ -1, -1, -1, -1, -1, 4779, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "le"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4806, -1, -1, -1, -1, -1, -1,
+ // "let"
+ (tt._let << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "m"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 4860, -1, -1, -1, -1, -1, 4995, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "mi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4887, -1, -1,
+ // "mix"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 4914, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "mixi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4941, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "mixin"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4968, -1, -1, -1, -1, -1, -1, -1,
+ // "mixins"
+ ContextualKeyword._mixins << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "mo"
+ -1, -1, -1, -1, 5022, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "mod"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5049, -1, -1, -1, -1, -1,
+ // "modu"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5076, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "modul"
+ -1, -1, -1, -1, -1, 5103, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "module"
+ ContextualKeyword._module << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "n"
+ -1, 5157, -1, -1, -1, 5373, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5427, -1, -1, -1, -1, -1,
+ // "na"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5184, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "nam"
+ -1, -1, -1, -1, -1, 5211, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "name"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5238, -1, -1, -1, -1, -1, -1, -1,
+ // "names"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5265, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "namesp"
+ -1, 5292, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "namespa"
+ -1, -1, -1, 5319, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "namespac"
+ -1, -1, -1, -1, -1, 5346, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "namespace"
+ ContextualKeyword._namespace << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ne"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5400, -1, -1, -1,
+ // "new"
+ (tt._new << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "nu"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5454, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "nul"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5481, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "null"
+ (tt._null << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "o"
+ -1, -1, -1, -1, -1, -1, 5535, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5562, -1, -1, -1, -1, 5697, 5751, -1, -1, -1, -1,
+ // "of"
+ ContextualKeyword._of << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "op"
+ -1, 5589, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "opa"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5616, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "opaq"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5643, -1, -1, -1, -1, -1,
+ // "opaqu"
+ -1, -1, -1, -1, -1, 5670, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "opaque"
+ ContextualKeyword._opaque << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ou"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5724, -1, -1, -1, -1, -1, -1,
+ // "out"
+ ContextualKeyword._out << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ov"
+ -1, -1, -1, -1, -1, 5778, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ove"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5805, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "over"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5832, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "overr"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 5859, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "overri"
+ -1, -1, -1, -1, 5886, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "overrid"
+ -1, -1, -1, -1, -1, 5913, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "override"
+ ContextualKeyword._override << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "p"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5967, -1, -1, 6345, -1, -1, -1, -1, -1,
+ // "pr"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 5994, -1, -1, -1, -1, -1, 6129, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "pri"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6021, -1, -1, -1, -1,
+ // "priv"
+ -1, 6048, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "priva"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6075, -1, -1, -1, -1, -1, -1,
+ // "privat"
+ -1, -1, -1, -1, -1, 6102, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "private"
+ ContextualKeyword._private << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "pro"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6156, -1, -1, -1, -1, -1, -1,
+ // "prot"
+ -1, -1, -1, -1, -1, 6183, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6318, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "prote"
+ -1, -1, -1, 6210, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "protec"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6237, -1, -1, -1, -1, -1, -1,
+ // "protect"
+ -1, -1, -1, -1, -1, 6264, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "protecte"
+ -1, -1, -1, -1, 6291, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "protected"
+ ContextualKeyword._protected << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "proto"
+ ContextualKeyword._proto << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "pu"
+ -1, -1, 6372, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "pub"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6399, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "publ"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 6426, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "publi"
+ -1, -1, -1, 6453, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "public"
+ ContextualKeyword._public << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "r"
+ -1, -1, -1, -1, -1, 6507, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "re"
+ -1, 6534, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6696, -1, -1, 6831, -1, -1, -1, -1, -1, -1,
+ // "rea"
+ -1, -1, -1, -1, 6561, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "read"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6588, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "reado"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6615, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "readon"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6642, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "readonl"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6669, -1,
+ // "readonly"
+ ContextualKeyword._readonly << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "req"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6723, -1, -1, -1, -1, -1,
+ // "requ"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 6750, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "requi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6777, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "requir"
+ -1, -1, -1, -1, -1, 6804, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "require"
+ ContextualKeyword._require << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ret"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6858, -1, -1, -1, -1, -1,
+ // "retu"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6885, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "retur"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6912, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "return"
+ (tt._return << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "s"
+ -1, 6966, -1, -1, -1, 7182, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7236, 7371, -1, 7479, -1, 7614, -1,
+ // "sa"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6993, -1, -1, -1, -1, -1, -1,
+ // "sat"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 7020, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "sati"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7047, -1, -1, -1, -1, -1, -1, -1,
+ // "satis"
+ -1, -1, -1, -1, -1, -1, 7074, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "satisf"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 7101, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "satisfi"
+ -1, -1, -1, -1, -1, 7128, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "satisfie"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7155, -1, -1, -1, -1, -1, -1, -1,
+ // "satisfies"
+ ContextualKeyword._satisfies << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "se"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7209, -1, -1, -1, -1, -1, -1,
+ // "set"
+ ContextualKeyword._set << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "st"
+ -1, 7263, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "sta"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7290, -1, -1, -1, -1, -1, -1,
+ // "stat"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 7317, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "stati"
+ -1, -1, -1, 7344, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "static"
+ ContextualKeyword._static << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "su"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7398, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "sup"
+ -1, -1, -1, -1, -1, 7425, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "supe"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7452, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "super"
+ (tt._super << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "sw"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 7506, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "swi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7533, -1, -1, -1, -1, -1, -1,
+ // "swit"
+ -1, -1, -1, 7560, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "switc"
+ -1, -1, -1, -1, -1, -1, -1, -1, 7587, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "switch"
+ (tt._switch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "sy"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7641, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "sym"
+ -1, -1, 7668, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "symb"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7695, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "symbo"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7722, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "symbol"
+ ContextualKeyword._symbol << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "t"
+ -1, -1, -1, -1, -1, -1, -1, -1, 7776, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7938, -1, -1, -1, -1, -1, -1, 8046, -1,
+ // "th"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 7803, -1, -1, -1, -1, -1, -1, -1, -1, 7857, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "thi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7830, -1, -1, -1, -1, -1, -1, -1,
+ // "this"
+ (tt._this << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "thr"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7884, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "thro"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7911, -1, -1, -1,
+ // "throw"
+ (tt._throw << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "tr"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7965, -1, -1, -1, 8019, -1,
+ // "tru"
+ -1, -1, -1, -1, -1, 7992, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "true"
+ (tt._true << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "try"
+ (tt._try << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "ty"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8073, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "typ"
+ -1, -1, -1, -1, -1, 8100, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "type"
+ ContextualKeyword._type << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8127, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "typeo"
+ -1, -1, -1, -1, -1, -1, 8154, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "typeof"
+ (tt._typeof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "u"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8208, -1, -1, -1, -1, 8343, -1, -1, -1, -1, -1, -1, -1,
+ // "un"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 8235, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "uni"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8262, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "uniq"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8289, -1, -1, -1, -1, -1,
+ // "uniqu"
+ -1, -1, -1, -1, -1, 8316, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "unique"
+ ContextualKeyword._unique << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "us"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 8370, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "usi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8397, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "usin"
+ -1, -1, -1, -1, -1, -1, -1, 8424, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "using"
+ ContextualKeyword._using << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "v"
+ -1, 8478, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8532, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "va"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8505, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "var"
+ (tt._var << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "vo"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 8559, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "voi"
+ -1, -1, -1, -1, 8586, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "void"
+ (tt._void << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "w"
+ -1, -1, -1, -1, -1, -1, -1, -1, 8640, 8748, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "wh"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 8667, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "whi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8694, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "whil"
+ -1, -1, -1, -1, -1, 8721, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "while"
+ (tt._while << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "wi"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8775, -1, -1, -1, -1, -1, -1,
+ // "wit"
+ -1, -1, -1, -1, -1, -1, -1, -1, 8802, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "with"
+ (tt._with << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "y"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 8856, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "yi"
+ -1, -1, -1, -1, -1, 8883, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "yie"
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8910, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "yiel"
+ -1, -1, -1, -1, 8937, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ // "yield"
+ (tt._yield << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+]);
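Read as a flattened trie, the table above packs one node into every 27 entries: slot 0 holds the value for a word that ends at that node, and slots 1 through 26 (one per letter 'a'..'z') hold the absolute index of the child node, or -1 when no keyword continues with that letter. End values are encoded so that an odd value is (TokenType << 1) + 1 for a real keyword token and an even value is (ContextualKeyword << 1) for a contextual keyword. The authoritative lookup lives in readWord.js in this same directory; the fragment below is only a minimal sketch of that walk, and lookupWord and CHAR_CODE_A are names introduced here for illustration (it also assumes the table is exported from this module, e.g. as READ_WORD_TREE in upstream sucrase).

// Minimal sketch: walk the flattened keyword trie for a lowercase ASCII word.
const CHAR_CODE_A = "a".charCodeAt(0);

function lookupWord(tree, word) {
  let nodeStart = 0;
  for (let i = 0; i < word.length; i++) {
    const letterSlot = word.charCodeAt(i) - CHAR_CODE_A + 1;
    if (letterSlot < 1 || letterSlot > 26) return -1; // not a plain lowercase word
    const next = tree[nodeStart + letterSlot];
    if (next === -1) return -1; // no keyword has this prefix
    nodeStart = next; // child nodes are stored at absolute indices (multiples of 27)
  }
  // Slot 0 of the final node: -1 for a non-keyword, otherwise the encoded value,
  // e.g. (TokenType._instanceof << 1) + 1 for "instanceof" in the table above.
  return tree[nodeStart];
}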
diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/state.js b/node_modules/sucrase/dist/esm/parser/tokenizer/state.js
new file mode 100644
index 0000000..940cde0
--- /dev/null
+++ b/node_modules/sucrase/dist/esm/parser/tokenizer/state.js
@@ -0,0 +1,106 @@
+
+import {ContextualKeyword} from "./keywords";
+import { TokenType as tt} from "./types";
+
+export class Scope {
+
+
+
+
+ constructor(startTokenIndex, endTokenIndex, isFunctionScope) {
+ this.startTokenIndex = startTokenIndex;
+ this.endTokenIndex = endTokenIndex;
+ this.isFunctionScope = isFunctionScope;
+ }
+}
+
+export class StateSnapshot {
+ constructor(
+ potentialArrowAt,
+ noAnonFunctionType,
+ inDisallowConditionalTypesContext,
+ tokensLength,
+ scopesLength,
+ pos,
+ type,
+ contextualKeyword,
+ start,
+ end,
+ isType,
+ scopeDepth,
+ error,
+ ) {;this.potentialArrowAt = potentialArrowAt;this.noAnonFunctionType = noAnonFunctionType;this.inDisallowConditionalTypesContext = inDisallowConditionalTypesContext;this.tokensLength = tokensLength;this.scopesLength = scopesLength;this.pos = pos;this.type = type;this.contextualKeyword = contextualKeyword;this.start = start;this.end = end;this.isType = isType;this.scopeDepth = scopeDepth;this.error = error;}
+}
+
+export default class State {constructor() { State.prototype.__init.call(this);State.prototype.__init2.call(this);State.prototype.__init3.call(this);State.prototype.__init4.call(this);State.prototype.__init5.call(this);State.prototype.__init6.call(this);State.prototype.__init7.call(this);State.prototype.__init8.call(this);State.prototype.__init9.call(this);State.prototype.__init10.call(this);State.prototype.__init11.call(this);State.prototype.__init12.call(this);State.prototype.__init13.call(this); }
+ // Used to signify the start of a potential arrow function
+ __init() {this.potentialArrowAt = -1}
+
+ // Used by Flow to handle an edge case involving function type parsing.
+ __init2() {this.noAnonFunctionType = false}
+
+ // Used by TypeScript to handle ambiguities when parsing conditional types.
+ __init3() {this.inDisallowConditionalTypesContext = false}
+
+ // Token store.
+ __init4() {this.tokens = []}
+
+ // Array of all observed scopes, ordered by their ending position.
+ __init5() {this.scopes = []}
+
+ // The current position of the tokenizer in the input.
+ __init6() {this.pos = 0}
+
+ // Information about the current token.
+ __init7() {this.type = tt.eof}
+ __init8() {this.contextualKeyword = ContextualKeyword.NONE}
+ __init9() {this.start = 0}
+ __init10() {this.end = 0}
+
+ __init11() {this.isType = false}
+ __init12() {this.scopeDepth = 0}
+
+ /**
+ * If the parser is in an error state, then the token is always tt.eof and all functions can
+ * keep executing but should be written so they don't get into an infinite loop in this situation.
+ *
+ * This approach, combined with the ability to snapshot and restore state, allows us to implement
+ * backtracking without exceptions and without needing to explicitly propagate error states
+ * everywhere.
+ */
+ __init13() {this.error = null}
+
+ snapshot() {
+ return new StateSnapshot(
+ this.potentialArrowAt,
+ this.noAnonFunctionType,
+ this.inDisallowConditionalTypesContext,
+ this.tokens.length,
+ this.scopes.length,
+ this.pos,
+ this.type,
+ this.contextualKeyword,
+ this.start,
+ this.end,
+ this.isType,
+ this.scopeDepth,
+ this.error,
+ );
+ }
+
+ restoreFromSnapshot(snapshot) {
+ this.potentialArrowAt = snapshot.potentialArrowAt;
+ this.noAnonFunctionType = snapshot.noAnonFunctionType;
+ this.inDisallowConditionalTypesContext = snapshot.inDisallowConditionalTypesContext;
+ this.tokens.length = snapshot.tokensLength;
+ this.scopes.length = snapshot.scopesLength;
+ this.pos = snapshot.pos;
+ this.type = snapshot.type;
+ this.contextualKeyword = snapshot.contextualKeyword;
+ this.start = snapshot.start;
+ this.end = snapshot.end;
+ this.isType = snapshot.isType;
+ this.scopeDepth = snapshot.scopeDepth;
+ this.error = snapshot.error;
+ }
+}
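The comment on __init13 above describes how the error field pairs with snapshot() and restoreFromSnapshot() to give backtracking without exceptions. The fragment below is a minimal sketch of that pattern, not code from sucrase: tryParse is a name introduced here, and a fresh State instance stands in for the shared state singleton that the real traverser imports from ../traverser/base.

import State from "./state";

const state = new State(); // stand-in for the shared parser state

// Attempt a speculative parse; on error, roll back tokens, scopes, position,
// and the error flag itself so the caller can try another interpretation.
function tryParse(parseFn) {
  const snapshot = state.snapshot();
  parseFn();
  if (state.error) {
    state.restoreFromSnapshot(snapshot);
    return false;
  }
  return true;
}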
diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/types.js b/node_modules/sucrase/dist/esm/parser/tokenizer/types.js
new file mode 100644
index 0000000..9746ad6
--- /dev/null
+++ b/node_modules/sucrase/dist/esm/parser/tokenizer/types.js
@@ -0,0 +1,361 @@
+// Generated file, do not edit! Run "yarn generate" to re-generate this file.
+/* istanbul ignore file */
+/**
+ * Enum of all token types, with bit fields to signify meaningful properties.
+ */
+export var TokenType; (function (TokenType) {
+ // Precedence 0 means not an operator; otherwise it is a positive number up to 12.
+ const PRECEDENCE_MASK = 0xf; TokenType[TokenType["PRECEDENCE_MASK"] = PRECEDENCE_MASK] = "PRECEDENCE_MASK";
+ const IS_KEYWORD = 1 << 4; TokenType[TokenType["IS_KEYWORD"] = IS_KEYWORD] = "IS_KEYWORD";
+ const IS_ASSIGN = 1 << 5; TokenType[TokenType["IS_ASSIGN"] = IS_ASSIGN] = "IS_ASSIGN";
+ const IS_RIGHT_ASSOCIATIVE = 1 << 6; TokenType[TokenType["IS_RIGHT_ASSOCIATIVE"] = IS_RIGHT_ASSOCIATIVE] = "IS_RIGHT_ASSOCIATIVE";
+ const IS_PREFIX = 1 << 7; TokenType[TokenType["IS_PREFIX"] = IS_PREFIX] = "IS_PREFIX";
+ const IS_POSTFIX = 1 << 8; TokenType[TokenType["IS_POSTFIX"] = IS_POSTFIX] = "IS_POSTFIX";
+ const IS_EXPRESSION_START = 1 << 9; TokenType[TokenType["IS_EXPRESSION_START"] = IS_EXPRESSION_START] = "IS_EXPRESSION_START";
+
+ const num = 512; TokenType[TokenType["num"] = num] = "num"; // num startsExpr
+ const bigint = 1536; TokenType[TokenType["bigint"] = bigint] = "bigint"; // bigint startsExpr
+ const decimal = 2560; TokenType[TokenType["decimal"] = decimal] = "decimal"; // decimal startsExpr
+ const regexp = 3584; TokenType[TokenType["regexp"] = regexp] = "regexp"; // regexp startsExpr
+ const string = 4608; TokenType[TokenType["string"] = string] = "string"; // string startsExpr
+ const name = 5632; TokenType[TokenType["name"] = name] = "name"; // name startsExpr
+ const eof = 6144; TokenType[TokenType["eof"] = eof] = "eof"; // eof
+ const bracketL = 7680; TokenType[TokenType["bracketL"] = bracketL] = "bracketL"; // [ startsExpr
+ const bracketR = 8192; TokenType[TokenType["bracketR"] = bracketR] = "bracketR"; // ]
+ const braceL = 9728; TokenType[TokenType["braceL"] = braceL] = "braceL"; // { startsExpr
+ const braceBarL = 10752; TokenType[TokenType["braceBarL"] = braceBarL] = "braceBarL"; // {| startsExpr
+ const braceR = 11264; TokenType[TokenType["braceR"] = braceR] = "braceR"; // }
+ const braceBarR = 12288; TokenType[TokenType["braceBarR"] = braceBarR] = "braceBarR"; // |}
+ const parenL = 13824; TokenType[TokenType["parenL"] = parenL] = "parenL"; // ( startsExpr
+ const parenR = 14336; TokenType[TokenType["parenR"] = parenR] = "parenR"; // )
+ const comma = 15360; TokenType[TokenType["comma"] = comma] = "comma"; // ,
+ const semi = 16384; TokenType[TokenType["semi"] = semi] = "semi"; // ;
+ const colon = 17408; TokenType[TokenType["colon"] = colon] = "colon"; // :
+ const doubleColon = 18432; TokenType[TokenType["doubleColon"] = doubleColon] = "doubleColon"; // ::
+ const dot = 19456; TokenType[TokenType["dot"] = dot] = "dot"; // .
+ const question = 20480; TokenType[TokenType["question"] = question] = "question"; // ?
+ const questionDot = 21504; TokenType[TokenType["questionDot"] = questionDot] = "questionDot"; // ?.
+ const arrow = 22528; TokenType[TokenType["arrow"] = arrow] = "arrow"; // =>
+ const template = 23552; TokenType[TokenType["template"] = template] = "template"; // template
+ const ellipsis = 24576; TokenType[TokenType["ellipsis"] = ellipsis] = "ellipsis"; // ...
+ const backQuote = 25600; TokenType[TokenType["backQuote"] = backQuote] = "backQuote"; // `
+ const dollarBraceL = 27136; TokenType[TokenType["dollarBraceL"] = dollarBraceL] = "dollarBraceL"; // ${ startsExpr
+ const at = 27648; TokenType[TokenType["at"] = at] = "at"; // @
+ const hash = 29184; TokenType[TokenType["hash"] = hash] = "hash"; // # startsExpr
+ const eq = 29728; TokenType[TokenType["eq"] = eq] = "eq"; // = isAssign
+ const assign = 30752; TokenType[TokenType["assign"] = assign] = "assign"; // _= isAssign
+ const preIncDec = 32640; TokenType[TokenType["preIncDec"] = preIncDec] = "preIncDec"; // ++/-- prefix postfix startsExpr
+ const postIncDec = 33664; TokenType[TokenType["postIncDec"] = postIncDec] = "postIncDec"; // ++/-- prefix postfix startsExpr
+ const bang = 34432; TokenType[TokenType["bang"] = bang] = "bang"; // ! prefix startsExpr
+ const tilde = 35456; TokenType[TokenType["tilde"] = tilde] = "tilde"; // ~ prefix startsExpr
+ const pipeline = 35841; TokenType[TokenType["pipeline"] = pipeline] = "pipeline"; // |> prec:1
+ const nullishCoalescing = 36866; TokenType[TokenType["nullishCoalescing"] = nullishCoalescing] = "nullishCoalescing"; // ?? prec:2
+ const logicalOR = 37890; TokenType[TokenType["logicalOR"] = logicalOR] = "logicalOR"; // || prec:2
+ const logicalAND = 38915; TokenType[TokenType["logicalAND"] = logicalAND] = "logicalAND"; // && prec:3
+ const bitwiseOR = 39940; TokenType[TokenType["bitwiseOR"] = bitwiseOR] = "bitwiseOR"; // | prec:4
+ const bitwiseXOR = 40965; TokenType[TokenType["bitwiseXOR"] = bitwiseXOR] = "bitwiseXOR"; // ^ prec:5
+ const bitwiseAND = 41990; TokenType[TokenType["bitwiseAND"] = bitwiseAND] = "bitwiseAND"; // & prec:6
+ const equality = 43015; TokenType[TokenType["equality"] = equality] = "equality"; // ==/!= prec:7
+ const lessThan = 44040; TokenType[TokenType["lessThan"] = lessThan] = "lessThan"; // < prec:8
+ const greaterThan = 45064; TokenType[TokenType["greaterThan"] = greaterThan] = "greaterThan"; // > prec:8
+ const relationalOrEqual = 46088; TokenType[TokenType["relationalOrEqual"] = relationalOrEqual] = "relationalOrEqual"; // <=/>= prec:8
+ const bitShiftL = 47113; TokenType[TokenType["bitShiftL"] = bitShiftL] = "bitShiftL"; // << prec:9
+ const bitShiftR = 48137; TokenType[TokenType["bitShiftR"] = bitShiftR] = "bitShiftR"; // >>/>>> prec:9
+ const plus = 49802; TokenType[TokenType["plus"] = plus] = "plus"; // + prec:10 prefix startsExpr
+ const minus = 50826; TokenType[TokenType["minus"] = minus] = "minus"; // - prec:10 prefix startsExpr
+ const modulo = 51723; TokenType[TokenType["modulo"] = modulo] = "modulo"; // % prec:11 startsExpr
+ const star = 52235; TokenType[TokenType["star"] = star] = "star"; // * prec:11
+ const slash = 53259; TokenType[TokenType["slash"] = slash] = "slash"; // / prec:11
+ const exponent = 54348; TokenType[TokenType["exponent"] = exponent] = "exponent"; // ** prec:12 rightAssociative
+ const jsxName = 55296; TokenType[TokenType["jsxName"] = jsxName] = "jsxName"; // jsxName
+ const jsxText = 56320; TokenType[TokenType["jsxText"] = jsxText] = "jsxText"; // jsxText
+ const jsxEmptyText = 57344; TokenType[TokenType["jsxEmptyText"] = jsxEmptyText] = "jsxEmptyText"; // jsxEmptyText
+ const jsxTagStart = 58880; TokenType[TokenType["jsxTagStart"] = jsxTagStart] = "jsxTagStart"; // jsxTagStart startsExpr
+ const jsxTagEnd = 59392; TokenType[TokenType["jsxTagEnd"] = jsxTagEnd] = "jsxTagEnd"; // jsxTagEnd
+ const typeParameterStart = 60928; TokenType[TokenType["typeParameterStart"] = typeParameterStart] = "typeParameterStart"; // typeParameterStart startsExpr
+ const nonNullAssertion = 61440; TokenType[TokenType["nonNullAssertion"] = nonNullAssertion] = "nonNullAssertion"; // nonNullAssertion
+ const _break = 62480; TokenType[TokenType["_break"] = _break] = "_break"; // break keyword
+ const _case = 63504; TokenType[TokenType["_case"] = _case] = "_case"; // case keyword
+ const _catch = 64528; TokenType[TokenType["_catch"] = _catch] = "_catch"; // catch keyword
+ const _continue = 65552; TokenType[TokenType["_continue"] = _continue] = "_continue"; // continue keyword
+ const _debugger = 66576; TokenType[TokenType["_debugger"] = _debugger] = "_debugger"; // debugger keyword
+ const _default = 67600; TokenType[TokenType["_default"] = _default] = "_default"; // default keyword
+ const _do = 68624; TokenType[TokenType["_do"] = _do] = "_do"; // do keyword
+ const _else = 69648; TokenType[TokenType["_else"] = _else] = "_else"; // else keyword
+ const _finally = 70672; TokenType[TokenType["_finally"] = _finally] = "_finally"; // finally keyword
+ const _for = 71696; TokenType[TokenType["_for"] = _for] = "_for"; // for keyword
+ const _function = 73232; TokenType[TokenType["_function"] = _function] = "_function"; // function keyword startsExpr
+ const _if = 73744; TokenType[TokenType["_if"] = _if] = "_if"; // if keyword
+ const _return = 74768; TokenType[TokenType["_return"] = _return] = "_return"; // return keyword
+ const _switch = 75792; TokenType[TokenType["_switch"] = _switch] = "_switch"; // switch keyword
+ const _throw = 77456; TokenType[TokenType["_throw"] = _throw] = "_throw"; // throw keyword prefix startsExpr
+ const _try = 77840; TokenType[TokenType["_try"] = _try] = "_try"; // try keyword
+ const _var = 78864; TokenType[TokenType["_var"] = _var] = "_var"; // var keyword
+ const _let = 79888; TokenType[TokenType["_let"] = _let] = "_let"; // let keyword
+ const _const = 80912; TokenType[TokenType["_const"] = _const] = "_const"; // const keyword
+ const _while = 81936; TokenType[TokenType["_while"] = _while] = "_while"; // while keyword
+ const _with = 82960; TokenType[TokenType["_with"] = _with] = "_with"; // with keyword
+ const _new = 84496; TokenType[TokenType["_new"] = _new] = "_new"; // new keyword startsExpr
+ const _this = 85520; TokenType[TokenType["_this"] = _this] = "_this"; // this keyword startsExpr
+ const _super = 86544; TokenType[TokenType["_super"] = _super] = "_super"; // super keyword startsExpr
+ const _class = 87568; TokenType[TokenType["_class"] = _class] = "_class"; // class keyword startsExpr
+ const _extends = 88080; TokenType[TokenType["_extends"] = _extends] = "_extends"; // extends keyword
+ const _export = 89104; TokenType[TokenType["_export"] = _export] = "_export"; // export keyword
+ const _import = 90640; TokenType[TokenType["_import"] = _import] = "_import"; // import keyword startsExpr
+ const _yield = 91664; TokenType[TokenType["_yield"] = _yield] = "_yield"; // yield keyword startsExpr
+ const _null = 92688; TokenType[TokenType["_null"] = _null] = "_null"; // null keyword startsExpr
+ const _true = 93712; TokenType[TokenType["_true"] = _true] = "_true"; // true keyword startsExpr
+ const _false = 94736; TokenType[TokenType["_false"] = _false] = "_false"; // false keyword startsExpr
+ const _in = 95256; TokenType[TokenType["_in"] = _in] = "_in"; // in prec:8 keyword
+ const _instanceof = 96280; TokenType[TokenType["_instanceof"] = _instanceof] = "_instanceof"; // instanceof prec:8 keyword
+ const _typeof = 97936; TokenType[TokenType["_typeof"] = _typeof] = "_typeof"; // typeof keyword prefix startsExpr
+ const _void = 98960; TokenType[TokenType["_void"] = _void] = "_void"; // void keyword prefix startsExpr
+ const _delete = 99984; TokenType[TokenType["_delete"] = _delete] = "_delete"; // delete keyword prefix startsExpr
+ const _async = 100880; TokenType[TokenType["_async"] = _async] = "_async"; // async keyword startsExpr
+ const _get = 101904; TokenType[TokenType["_get"] = _get] = "_get"; // get keyword startsExpr
+ const _set = 102928; TokenType[TokenType["_set"] = _set] = "_set"; // set keyword startsExpr
+ const _declare = 103952; TokenType[TokenType["_declare"] = _declare] = "_declare"; // declare keyword startsExpr
+ const _readonly = 104976; TokenType[TokenType["_readonly"] = _readonly] = "_readonly"; // readonly keyword startsExpr
+ const _abstract = 106000; TokenType[TokenType["_abstract"] = _abstract] = "_abstract"; // abstract keyword startsExpr
+ const _static = 107024; TokenType[TokenType["_static"] = _static] = "_static"; // static keyword startsExpr
+ const _public = 107536; TokenType[TokenType["_public"] = _public] = "_public"; // public keyword
+ const _private = 108560; TokenType[TokenType["_private"] = _private] = "_private"; // private keyword
+ const _protected = 109584; TokenType[TokenType["_protected"] = _protected] = "_protected"; // protected keyword
+ const _override = 110608; TokenType[TokenType["_override"] = _override] = "_override"; // override keyword
+ const _as = 112144; TokenType[TokenType["_as"] = _as] = "_as"; // as keyword startsExpr
+ const _enum = 113168; TokenType[TokenType["_enum"] = _enum] = "_enum"; // enum keyword startsExpr
+ const _type = 114192; TokenType[TokenType["_type"] = _type] = "_type"; // type keyword startsExpr
+ const _implements = 115216; TokenType[TokenType["_implements"] = _implements] = "_implements"; // implements keyword startsExpr
+})(TokenType || (TokenType = {}));
+export function formatTokenType(tokenType) {
+ switch (tokenType) {
+ case TokenType.num:
+ return "num";
+ case TokenType.bigint:
+ return "bigint";
+ case TokenType.decimal:
+ return "decimal";
+ case TokenType.regexp:
+ return "regexp";
+ case TokenType.string:
+ return "string";
+ case TokenType.name:
+ return "name";
+ case TokenType.eof:
+ return "eof";
+ case TokenType.bracketL:
+ return "[";
+ case TokenType.bracketR:
+ return "]";
+ case TokenType.braceL:
+ return "{";
+ case TokenType.braceBarL:
+ return "{|";
+ case TokenType.braceR:
+ return "}";
+ case TokenType.braceBarR:
+ return "|}";
+ case TokenType.parenL:
+ return "(";
+ case TokenType.parenR:
+ return ")";
+ case TokenType.comma:
+ return ",";
+ case TokenType.semi:
+ return ";";
+ case TokenType.colon:
+ return ":";
+ case TokenType.doubleColon:
+ return "::";
+ case TokenType.dot:
+ return ".";
+ case TokenType.question:
+ return "?";
+ case TokenType.questionDot:
+ return "?.";
+ case TokenType.arrow:
+ return "=>";
+ case TokenType.template:
+ return "template";
+ case TokenType.ellipsis:
+ return "...";
+ case TokenType.backQuote:
+ return "`";
+ case TokenType.dollarBraceL:
+ return "${";
+ case TokenType.at:
+ return "@";
+ case TokenType.hash:
+ return "#";
+ case TokenType.eq:
+ return "=";
+ case TokenType.assign:
+ return "_=";
+ case TokenType.preIncDec:
+ return "++/--";
+ case TokenType.postIncDec:
+ return "++/--";
+ case TokenType.bang:
+ return "!";
+ case TokenType.tilde:
+ return "~";
+ case TokenType.pipeline:
+ return "|>";
+ case TokenType.nullishCoalescing:
+ return "??";
+ case TokenType.logicalOR:
+ return "||";
+ case TokenType.logicalAND:
+ return "&&";
+ case TokenType.bitwiseOR:
+ return "|";
+ case TokenType.bitwiseXOR:
+ return "^";
+ case TokenType.bitwiseAND:
+ return "&";
+ case TokenType.equality:
+ return "==/!=";
+ case TokenType.lessThan:
+ return "<";
+ case TokenType.greaterThan:
+ return ">";
+ case TokenType.relationalOrEqual:
+ return "<=/>=";
+ case TokenType.bitShiftL:
+ return "<<";
+ case TokenType.bitShiftR:
+ return ">>/>>>";
+ case TokenType.plus:
+ return "+";
+ case TokenType.minus:
+ return "-";
+ case TokenType.modulo:
+ return "%";
+ case TokenType.star:
+ return "*";
+ case TokenType.slash:
+ return "/";
+ case TokenType.exponent:
+ return "**";
+ case TokenType.jsxName:
+ return "jsxName";
+ case TokenType.jsxText:
+ return "jsxText";
+ case TokenType.jsxEmptyText:
+ return "jsxEmptyText";
+ case TokenType.jsxTagStart:
+ return "jsxTagStart";
+ case TokenType.jsxTagEnd:
+ return "jsxTagEnd";
+ case TokenType.typeParameterStart:
+ return "typeParameterStart";
+ case TokenType.nonNullAssertion:
+ return "nonNullAssertion";
+ case TokenType._break:
+ return "break";
+ case TokenType._case:
+ return "case";
+ case TokenType._catch:
+ return "catch";
+ case TokenType._continue:
+ return "continue";
+ case TokenType._debugger:
+ return "debugger";
+ case TokenType._default:
+ return "default";
+ case TokenType._do:
+ return "do";
+ case TokenType._else:
+ return "else";
+ case TokenType._finally:
+ return "finally";
+ case TokenType._for:
+ return "for";
+ case TokenType._function:
+ return "function";
+ case TokenType._if:
+ return "if";
+ case TokenType._return:
+ return "return";
+ case TokenType._switch:
+ return "switch";
+ case TokenType._throw:
+ return "throw";
+ case TokenType._try:
+ return "try";
+ case TokenType._var:
+ return "var";
+ case TokenType._let:
+ return "let";
+ case TokenType._const:
+ return "const";
+ case TokenType._while:
+ return "while";
+ case TokenType._with:
+ return "with";
+ case TokenType._new:
+ return "new";
+ case TokenType._this:
+ return "this";
+ case TokenType._super:
+ return "super";
+ case TokenType._class:
+ return "class";
+ case TokenType._extends:
+ return "extends";
+ case TokenType._export:
+ return "export";
+ case TokenType._import:
+ return "import";
+ case TokenType._yield:
+ return "yield";
+ case TokenType._null:
+ return "null";
+ case TokenType._true:
+ return "true";
+ case TokenType._false:
+ return "false";
+ case TokenType._in:
+ return "in";
+ case TokenType._instanceof:
+ return "instanceof";
+ case TokenType._typeof:
+ return "typeof";
+ case TokenType._void:
+ return "void";
+ case TokenType._delete:
+ return "delete";
+ case TokenType._async:
+ return "async";
+ case TokenType._get:
+ return "get";
+ case TokenType._set:
+ return "set";
+ case TokenType._declare:
+ return "declare";
+ case TokenType._readonly:
+ return "readonly";
+ case TokenType._abstract:
+ return "abstract";
+ case TokenType._static:
+ return "static";
+ case TokenType._public:
+ return "public";
+ case TokenType._private:
+ return "private";
+ case TokenType._protected:
+ return "protected";
+ case TokenType._override:
+ return "override";
+ case TokenType._as:
+ return "as";
+ case TokenType._enum:
+ return "enum";
+ case TokenType._type:
+ return "type";
+ case TokenType._implements:
+ return "implements";
+ default:
+ return "";
+ }
+}
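Because every token type above carries its metadata in bit fields (the low four bits are operator precedence, and the higher bits are flags such as IS_KEYWORD, IS_ASSIGN, IS_PREFIX), a consumer can decode a token type value with plain masking. The helpers below are a minimal sketch introduced here for illustration; only the mask constants and formatTokenType come from this module.

import {TokenType, formatTokenType} from "./types";

function getPrecedence(tokenType) {
  // Low 4 bits: operator precedence; 0 means the token is not an operator.
  return tokenType & TokenType.PRECEDENCE_MASK;
}

function isKeyword(tokenType) {
  return (tokenType & TokenType.IS_KEYWORD) !== 0;
}

function isAssign(tokenType) {
  return (tokenType & TokenType.IS_ASSIGN) !== 0;
}

// e.g. getPrecedence(TokenType.plus) === 10, isKeyword(TokenType._if) === true,
// and formatTokenType(TokenType.plus) === "+".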