commit
96a7343142
@@ -14,12 +14,16 @@ module.exports = function(api) {
 let convertESM = true;
 let ignoreLib = true;
 let includeRuntime = false;
+const nodeVersion = "6.9";
 
 switch (env) {
 // Configs used during bundling builds.
 case "babel-parser":
 convertESM = false;
 ignoreLib = false;
+envOpts.targets = {
+node: nodeVersion,
+};
 break;
 case "standalone":
 convertESM = false;
@@ -29,7 +33,7 @@ module.exports = function(api) {
 case "production":
 // Config during builds before publish.
 envOpts.targets = {
-node: "6.9",
+node: nodeVersion,
 };
 break;
 case "development":
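The minimum Node target is now a single `nodeVersion` constant shared by the bundling and publish configs. A minimal, illustrative sketch of the pattern (standalone, not the repository's actual config; `envOpts.targets` follows the documented @babel/preset-env `targets` shape):

```js
// Sketch: share one Node version constant across several env-specific configs.
const nodeVersion = "6.9";

function makeEnvOptions(env) {
  const envOpts = {};
  switch (env) {
    case "babel-parser":
    case "production":
      // Both builds target the same minimum Node release.
      envOpts.targets = { node: nodeVersion };
      break;
    default:
      break;
  }
  return envOpts;
}

console.log(makeEnvOptions("production")); // { targets: { node: "6.9" } }
```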
@@ -30,7 +30,7 @@
 "devDependencies": {
 "@babel/helper-fixtures": "^7.2.0",
 "charcodes": "0.1.0",
-"unicode-11.0.0": "^0.7.7"
+"unicode-11.0.0": "^0.7.8"
 },
 "bin": {
 "parser": "./bin/babel-parser.js"
@@ -60,11 +60,15 @@ function generate(chars) {
 const startData = generate(start);
 const contData = generate(cont);
 
+console.log("/* prettier-ignore */");
 console.log('let nonASCIIidentifierStartChars = "' + startData.nonASCII + '";');
+console.log("/* prettier-ignore */");
 console.log('let nonASCIIidentifierChars = "' + contData.nonASCII + '";');
+console.log("/* prettier-ignore */");
 console.log(
 "const astralIdentifierStartCodes = " + JSON.stringify(startData.astral) + ";"
 );
+console.log("/* prettier-ignore */");
 console.log(
 "const astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";"
 );
@@ -1,7 +1,7 @@
 // @flow
 
 import type { Options } from "../options";
-import { reservedWords } from "../util/identifier";
+import { isES2015ReservedWord } from "../util/identifier";
 
 import type State from "../tokenizer/state";
 import type { PluginsMap } from "./index";
@@ -16,21 +16,21 @@ export default class BaseParser {
 
 // Initialized by Tokenizer
 state: State;
-input: string;
 
 isReservedWord(word: string): boolean {
 if (word === "await") {
 return this.inModule;
 } else {
-return reservedWords[6](word);
+return isES2015ReservedWord(word);
 }
 }
 
 hasPlugin(name: string): boolean {
-return Object.hasOwnProperty.call(this.plugins, name);
+return this.plugins.has(name);
 }
 
 getPluginOption(plugin: string, name: string) {
-if (this.hasPlugin(plugin)) return this.plugins[plugin][name];
+// $FlowIssue
+if (this.hasPlugin(plugin)) return this.plugins.get(plugin)[name];
 }
 }
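The plugin table backing `hasPlugin`/`getPluginOption` is now a `Map` rather than a plain object. A small standalone sketch of the lookup pattern, not the parser class itself (names are illustrative):

```js
// Sketch: plugin lookup backed by a Map instead of a plain object.
class PluginStore {
  constructor(entries) {
    this.plugins = new Map(entries); // Map<string, { [option: string]: any }>
  }

  hasPlugin(name) {
    // Map#has never consults Object.prototype, unlike `name in obj`.
    return this.plugins.has(name);
  }

  getPluginOption(plugin, name) {
    if (this.hasPlugin(plugin)) return this.plugins.get(plugin)[name];
  }
}

const store = new PluginStore([["flow", { all: true }]]);
console.log(store.hasPlugin("flow")); // true
console.log(store.getPluginOption("flow", "all")); // true
console.log(store.hasPlugin("hasOwnProperty")); // false, no prototype leakage
```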
@@ -21,7 +21,11 @@
 import { types as tt, type TokenType } from "../tokenizer/types";
 import * as N from "../types";
 import LValParser from "./lval";
-import { reservedWords } from "../util/identifier";
+import {
+isStrictReservedWord,
+isStrictBindReservedWord,
+isKeyword,
+} from "../util/identifier";
 import type { Pos, Position } from "../util/location";
 import * as charCodes from "charcodes";
 
@@ -525,7 +529,7 @@ export default class ExpressionParser extends LValParser {
 } else if (this.match(tt.questionDot)) {
 this.expectPlugin("optionalChaining");
 state.optionalChainMember = true;
-if (noCalls && this.lookahead().type == tt.parenL) {
+if (noCalls && this.lookahead().type === tt.parenL) {
 state.stop = true;
 return base;
 }
@@ -869,9 +873,9 @@ export default class ExpressionParser extends LValParser {
 return this.parseFunction(node, false, false, true);
 } else if (
 canBeArrow &&
-!this.canInsertSemicolon() &&
 id.name === "async" &&
-this.match(tt.name)
+this.match(tt.name) &&
+!this.canInsertSemicolon()
 ) {
 const oldYOAIPAP = this.state.yieldOrAwaitInPossibleArrowParameters;
 const oldInAsync = this.state.inAsync;
@@ -886,7 +890,8 @@ export default class ExpressionParser extends LValParser {
 return node;
 }
 
-if (canBeArrow && !this.canInsertSemicolon() && this.eat(tt.arrow)) {
+if (canBeArrow && this.match(tt.arrow) && !this.canInsertSemicolon()) {
+this.next();
 const oldYOAIPAP = this.state.yieldOrAwaitInPossibleArrowParameters;
 this.state.yieldOrAwaitInPossibleArrowParameters = null;
 this.parseArrowExpression(node, [id]);
@@ -1146,7 +1151,11 @@ export default class ExpressionParser extends LValParser {
 
 const node = this.startNodeAt(startPos, startLoc);
 this.addExtra(node, "rawValue", value);
-this.addExtra(node, "raw", this.input.slice(startPos, this.state.end));
+this.addExtra(
+node,
+"raw",
+this.state.input.slice(startPos, this.state.end),
+);
 node.value = value;
 this.next();
 return this.finishNode(node, type);
@@ -1365,7 +1374,7 @@ export default class ExpressionParser extends LValParser {
 }
 }
 elem.value = {
-raw: this.input
+raw: this.state.input
 .slice(this.state.start, this.state.end)
 .replace(/\r\n?/g, "\n"),
 cooked: this.state.value,
@@ -1967,7 +1976,8 @@ export default class ExpressionParser extends LValParser {
 if (
 (name === "class" || name === "function") &&
 (this.state.lastTokEnd !== this.state.lastTokStart + 1 ||
-this.input.charCodeAt(this.state.lastTokStart) !== charCodes.dot)
+this.state.input.charCodeAt(this.state.lastTokStart) !==
+charCodes.dot)
 ) {
 this.state.context.pop();
 }
@@ -1991,8 +2001,8 @@ export default class ExpressionParser extends LValParser {
 ): void {
 if (
 this.state.strict &&
-(reservedWords.strict(word) ||
-(isBinding && reservedWords.strictBind(word)))
+(isStrictReservedWord(word) ||
+(isBinding && isStrictBindReservedWord(word)))
 ) {
 this.raise(startLoc, word + " is a reserved word in strict mode");
 }
@@ -2011,7 +2021,7 @@ export default class ExpressionParser extends LValParser {
 );
 }
 
-if (this.isReservedWord(word) || (checkKeywords && this.isKeyword(word))) {
+if (this.isReservedWord(word) || (checkKeywords && isKeyword(word))) {
 this.raise(startLoc, word + " is a reserved word");
 }
 }
@@ -2071,8 +2081,8 @@ export default class ExpressionParser extends LValParser {
 this.next();
 if (
 this.match(tt.semi) ||
-this.canInsertSemicolon() ||
-(!this.match(tt.star) && !this.state.type.startsExpr)
+(!this.match(tt.star) && !this.state.type.startsExpr) ||
+this.canInsertSemicolon()
 ) {
 node.delegate = false;
 node.argument = null;
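Several `&&`/`||` chains above were reordered so cheap token-type checks run before `canInsertSemicolon()`, which has to inspect the preceding source text. A rough, standalone sketch of why that ordering matters (the parser object here is a stand-in, not the real class):

```js
// Sketch: order && / || chains so the cheapest checks run first.
function makeParser(tokens) {
  let expensiveCalls = 0;
  return {
    match(type) {
      return tokens[0] === type; // cheap token-type comparison
    },
    canInsertSemicolon() {
      expensiveCalls++; // pretend this scans preceding whitespace with a regex
      return false;
    },
    get expensiveCalls() {
      return expensiveCalls;
    },
  };
}

const p = makeParser(["name"]);
// Old order: the expensive check runs even when the token can't start an arrow.
const oldOrder = !p.canInsertSemicolon() && p.match("arrow");
// New order: the token check short-circuits first.
const newOrder = p.match("arrow") && !p.canInsertSemicolon();
console.log(oldOrder, newOrder, p.expensiveCalls); // false false 1
```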
@@ -6,9 +6,7 @@ import type { PluginList } from "../plugin-utils";
 import { getOptions } from "../options";
 import StatementParser from "./statement";
 
-export type PluginsMap = {
-[key: string]: { [option: string]: any },
-};
+export type PluginsMap = Map<string, { [string]: any }>;
 
 export default class Parser extends StatementParser {
 // Forward-declaration so typescript plugin can override jsx plugin
@@ -22,7 +20,6 @@ export default class Parser extends StatementParser {
 
 this.options = options;
 this.inModule = this.options.sourceType === "module";
-this.input = input;
 this.plugins = pluginsMap(this.options.plugins);
 this.filename = options.sourceFilename;
 }
@@ -36,10 +33,10 @@ export default class Parser extends StatementParser {
 }
 
 function pluginsMap(plugins: PluginList): PluginsMap {
-const pluginMap: PluginsMap = (Object.create(null): Object);
+const pluginMap: PluginsMap = new Map();
 for (const plugin of plugins) {
-const [name, options = {}] = Array.isArray(plugin) ? plugin : [plugin, {}];
-if (!pluginMap[name]) pluginMap[name] = options || {};
+const [name, options] = Array.isArray(plugin) ? plugin : [plugin, {}];
+if (!pluginMap.has(name)) pluginMap.set(name, options || {});
 }
 return pluginMap;
 }
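`pluginsMap` now normalizes the mixed plugin list (bare names or `[name, options]` pairs) into a `Map`, keeping the first entry per name. A runnable sketch mirroring that behavior (the plugin names in the example are made up):

```js
// Sketch: normalize a plugin list into a Map<string, options>.
function pluginsMap(plugins) {
  const pluginMap = new Map();
  for (const plugin of plugins) {
    const [name, options] = Array.isArray(plugin) ? plugin : [plugin, {}];
    if (!pluginMap.has(name)) pluginMap.set(name, options || {});
  }
  return pluginMap;
}

const map = pluginsMap(["jsx", ["flow", { all: true }], "jsx"]);
console.log(map.get("flow")); // { all: true }
console.log(map.size); // 2 -- the duplicate "jsx" entry collapses into the first one
```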
@@ -21,7 +21,7 @@ export default class LocationParser extends CommentsParser {
 code?: string,
 } = {},
 ): empty {
-const loc = getLineInfo(this.input, pos);
+const loc = getLineInfo(this.state.input, pos);
 message += ` (${loc.line}:${loc.column})`;
 // $FlowIgnore
 const err: SyntaxError & { pos: number, loc: Position } = new SyntaxError(
@@ -56,9 +56,13 @@ export default class LValParser extends NodeUtils {
 
 case "ObjectExpression":
 node.type = "ObjectPattern";
-for (let index = 0; index < node.properties.length; index++) {
-const prop = node.properties[index];
-const isLast = index === node.properties.length - 1;
+for (
+let i = 0, length = node.properties.length, last = length - 1;
+i < length;
+i++
+) {
+const prop = node.properties[i];
+const isLast = i === last;
 this.toAssignableObjectExpressionProp(prop, isBinding, isLast);
 }
 break;
@@ -145,12 +149,10 @@ export default class LValParser extends NodeUtils {
 const arg = last.argument;
 this.toAssignable(arg, isBinding, contextDescription);
 if (
-[
-"Identifier",
-"MemberExpression",
-"ArrayPattern",
-"ObjectPattern",
-].indexOf(arg.type) === -1
+arg.type !== "Identifier" &&
+arg.type !== "MemberExpression" &&
+arg.type !== "ArrayPattern" &&
+arg.type !== "ObjectPattern"
 ) {
 this.unexpected(arg.start);
 }
@@ -426,14 +428,13 @@ export default class LValParser extends NodeUtils {
 }
 
 checkToRestConversion(node: SpreadElement): void {
-const validArgumentTypes = ["Identifier", "MemberExpression"];
-
-if (validArgumentTypes.indexOf(node.argument.type) !== -1) {
-return;
+if (
+node.argument.type !== "Identifier" &&
+node.argument.type !== "MemberExpression"
+) {
+this.raise(node.argument.start, "Invalid rest operator's argument");
 }
-
-this.raise(node.argument.start, "Invalid rest operator's argument");
 }
 
 checkCommaAfterRest(close: TokenType, kind: string): void {
 if (this.match(tt.comma)) {
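Both hunks above replace array-literal plus `indexOf` membership tests with chained comparisons, so no temporary array is allocated on each call. A minimal standalone sketch of the two equivalent forms:

```js
// Sketch: array-literal membership test vs. chained comparisons.
function isAssignableTypeWithIndexOf(type) {
  return (
    ["Identifier", "MemberExpression", "ArrayPattern", "ObjectPattern"].indexOf(
      type,
    ) !== -1
  );
}

function isAssignableTypeWithComparisons(type) {
  return (
    type === "Identifier" ||
    type === "MemberExpression" ||
    type === "ArrayPattern" ||
    type === "ObjectPattern"
  );
}

console.log(isAssignableTypeWithIndexOf("ArrayPattern")); // true
console.log(isAssignableTypeWithComparisons("ArrayPattern")); // true
console.log(isAssignableTypeWithComparisons("CallExpression")); // false
```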
@@ -7,8 +7,6 @@ import type { Comment, Node as NodeType, NodeBase } from "../types";
 
 // Start an AST node, attaching a start offset.
 
-const commentKeys = ["leadingComments", "trailingComments", "innerComments"];
-
 class Node implements NodeBase {
 constructor(parser: Parser, pos: number, loc: Position) {
 this.type = "";
@@ -31,16 +29,22 @@ class Node implements NodeBase {
 
 __clone(): this {
 // $FlowIgnore
-const node2: any = new Node();
-Object.keys(this).forEach(key => {
+const newNode: any = new Node();
+const keys = Object.keys(this);
+for (let i = 0, length = keys.length; i < length; i++) {
+const key = keys[i];
 // Do not clone comments that are already attached to the node
-if (commentKeys.indexOf(key) < 0) {
+if (
+key !== "leadingComments" &&
+key !== "trailingComments" &&
+key !== "innerComments"
+) {
 // $FlowIgnore
-node2[key] = this[key];
+newNode[key] = this[key];
 }
-});
+}
 
-return node2;
+return newNode;
 }
 }
 
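`__clone` drops the `commentKeys.indexOf` lookup and the `forEach` callback in favor of explicit key comparisons inside an indexed loop with a hoisted length. A standalone sketch of the same copy-without-comments idea (the real method copies onto a fresh `Node` instance):

```js
// Sketch: shallow-clone an object while skipping comment-related keys.
function cloneWithoutComments(source) {
  const target = {};
  const keys = Object.keys(source);
  for (let i = 0, length = keys.length; i < length; i++) {
    const key = keys[i];
    if (
      key !== "leadingComments" &&
      key !== "trailingComments" &&
      key !== "innerComments"
    ) {
      target[key] = source[key];
    }
  }
  return target;
}

console.log(cloneWithoutComments({ type: "Identifier", leadingComments: [] }));
// { type: "Identifier" }
```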
@@ -44,7 +44,7 @@ export default class StatementParser extends ExpressionParser {
 const directiveLiteral = this.startNodeAt(expr.start, expr.loc.start);
 const directive = this.startNodeAt(stmt.start, stmt.loc.start);
 
-const raw = this.input.slice(expr.start, expr.end);
+const raw = this.state.input.slice(expr.start, expr.end);
 const val = (directiveLiteral.value = raw.slice(1, -1)); // remove quotes
 
 this.addExtra(directiveLiteral, "raw", raw);
@@ -161,7 +161,7 @@ export default class StatementParser extends ExpressionParser {
 this.next();
 
 let result;
-if (starttype == tt._import) {
+if (starttype === tt._import) {
 result = this.parseImport(node);
 
 if (
@@ -551,7 +551,9 @@ export default class StatementParser extends ExpressionParser {
 parseThrowStatement(node: N.ThrowStatement): N.ThrowStatement {
 this.next();
 if (
-lineBreak.test(this.input.slice(this.state.lastTokEnd, this.state.start))
+lineBreak.test(
+this.state.input.slice(this.state.lastTokEnd, this.state.start),
+)
 ) {
 this.raise(this.state.lastTokEnd, "Illegal newline after throw");
 }
@@ -691,9 +693,9 @@ export default class StatementParser extends ExpressionParser {
 node.body = this.parseStatement(declaration);
 
 if (
-node.body.type == "ClassDeclaration" ||
-(node.body.type == "VariableDeclaration" && node.body.kind !== "var") ||
-(node.body.type == "FunctionDeclaration" &&
+node.body.type === "ClassDeclaration" ||
+(node.body.type === "VariableDeclaration" && node.body.kind !== "var") ||
+(node.body.type === "FunctionDeclaration" &&
 (this.state.strict || node.body.generator || node.body.async))
 ) {
 this.raise(node.body.start, "Invalid labeled declaration");
@@ -863,6 +865,7 @@ export default class StatementParser extends ExpressionParser {
 kind: TokenType,
 ): N.VariableDeclaration {
 const declarations = (node.declarations = []);
+const isTypescript = this.hasPlugin("typescript");
 // $FlowFixMe
 node.kind = kind.keyword;
 for (;;) {
@@ -877,7 +880,7 @@ export default class StatementParser extends ExpressionParser {
 ) {
 // `const` with no initializer is allowed in TypeScript.
 // It could be a declaration like `const x: number;`.
-if (!this.hasPlugin("typescript")) {
+if (!isTypescript) {
 this.unexpected();
 }
 } else if (
@@ -1278,7 +1281,7 @@ export default class StatementParser extends ExpressionParser {
 } else if (
 isSimple &&
 (key.name === "get" || key.name === "set") &&
-!(this.isLineTerminator() && this.match(tt.star))
+!(this.match(tt.star) && this.isLineTerminator())
 ) {
 // `get\n*` is an uninitialized property named 'get' followed by a generator.
 // a getter or setter
@@ -1521,7 +1524,7 @@ export default class StatementParser extends ExpressionParser {
 isAsyncFunction() {
 if (!this.isContextual("async")) return false;
 
-const { input, pos } = this.state;
+const { input, pos, length } = this.state;
 
 skipWhiteSpace.lastIndex = pos;
 const skip = skipWhiteSpace.exec(input);
@@ -1533,7 +1536,7 @@ export default class StatementParser extends ExpressionParser {
 return (
 !lineBreak.test(input.slice(pos, next)) &&
 input.slice(next, next + 8) === "function" &&
-(next + 8 === input.length || !isIdentifierChar(input.charAt(next + 8)))
+(next + 8 === length || !isIdentifierChar(input.charCodeAt(next + 8)))
 );
 }
 
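In `isAsyncFunction`, the lookahead now reads `length` off the state and passes a numeric character code to `isIdentifierChar`, since that predicate expects a code point rather than a one-character string. A simplified sketch of the distinction (the stand-in predicate below only handles ASCII; the real one also covers non-ASCII ranges):

```js
// Sketch: identifier-char predicates take a numeric code, so use charCodeAt.
function isIdentifierChar(code) {
  return (
    (code >= 48 && code <= 57) || // 0-9
    (code >= 65 && code <= 90) || // A-Z
    (code >= 97 && code <= 122) || // a-z
    code === 36 || // $
    code === 95 // _
  );
}

console.log(isIdentifierChar("x".charAt(0))); // false -- a string never passes numeric range checks
console.log(isIdentifierChar("x".charCodeAt(0))); // true -- 120 falls in a-z
```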
@@ -25,7 +25,7 @@ export default class UtilParser extends Tokenizer {
 
 isLookaheadRelational(op: "<" | ">"): boolean {
 const l = this.lookahead();
-return l.type == tt.relational && l.value == op;
+return l.type === tt.relational && l.value === op;
 }
 
 // TODO
@@ -87,7 +87,7 @@ export default class UtilParser extends Tokenizer {
 
 hasPrecedingLineBreak(): boolean {
 return lineBreak.test(
-this.input.slice(this.state.lastTokEnd, this.state.start),
+this.state.input.slice(this.state.lastTokEnd, this.state.start),
 );
 }
 
@@ -1124,7 +1124,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 node.types = [];
 this.expect(tt.bracketL);
 // We allow trailing commas
-while (this.state.pos < this.input.length && !this.match(tt.bracketR)) {
+while (this.state.pos < this.state.length && !this.match(tt.bracketR)) {
 node.types.push(this.flowParseType());
 if (this.match(tt.bracketR)) break;
 this.expect(tt.comma);
@@ -1190,9 +1190,6 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 case "any":
 return this.finishNode(node, "AnyTypeAnnotation");
-
-case "void":
-return this.finishNode(node, "VoidTypeAnnotation");
 
 case "bool":
 case "boolean":
 return this.finishNode(node, "BooleanTypeAnnotation");
@@ -1369,6 +1366,10 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 "NumberLiteralTypeAnnotation",
 );
 
+case tt._void:
+this.next();
+return this.finishNode(node, "VoidTypeAnnotation");
+
 case tt._null:
 this.next();
 return this.finishNode(node, "NullLiteralTypeAnnotation");
@@ -1398,7 +1399,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 const startPos = this.state.start,
 startLoc = this.state.startLoc;
 let type = this.flowParsePrimaryType();
-while (!this.canInsertSemicolon() && this.match(tt.bracketL)) {
+while (this.match(tt.bracketL) && !this.canInsertSemicolon()) {
 const node = this.startNodeAt(startPos, startLoc);
 node.elementType = type;
 this.expect(tt.bracketL);
@@ -1630,7 +1631,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 this.match(tt.name) &&
 (this.state.value === "type" ||
 this.state.value === "interface" ||
-this.state.value == "opaque")
+this.state.value === "opaque")
 ) {
 return false;
 }
@@ -1922,20 +1923,12 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 }
 }
 
-// don't consider `void` to be a keyword as then it'll use the void token type
-// and set startExpr
-isKeyword(name: string): boolean {
-if (this.state.inType && name === "void") {
-return false;
-} else {
-return super.isKeyword(name);
-}
-}
-
 // ensure that inside flow types, we bypass the jsx parser plugin
-readToken(code: number): void {
-const next = this.input.charCodeAt(this.state.pos + 1);
-if (
+getTokenFromCode(code: number): void {
+const next = this.state.input.charCodeAt(this.state.pos + 1);
+if (code === charCodes.leftCurlyBrace && next === charCodes.verticalBar) {
+return this.finishOp(tt.braceBarL, 2);
+} else if (
 this.state.inType &&
 (code === charCodes.greaterThan || code === charCodes.lessThan)
 ) {
@@ -1944,7 +1937,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 this.state.isIterator = true;
 return super.readWord();
 } else {
-return super.readToken(code);
+return super.getTokenFromCode(code);
 }
 }
 
@@ -2686,7 +2679,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 }
 
 readToken_mult_modulo(code: number): void {
-const next = this.input.charCodeAt(this.state.pos + 1);
+const next = this.state.input.charCodeAt(this.state.pos + 1);
 if (
 code === charCodes.asterisk &&
 next === charCodes.slash &&
@@ -2701,6 +2694,20 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 super.readToken_mult_modulo(code);
 }
 
+readToken_pipe_amp(code: number): void {
+const next = this.state.input.charCodeAt(this.state.pos + 1);
+if (
+code === charCodes.verticalBar &&
+next === charCodes.rightCurlyBrace
+) {
+// '|}'
+this.finishOp(tt.braceBarR, 2);
+return;
+}
+
+super.readToken_pipe_amp(code);
+}
+
 parseTopLevel(file: N.File, program: N.Program): N.File {
 const fileNode = super.parseTopLevel(file, program);
 if (this.state.hasFlowComment) {
@@ -2710,11 +2717,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 }
 
 skipBlockComment(): void {
-if (
-this.hasPlugin("flow") &&
-this.hasPlugin("flowComments") &&
-this.skipFlowComment()
-) {
+if (this.hasPlugin("flowComments") && this.skipFlowComment()) {
 if (this.state.hasFlowComment) {
 this.unexpected(
 null,
@@ -2727,8 +2730,8 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 return;
 }
 
-if (this.hasPlugin("flow") && this.state.hasFlowComment) {
-const end = this.input.indexOf("*-/", (this.state.pos += 2));
+if (this.state.hasFlowComment) {
+const end = this.state.input.indexOf("*-/", (this.state.pos += 2));
 if (end === -1) this.raise(this.state.pos - 2, "Unterminated comment");
 this.state.pos = end + 3;
 return;
@@ -2742,20 +2745,22 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 let shiftToFirstNonWhiteSpace = 2;
 while (
 [charCodes.space, charCodes.tab].includes(
-this.input.charCodeAt(pos + shiftToFirstNonWhiteSpace),
+this.state.input.charCodeAt(pos + shiftToFirstNonWhiteSpace),
 )
 ) {
 shiftToFirstNonWhiteSpace++;
 }
 
-const ch2 = this.input.charCodeAt(shiftToFirstNonWhiteSpace + pos);
-const ch3 = this.input.charCodeAt(shiftToFirstNonWhiteSpace + pos + 1);
+const ch2 = this.state.input.charCodeAt(shiftToFirstNonWhiteSpace + pos);
+const ch3 = this.state.input.charCodeAt(
+shiftToFirstNonWhiteSpace + pos + 1,
+);
 
 if (ch2 === charCodes.colon && ch3 === charCodes.colon) {
 return shiftToFirstNonWhiteSpace + 2; // check for /*::
 }
 if (
-this.input.slice(
+this.state.input.slice(
 shiftToFirstNonWhiteSpace + pos,
 shiftToFirstNonWhiteSpace + pos + 12,
 ) === "flow-include"
@@ -2769,7 +2774,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 }
 
 hasFlowCommentCompletion(): void {
-const end = this.input.indexOf("*/", this.state.pos);
+const end = this.state.input.indexOf("*/", this.state.pos);
 if (end === -1) {
 this.raise(this.state.pos, "Unterminated comment");
 }
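The flow plugin now hooks `getTokenFromCode` instead of a separate `readToken`, handles the `{|` / `|}` tokens itself, and defers everything else to `super`. A standalone sketch of that override chain with plain classes (class and token names here are illustrative, not the parser's actual API):

```js
// Sketch: a plugin layering extra token handling over a base tokenizer by
// overriding getTokenFromCode() and falling back to super.
class BaseTokenizer {
  getTokenFromCode(code) {
    return { type: "base", code };
  }
}

class FlowLikeTokenizer extends BaseTokenizer {
  constructor(input) {
    super();
    this.input = input;
    this.pos = 0;
  }

  getTokenFromCode(code) {
    const next = this.input.charCodeAt(this.pos + 1);
    // 123 is "{", 124 is "|": recognize the "{|" exact-object-type opener.
    if (code === 123 && next === 124) {
      this.pos += 2;
      return { type: "braceBarL" };
    }
    return super.getTokenFromCode(code);
  }
}

const tok = new FlowLikeTokenizer("{| x: number |}");
console.log(tok.getTokenFromCode(tok.input.charCodeAt(0))); // { type: "braceBarL" }
console.log(new BaseTokenizer().getTokenFromCode(40)); // { type: "base", code: 40 }
```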
@@ -79,11 +79,11 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 let out = "";
 let chunkStart = this.state.pos;
 for (;;) {
-if (this.state.pos >= this.input.length) {
+if (this.state.pos >= this.state.length) {
 this.raise(this.state.start, "Unterminated JSX contents");
 }
 
-const ch = this.input.charCodeAt(this.state.pos);
+const ch = this.state.input.charCodeAt(this.state.pos);
 
 switch (ch) {
 case charCodes.lessThan:
@@ -93,20 +93,20 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 ++this.state.pos;
 return this.finishToken(tt.jsxTagStart);
 }
-return this.getTokenFromCode(ch);
+return super.getTokenFromCode(ch);
 }
-out += this.input.slice(chunkStart, this.state.pos);
+out += this.state.input.slice(chunkStart, this.state.pos);
 return this.finishToken(tt.jsxText, out);
 
 case charCodes.ampersand:
-out += this.input.slice(chunkStart, this.state.pos);
+out += this.state.input.slice(chunkStart, this.state.pos);
 out += this.jsxReadEntity();
 chunkStart = this.state.pos;
 break;
 
 default:
 if (isNewLine(ch)) {
-out += this.input.slice(chunkStart, this.state.pos);
+out += this.state.input.slice(chunkStart, this.state.pos);
 out += this.jsxReadNewLine(true);
 chunkStart = this.state.pos;
 } else {
@@ -117,12 +117,12 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 }
 
 jsxReadNewLine(normalizeCRLF: boolean): string {
-const ch = this.input.charCodeAt(this.state.pos);
+const ch = this.state.input.charCodeAt(this.state.pos);
 let out;
 ++this.state.pos;
 if (
 ch === charCodes.carriageReturn &&
-this.input.charCodeAt(this.state.pos) === charCodes.lineFeed
+this.state.input.charCodeAt(this.state.pos) === charCodes.lineFeed
 ) {
 ++this.state.pos;
 out = normalizeCRLF ? "\n" : "\r\n";
@@ -139,25 +139,25 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 let out = "";
 let chunkStart = ++this.state.pos;
 for (;;) {
-if (this.state.pos >= this.input.length) {
+if (this.state.pos >= this.state.length) {
 this.raise(this.state.start, "Unterminated string constant");
 }
 
-const ch = this.input.charCodeAt(this.state.pos);
+const ch = this.state.input.charCodeAt(this.state.pos);
 if (ch === quote) break;
 if (ch === charCodes.ampersand) {
-out += this.input.slice(chunkStart, this.state.pos);
+out += this.state.input.slice(chunkStart, this.state.pos);
 out += this.jsxReadEntity();
 chunkStart = this.state.pos;
 } else if (isNewLine(ch)) {
-out += this.input.slice(chunkStart, this.state.pos);
+out += this.state.input.slice(chunkStart, this.state.pos);
 out += this.jsxReadNewLine(false);
 chunkStart = this.state.pos;
 } else {
 ++this.state.pos;
 }
 }
-out += this.input.slice(chunkStart, this.state.pos++);
+out += this.state.input.slice(chunkStart, this.state.pos++);
 return this.finishToken(tt.string, out);
 }
 
@@ -165,11 +165,11 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 let str = "";
 let count = 0;
 let entity;
-let ch = this.input[this.state.pos];
+let ch = this.state.input[this.state.pos];
 
 const startPos = ++this.state.pos;
-while (this.state.pos < this.input.length && count++ < 10) {
-ch = this.input[this.state.pos++];
+while (this.state.pos < this.state.length && count++ < 10) {
+ch = this.state.input[this.state.pos++];
 if (ch === ";") {
 if (str[0] === "#") {
 if (str[1] === "x") {
@@ -208,11 +208,11 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 let ch;
 const start = this.state.pos;
 do {
-ch = this.input.charCodeAt(++this.state.pos);
+ch = this.state.input.charCodeAt(++this.state.pos);
 } while (isIdentifierChar(ch) || ch === charCodes.dash);
 return this.finishToken(
 tt.jsxName,
-this.input.slice(start, this.state.pos),
+this.state.input.slice(start, this.state.pos),
 );
 }
 
@@ -520,8 +520,8 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 }
 }
 
-readToken(code: number): void {
-if (this.state.inPropertyName) return super.readToken(code);
+getTokenFromCode(code: number): void {
+if (this.state.inPropertyName) return super.getTokenFromCode(code);
 
 const context = this.curContext();
 
@@ -557,7 +557,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 return this.finishToken(tt.jsxTagStart);
 }
 
-return super.readToken(code);
+return super.getTokenFromCode(code);
 }
 
 updateContext(prevType: TokenType): void {
@@ -1324,7 +1324,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 }
 
 tsCheckLineTerminatorAndMatch(tokenType: TokenType, next: boolean) {
-return !this.isLineTerminator() && (next || this.match(tokenType));
+return (next || this.match(tokenType)) && !this.isLineTerminator();
 }
 
 tsTryParseGenericAsyncArrowFunction(
@@ -2218,11 +2218,11 @@ export default (superClass: Class<Parser>): Class<Parser> =>
 }
 
 // ensure that inside types, we bypass the jsx parser plugin
-readToken(code: number): void {
+getTokenFromCode(code: number): void {
 if (this.state.inType && (code === 62 || code === 60)) {
 return this.finishOp(tt.relational, 1);
 } else {
-return super.readToken(code);
+return super.getTokenFromCode(code);
 }
 }
 
@@ -107,7 +107,9 @@ tt._function.updateContext = tt._class.updateContext = function(prevType) {
 prevType !== tt._else &&
 !(
 prevType === tt._return &&
-lineBreak.test(this.input.slice(this.state.lastTokEnd, this.state.start))
+lineBreak.test(
+this.state.input.slice(this.state.lastTokEnd, this.state.start),
+)
 ) &&
 !(
 (prevType === tt.colon || prevType === tt.braceL) &&
@@ -3,11 +3,7 @@
 import type { Options } from "../options";
 import type { Position } from "../util/location";
 import * as charCodes from "charcodes";
-import {
-isIdentifierStart,
-isIdentifierChar,
-isKeyword,
-} from "../util/identifier";
+import { isIdentifierStart, isIdentifierChar } from "../util/identifier";
 import { types as tt, keywords as keywordTypes, type TokenType } from "./types";
 import { type TokContext, types as ct } from "./context";
 import LocationParser from "../parser/location";
@@ -20,7 +16,7 @@ import {
 } from "../util/whitespace";
 import State from "./state";
 
-const VALID_REGEX_FLAGS = "gmsiyu";
+const VALID_REGEX_FLAGS = new Set(["g", "m", "s", "i", "y", "u"]);
 
 // The following character codes are forbidden from being
 // an immediate sibling of NumericLiteralSeparator _
@@ -157,12 +153,6 @@ export default class Tokenizer extends LocationParser {
 
 // TODO
 
-isKeyword(word: string): boolean {
-return isKeyword(word);
-}
-
-// TODO
-
 lookahead(): State {
 const old = this.state;
 this.state = old.clone(true);
@@ -185,7 +175,7 @@ export default class Tokenizer extends LocationParser {
 this.state.pos = this.state.start;
 while (this.state.pos < this.state.lineStart) {
 this.state.lineStart =
-this.input.lastIndexOf("\n", this.state.lineStart - 2) + 1;
+this.state.input.lastIndexOf("\n", this.state.lineStart - 2) + 1;
 --this.state.curLine;
 }
 this.nextToken();
@@ -206,7 +196,7 @@ export default class Tokenizer extends LocationParser {
 this.state.octalPosition = null;
 this.state.start = this.state.pos;
 this.state.startLoc = this.state.curPosition();
-if (this.state.pos >= this.input.length) {
+if (this.state.pos >= this.state.length) {
 this.finishToken(tt.eof);
 return;
 }
@@ -214,17 +204,7 @@ export default class Tokenizer extends LocationParser {
 if (curContext.override) {
 curContext.override(this);
 } else {
-this.readToken(this.input.codePointAt(this.state.pos));
+this.getTokenFromCode(this.state.input.codePointAt(this.state.pos));
 }
 }
-
-readToken(code: number): void {
-// Identifier or keyword. '\uXXXX' sequences are allowed in
-// identifiers, so '\' also dispatches to that.
-if (isIdentifierStart(code) || code === charCodes.backslash) {
-this.readWord();
-} else {
-this.getTokenFromCode(code);
-}
-}
@@ -254,14 +234,14 @@ export default class Tokenizer extends LocationParser {
 skipBlockComment(): void {
 const startLoc = this.state.curPosition();
 const start = this.state.pos;
-const end = this.input.indexOf("*/", (this.state.pos += 2));
+const end = this.state.input.indexOf("*/", (this.state.pos += 2));
 if (end === -1) this.raise(this.state.pos - 2, "Unterminated comment");
 
 this.state.pos = end + 2;
 lineBreakG.lastIndex = start;
 let match;
 while (
-(match = lineBreakG.exec(this.input)) &&
+(match = lineBreakG.exec(this.state.input)) &&
 match.index < this.state.pos
 ) {
 ++this.state.curLine;
@@ -270,7 +250,7 @@ export default class Tokenizer extends LocationParser {
 
 this.pushComment(
 true,
-this.input.slice(start + 2, end),
+this.state.input.slice(start + 2, end),
 start,
 this.state.pos,
 startLoc,
@@ -281,22 +261,22 @@ export default class Tokenizer extends LocationParser {
 skipLineComment(startSkip: number): void {
 const start = this.state.pos;
 const startLoc = this.state.curPosition();
-let ch = this.input.charCodeAt((this.state.pos += startSkip));
-if (this.state.pos < this.input.length) {
+let ch = this.state.input.charCodeAt((this.state.pos += startSkip));
+if (this.state.pos < this.state.length) {
 while (
 ch !== charCodes.lineFeed &&
 ch !== charCodes.carriageReturn &&
 ch !== charCodes.lineSeparator &&
 ch !== charCodes.paragraphSeparator &&
-++this.state.pos < this.input.length
+++this.state.pos < this.state.length
 ) {
-ch = this.input.charCodeAt(this.state.pos);
+ch = this.state.input.charCodeAt(this.state.pos);
 }
 }
 
 this.pushComment(
 false,
-this.input.slice(start + startSkip, this.state.pos),
+this.state.input.slice(start + startSkip, this.state.pos),
 start,
 this.state.pos,
 startLoc,
@@ -308,12 +288,18 @@ export default class Tokenizer extends LocationParser {
 // whitespace and comments, and.
 
 skipSpace(): void {
-loop: while (this.state.pos < this.input.length) {
-const ch = this.input.charCodeAt(this.state.pos);
+loop: while (this.state.pos < this.state.length) {
+const ch = this.state.input.charCodeAt(this.state.pos);
 switch (ch) {
+case charCodes.space:
+case charCodes.nonBreakingSpace:
+case charCodes.tab:
+++this.state.pos;
+break;
 case charCodes.carriageReturn:
 if (
-this.input.charCodeAt(this.state.pos + 1) === charCodes.lineFeed
+this.state.input.charCodeAt(this.state.pos + 1) ===
+charCodes.lineFeed
 ) {
 ++this.state.pos;
 }
@@ -327,7 +313,7 @@ export default class Tokenizer extends LocationParser {
 break;
 
 case charCodes.slash:
-switch (this.input.charCodeAt(this.state.pos + 1)) {
+switch (this.state.input.charCodeAt(this.state.pos + 1)) {
 case charCodes.asterisk:
 this.skipBlockComment();
 break;
@@ -382,7 +368,7 @@ export default class Tokenizer extends LocationParser {
 }
 
 const nextPos = this.state.pos + 1;
-const next = this.input.charCodeAt(nextPos);
+const next = this.state.input.charCodeAt(nextPos);
 if (next >= charCodes.digit0 && next <= charCodes.digit9) {
 this.raise(this.state.pos, "Unexpected digit after hash token");
 }
@@ -405,13 +391,13 @@ export default class Tokenizer extends LocationParser {
 }
 
 readToken_dot(): void {
-const next = this.input.charCodeAt(this.state.pos + 1);
+const next = this.state.input.charCodeAt(this.state.pos + 1);
 if (next >= charCodes.digit0 && next <= charCodes.digit9) {
 this.readNumber(true);
 return;
 }
 
-const next2 = this.input.charCodeAt(this.state.pos + 2);
+const next2 = this.state.input.charCodeAt(this.state.pos + 2);
 if (next === charCodes.dot && next2 === charCodes.dot) {
 this.state.pos += 3;
 this.finishToken(tt.ellipsis);
@@ -429,7 +415,7 @@ export default class Tokenizer extends LocationParser {
 return;
 }
 
-const next = this.input.charCodeAt(this.state.pos + 1);
+const next = this.state.input.charCodeAt(this.state.pos + 1);
 if (next === charCodes.equalsTo) {
 this.finishOp(tt.assign, 2);
 } else {
@@ -438,12 +424,12 @@ export default class Tokenizer extends LocationParser {
 }
 
 readToken_interpreter(): boolean {
-if (this.state.pos !== 0 || this.state.input.length < 2) return false;
+if (this.state.pos !== 0 || this.state.length < 2) return false;
 
 const start = this.state.pos;
 this.state.pos += 1;
 
-let ch = this.input.charCodeAt(this.state.pos);
+let ch = this.state.input.charCodeAt(this.state.pos);
 if (ch !== charCodes.exclamationMark) return false;
 
 while (
@@ -451,12 +437,12 @@ export default class Tokenizer extends LocationParser {
 ch !== charCodes.carriageReturn &&
 ch !== charCodes.lineSeparator &&
 ch !== charCodes.paragraphSeparator &&
-++this.state.pos < this.input.length
+++this.state.pos < this.state.length
 ) {
-ch = this.input.charCodeAt(this.state.pos);
+ch = this.state.input.charCodeAt(this.state.pos);
 }
 
-const value = this.input.slice(start + 2, this.state.pos);
+const value = this.state.input.slice(start + 2, this.state.pos);
 
 this.finishToken(tt.interpreterDirective, value);
 
@@ -467,13 +453,13 @@ export default class Tokenizer extends LocationParser {
 // '%*'
 let type = code === charCodes.asterisk ? tt.star : tt.modulo;
 let width = 1;
-let next = this.input.charCodeAt(this.state.pos + 1);
+let next = this.state.input.charCodeAt(this.state.pos + 1);
 const exprAllowed = this.state.exprAllowed;
 
 // Exponentiation operator **
 if (code === charCodes.asterisk && next === charCodes.asterisk) {
 width++;
-next = this.input.charCodeAt(this.state.pos + 2);
+next = this.state.input.charCodeAt(this.state.pos + 2);
 type = tt.exponent;
 }
 
@@ -486,11 +472,13 @@ export default class Tokenizer extends LocationParser {
 }
 
 readToken_pipe_amp(code: number): void {
-// '|&'
-const next = this.input.charCodeAt(this.state.pos + 1);
+// '||' '&&' '||=' '&&='
+const next = this.state.input.charCodeAt(this.state.pos + 1);
 
 if (next === code) {
-if (this.input.charCodeAt(this.state.pos + 2) === charCodes.equalsTo) {
+if (
+this.state.input.charCodeAt(this.state.pos + 2) === charCodes.equalsTo
+) {
 this.finishOp(tt.assign, 3);
 } else {
 this.finishOp(
@@ -506,10 +494,6 @@ export default class Tokenizer extends LocationParser {
 if (next === charCodes.greaterThan) {
 this.finishOp(tt.pipeline, 2);
 return;
-} else if (next === charCodes.rightCurlyBrace && this.hasPlugin("flow")) {
-// '|}'
-this.finishOp(tt.braceBarR, 2);
-return;
 }
 }
 
@@ -526,7 +510,7 @@ export default class Tokenizer extends LocationParser {
 
 readToken_caret(): void {
 // '^'
-const next = this.input.charCodeAt(this.state.pos + 1);
+const next = this.state.input.charCodeAt(this.state.pos + 1);
 if (next === charCodes.equalsTo) {
 this.finishOp(tt.assign, 2);
 } else {
@@ -536,14 +520,17 @@ export default class Tokenizer extends LocationParser {
 
 readToken_plus_min(code: number): void {
 // '+-'
-const next = this.input.charCodeAt(this.state.pos + 1);
+const next = this.state.input.charCodeAt(this.state.pos + 1);
 
 if (next === code) {
 if (
 next === charCodes.dash &&
 !this.inModule &&
-this.input.charCodeAt(this.state.pos + 2) === charCodes.greaterThan &&
-lineBreak.test(this.input.slice(this.state.lastTokEnd, this.state.pos))
+this.state.input.charCodeAt(this.state.pos + 2) ===
+charCodes.greaterThan &&
+lineBreak.test(
+this.state.input.slice(this.state.lastTokEnd, this.state.pos),
+)
 ) {
 // A `-->` line comment
 this.skipLineComment(3);
@@ -564,16 +551,20 @@ export default class Tokenizer extends LocationParser {
 
 readToken_lt_gt(code: number): void {
 // '<>'
-const next = this.input.charCodeAt(this.state.pos + 1);
+const next = this.state.input.charCodeAt(this.state.pos + 1);
 let size = 1;
 
 if (next === code) {
 size =
 code === charCodes.greaterThan &&
-this.input.charCodeAt(this.state.pos + 2) === charCodes.greaterThan
+this.state.input.charCodeAt(this.state.pos + 2) ===
+charCodes.greaterThan
 ? 3
 : 2;
-if (this.input.charCodeAt(this.state.pos + size) === charCodes.equalsTo) {
+if (
+this.state.input.charCodeAt(this.state.pos + size) ===
+charCodes.equalsTo
+) {
 this.finishOp(tt.assign, size + 1);
 return;
 }
@@ -585,8 +576,8 @@ export default class Tokenizer extends LocationParser {
 next === charCodes.exclamationMark &&
 code === charCodes.lessThan &&
 !this.inModule &&
-this.input.charCodeAt(this.state.pos + 2) === charCodes.dash &&
-this.input.charCodeAt(this.state.pos + 3) === charCodes.dash
+this.state.input.charCodeAt(this.state.pos + 2) === charCodes.dash &&
+this.state.input.charCodeAt(this.state.pos + 3) === charCodes.dash
 ) {
 // `<!--`, an XML-style comment that should be interpreted as a line comment
 this.skipLineComment(4);
@@ -605,11 +596,11 @@ export default class Tokenizer extends LocationParser {
 
 readToken_eq_excl(code: number): void {
 // '=!'
-const next = this.input.charCodeAt(this.state.pos + 1);
+const next = this.state.input.charCodeAt(this.state.pos + 1);
 if (next === charCodes.equalsTo) {
 this.finishOp(
 tt.equality,
-this.input.charCodeAt(this.state.pos + 2) === charCodes.equalsTo
+this.state.input.charCodeAt(this.state.pos + 2) === charCodes.equalsTo
 ? 3
 : 2,
 );
@@ -626,8 +617,8 @@ export default class Tokenizer extends LocationParser {
 
 readToken_question(): void {
 // '?'
-const next = this.input.charCodeAt(this.state.pos + 1);
-const next2 = this.input.charCodeAt(this.state.pos + 2);
+const next = this.state.input.charCodeAt(this.state.pos + 1);
+const next2 = this.state.input.charCodeAt(this.state.pos + 2);
 if (next === charCodes.questionMark && !this.state.inType) {
 if (next2 === charCodes.equalsTo) {
 // '??='
@@ -651,10 +642,6 @@ export default class Tokenizer extends LocationParser {
 
 getTokenFromCode(code: number): void {
 switch (code) {
-case charCodes.numberSign:
-this.readToken_numberSign();
-return;
-
 // The interpretation of a dot depends on whether it is followed
 // by a digit or another two dots.
 
@@ -687,19 +674,10 @@ export default class Tokenizer extends LocationParser {
 ++this.state.pos;
 this.finishToken(tt.bracketR);
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case charCodes.leftCurlyBrace:
|
case charCodes.leftCurlyBrace:
|
||||||
if (
|
|
||||||
this.hasPlugin("flow") &&
|
|
||||||
this.input.charCodeAt(this.state.pos + 1) === charCodes.verticalBar
|
|
||||||
) {
|
|
||||||
this.finishOp(tt.braceBarL, 2);
|
|
||||||
} else {
|
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
this.finishToken(tt.braceL);
|
this.finishToken(tt.braceL);
|
||||||
}
|
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case charCodes.rightCurlyBrace:
|
case charCodes.rightCurlyBrace:
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
this.finishToken(tt.braceR);
|
this.finishToken(tt.braceR);
|
||||||
@ -708,7 +686,7 @@ export default class Tokenizer extends LocationParser {
|
|||||||
case charCodes.colon:
|
case charCodes.colon:
|
||||||
if (
|
if (
|
||||||
this.hasPlugin("functionBind") &&
|
this.hasPlugin("functionBind") &&
|
||||||
this.input.charCodeAt(this.state.pos + 1) === charCodes.colon
|
this.state.input.charCodeAt(this.state.pos + 1) === charCodes.colon
|
||||||
) {
|
) {
|
||||||
this.finishOp(tt.doubleColon, 2);
|
this.finishOp(tt.doubleColon, 2);
|
||||||
} else {
|
} else {
|
||||||
@ -720,10 +698,6 @@ export default class Tokenizer extends LocationParser {
|
|||||||
case charCodes.questionMark:
|
case charCodes.questionMark:
|
||||||
this.readToken_question();
|
this.readToken_question();
|
||||||
return;
|
return;
|
||||||
case charCodes.atSign:
|
|
||||||
++this.state.pos;
|
|
||||||
this.finishToken(tt.at);
|
|
||||||
return;
|
|
||||||
|
|
||||||
case charCodes.graveAccent:
|
case charCodes.graveAccent:
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
@ -731,7 +705,7 @@ export default class Tokenizer extends LocationParser {
|
|||||||
return;
|
return;
|
||||||
|
|
||||||
case charCodes.digit0: {
|
case charCodes.digit0: {
|
||||||
const next = this.input.charCodeAt(this.state.pos + 1);
|
const next = this.state.input.charCodeAt(this.state.pos + 1);
|
||||||
// '0x', '0X' - hex number
|
// '0x', '0X' - hex number
|
||||||
if (next === charCodes.lowercaseX || next === charCodes.uppercaseX) {
|
if (next === charCodes.lowercaseX || next === charCodes.uppercaseX) {
|
||||||
this.readRadixNumber(16);
|
this.readRadixNumber(16);
|
||||||
@ -809,6 +783,25 @@ export default class Tokenizer extends LocationParser {
|
|||||||
case charCodes.tilde:
|
case charCodes.tilde:
|
||||||
this.finishOp(tt.tilde, 1);
|
this.finishOp(tt.tilde, 1);
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
case charCodes.atSign:
|
||||||
|
++this.state.pos;
|
||||||
|
this.finishToken(tt.at);
|
||||||
|
return;
|
||||||
|
|
||||||
|
case charCodes.numberSign:
|
||||||
|
this.readToken_numberSign();
|
||||||
|
return;
|
||||||
|
|
||||||
|
case charCodes.backslash:
|
||||||
|
this.readWord();
|
||||||
|
return;
|
||||||
|
|
||||||
|
default:
|
||||||
|
if (isIdentifierStart(code)) {
|
||||||
|
this.readWord();
|
||||||
|
return;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
this.raise(
|
this.raise(
|
||||||
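Illustration only, not part of the diff: after this change the "@", "#", and "\" code points, and any other identifier-start character, are all dispatched from inside the switch (via a default branch) rather than a trailing if. The sketch below is heavily simplified; the isIdentifierStart stand-in is an assumption, not the real helper.

  const isIdentifierStart = code => /[A-Za-z_$]/.test(String.fromCharCode(code));

  function classify(code) {
    switch (code) {
      case 0x40: // "@"
        return "at";
      case 0x23: // "#"
        return "numberSign";
      case 0x5c: // "\" starts an escaped identifier
        return "word";
      default:
        if (isIdentifierStart(code)) return "word";
    }
    throw new SyntaxError("Unexpected character");
  }

  classify("a".charCodeAt(0)); // "word"
  classify("@".charCodeAt(0)); // "at"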
@ -818,7 +811,7 @@ export default class Tokenizer extends LocationParser {
|
|||||||
}
|
}
|
||||||
|
|
||||||
finishOp(type: TokenType, size: number): void {
|
finishOp(type: TokenType, size: number): void {
|
||||||
const str = this.input.slice(this.state.pos, this.state.pos + size);
|
const str = this.state.input.slice(this.state.pos, this.state.pos + size);
|
||||||
this.state.pos += size;
|
this.state.pos += size;
|
||||||
this.finishToken(type, str);
|
this.finishToken(type, str);
|
||||||
}
|
}
|
||||||
@ -827,10 +820,10 @@ export default class Tokenizer extends LocationParser {
|
|||||||
const start = this.state.pos;
|
const start = this.state.pos;
|
||||||
let escaped, inClass;
|
let escaped, inClass;
|
||||||
for (;;) {
|
for (;;) {
|
||||||
if (this.state.pos >= this.input.length) {
|
if (this.state.pos >= this.state.length) {
|
||||||
this.raise(start, "Unterminated regular expression");
|
this.raise(start, "Unterminated regular expression");
|
||||||
}
|
}
|
||||||
const ch = this.input.charAt(this.state.pos);
|
const ch = this.state.input.charAt(this.state.pos);
|
||||||
if (lineBreak.test(ch)) {
|
if (lineBreak.test(ch)) {
|
||||||
this.raise(start, "Unterminated regular expression");
|
this.raise(start, "Unterminated regular expression");
|
||||||
}
|
}
|
||||||
@ -848,16 +841,16 @@ export default class Tokenizer extends LocationParser {
|
|||||||
}
|
}
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
}
|
}
|
||||||
const content = this.input.slice(start, this.state.pos);
|
const content = this.state.input.slice(start, this.state.pos);
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
|
|
||||||
let mods = "";
|
let mods = "";
|
||||||
|
|
||||||
while (this.state.pos < this.input.length) {
|
while (this.state.pos < this.state.length) {
|
||||||
const char = this.input[this.state.pos];
|
const char = this.state.input[this.state.pos];
|
||||||
const charCode = this.input.codePointAt(this.state.pos);
|
const charCode = this.state.input.codePointAt(this.state.pos);
|
||||||
|
|
||||||
if (VALID_REGEX_FLAGS.indexOf(char) > -1) {
|
if (VALID_REGEX_FLAGS.has(char)) {
|
||||||
if (mods.indexOf(char) > -1) {
|
if (mods.indexOf(char) > -1) {
|
||||||
this.raise(this.state.pos + 1, "Duplicate regular expression flag");
|
this.raise(this.state.pos + 1, "Duplicate regular expression flag");
|
||||||
}
|
}
|
||||||
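Illustration only, not part of the diff: VALID_REGEX_FLAGS is now consulted with Set.prototype.has instead of indexOf, giving a constant-time membership check per flag. The flag list below is an assumption for the sketch, not the exact definition from the file.

  const VALID_REGEX_FLAGS = new Set(["g", "i", "m", "s", "u", "y"]);

  function hasOnlyValidFlags(mods) {
    // Every character of the modifier string must be a known flag.
    return Array.from(mods).every(flag => VALID_REGEX_FLAGS.has(flag));
  }

  hasOnlyValidFlags("gim"); // true
  hasOnlyValidFlags("gx");  // false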
@ -902,12 +895,12 @@ export default class Tokenizer extends LocationParser {
|
|||||||
let total = 0;
|
let total = 0;
|
||||||
|
|
||||||
for (let i = 0, e = len == null ? Infinity : len; i < e; ++i) {
|
for (let i = 0, e = len == null ? Infinity : len; i < e; ++i) {
|
||||||
const code = this.input.charCodeAt(this.state.pos);
|
const code = this.state.input.charCodeAt(this.state.pos);
|
||||||
let val;
|
let val;
|
||||||
|
|
||||||
if (this.hasPlugin("numericSeparator")) {
|
if (this.hasPlugin("numericSeparator")) {
|
||||||
const prev = this.input.charCodeAt(this.state.pos - 1);
|
const prev = this.state.input.charCodeAt(this.state.pos - 1);
|
||||||
const next = this.input.charCodeAt(this.state.pos + 1);
|
const next = this.state.input.charCodeAt(this.state.pos + 1);
|
||||||
if (code === charCodes.underscore) {
|
if (code === charCodes.underscore) {
|
||||||
if (allowedSiblings.indexOf(next) === -1) {
|
if (allowedSiblings.indexOf(next) === -1) {
|
||||||
this.raise(this.state.pos, "Invalid or unexpected token");
|
this.raise(this.state.pos, "Invalid or unexpected token");
|
||||||
@ -961,18 +954,22 @@ export default class Tokenizer extends LocationParser {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (this.hasPlugin("bigInt")) {
|
if (this.hasPlugin("bigInt")) {
|
||||||
if (this.input.charCodeAt(this.state.pos) === charCodes.lowercaseN) {
|
if (
|
||||||
|
this.state.input.charCodeAt(this.state.pos) === charCodes.lowercaseN
|
||||||
|
) {
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
isBigInt = true;
|
isBigInt = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isIdentifierStart(this.input.codePointAt(this.state.pos))) {
|
if (isIdentifierStart(this.state.input.codePointAt(this.state.pos))) {
|
||||||
this.raise(this.state.pos, "Identifier directly after number");
|
this.raise(this.state.pos, "Identifier directly after number");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isBigInt) {
|
if (isBigInt) {
|
||||||
const str = this.input.slice(start, this.state.pos).replace(/[_n]/g, "");
|
const str = this.state.input
|
||||||
|
.slice(start, this.state.pos)
|
||||||
|
.replace(/[_n]/g, "");
|
||||||
this.finishToken(tt.bigint, str);
|
this.finishToken(tt.bigint, str);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -992,7 +989,7 @@ export default class Tokenizer extends LocationParser {
|
|||||||
}
|
}
|
||||||
let octal =
|
let octal =
|
||||||
this.state.pos - start >= 2 &&
|
this.state.pos - start >= 2 &&
|
||||||
this.input.charCodeAt(start) === charCodes.digit0;
|
this.state.input.charCodeAt(start) === charCodes.digit0;
|
||||||
if (octal) {
|
if (octal) {
|
||||||
if (this.state.strict) {
|
if (this.state.strict) {
|
||||||
this.raise(
|
this.raise(
|
||||||
@ -1000,30 +997,30 @@ export default class Tokenizer extends LocationParser {
|
|||||||
"Legacy octal literals are not allowed in strict mode",
|
"Legacy octal literals are not allowed in strict mode",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
if (/[89]/.test(this.input.slice(start, this.state.pos))) {
|
if (/[89]/.test(this.state.input.slice(start, this.state.pos))) {
|
||||||
octal = false;
|
octal = false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let next = this.input.charCodeAt(this.state.pos);
|
let next = this.state.input.charCodeAt(this.state.pos);
|
||||||
if (next === charCodes.dot && !octal) {
|
if (next === charCodes.dot && !octal) {
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
this.readInt(10);
|
this.readInt(10);
|
||||||
isFloat = true;
|
isFloat = true;
|
||||||
next = this.input.charCodeAt(this.state.pos);
|
next = this.state.input.charCodeAt(this.state.pos);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (
|
if (
|
||||||
(next === charCodes.uppercaseE || next === charCodes.lowercaseE) &&
|
(next === charCodes.uppercaseE || next === charCodes.lowercaseE) &&
|
||||||
!octal
|
!octal
|
||||||
) {
|
) {
|
||||||
next = this.input.charCodeAt(++this.state.pos);
|
next = this.state.input.charCodeAt(++this.state.pos);
|
||||||
if (next === charCodes.plusSign || next === charCodes.dash) {
|
if (next === charCodes.plusSign || next === charCodes.dash) {
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
}
|
}
|
||||||
if (this.readInt(10) === null) this.raise(start, "Invalid number");
|
if (this.readInt(10) === null) this.raise(start, "Invalid number");
|
||||||
isFloat = true;
|
isFloat = true;
|
||||||
next = this.input.charCodeAt(this.state.pos);
|
next = this.state.input.charCodeAt(this.state.pos);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.hasPlugin("bigInt")) {
|
if (this.hasPlugin("bigInt")) {
|
||||||
@ -1035,12 +1032,14 @@ export default class Tokenizer extends LocationParser {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isIdentifierStart(this.input.codePointAt(this.state.pos))) {
|
if (isIdentifierStart(this.state.input.codePointAt(this.state.pos))) {
|
||||||
this.raise(this.state.pos, "Identifier directly after number");
|
this.raise(this.state.pos, "Identifier directly after number");
|
||||||
}
|
}
|
||||||
|
|
||||||
// remove "_" for numeric literal separator, and "n" for BigInts
|
// remove "_" for numeric literal separator, and "n" for BigInts
|
||||||
const str = this.input.slice(start, this.state.pos).replace(/[_n]/g, "");
|
const str = this.state.input
|
||||||
|
.slice(start, this.state.pos)
|
||||||
|
.replace(/[_n]/g, "");
|
||||||
|
|
||||||
if (isBigInt) {
|
if (isBigInt) {
|
||||||
this.finishToken(tt.bigint, str);
|
this.finishToken(tt.bigint, str);
|
||||||
@ -1054,13 +1053,13 @@ export default class Tokenizer extends LocationParser {
|
|||||||
// Read a string value, interpreting backslash-escapes.
|
// Read a string value, interpreting backslash-escapes.
|
||||||
|
|
||||||
readCodePoint(throwOnInvalid: boolean): number | null {
|
readCodePoint(throwOnInvalid: boolean): number | null {
|
||||||
const ch = this.input.charCodeAt(this.state.pos);
|
const ch = this.state.input.charCodeAt(this.state.pos);
|
||||||
let code;
|
let code;
|
||||||
|
|
||||||
if (ch === charCodes.leftCurlyBrace) {
|
if (ch === charCodes.leftCurlyBrace) {
|
||||||
const codePos = ++this.state.pos;
|
const codePos = ++this.state.pos;
|
||||||
code = this.readHexChar(
|
code = this.readHexChar(
|
||||||
this.input.indexOf("}", this.state.pos) - this.state.pos,
|
this.state.input.indexOf("}", this.state.pos) - this.state.pos,
|
||||||
throwOnInvalid,
|
throwOnInvalid,
|
||||||
);
|
);
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
@ -1085,13 +1084,13 @@ export default class Tokenizer extends LocationParser {
|
|||||||
let out = "",
|
let out = "",
|
||||||
chunkStart = ++this.state.pos;
|
chunkStart = ++this.state.pos;
|
||||||
for (;;) {
|
for (;;) {
|
||||||
if (this.state.pos >= this.input.length) {
|
if (this.state.pos >= this.state.length) {
|
||||||
this.raise(this.state.start, "Unterminated string constant");
|
this.raise(this.state.start, "Unterminated string constant");
|
||||||
}
|
}
|
||||||
const ch = this.input.charCodeAt(this.state.pos);
|
const ch = this.state.input.charCodeAt(this.state.pos);
|
||||||
if (ch === quote) break;
|
if (ch === quote) break;
|
||||||
if (ch === charCodes.backslash) {
|
if (ch === charCodes.backslash) {
|
||||||
out += this.input.slice(chunkStart, this.state.pos);
|
out += this.state.input.slice(chunkStart, this.state.pos);
|
||||||
// $FlowFixMe
|
// $FlowFixMe
|
||||||
out += this.readEscapedChar(false);
|
out += this.readEscapedChar(false);
|
||||||
chunkStart = this.state.pos;
|
chunkStart = this.state.pos;
|
||||||
@ -1107,7 +1106,7 @@ export default class Tokenizer extends LocationParser {
|
|||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
out += this.input.slice(chunkStart, this.state.pos++);
|
out += this.state.input.slice(chunkStart, this.state.pos++);
|
||||||
this.finishToken(tt.string, out);
|
this.finishToken(tt.string, out);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1118,14 +1117,14 @@ export default class Tokenizer extends LocationParser {
|
|||||||
chunkStart = this.state.pos,
|
chunkStart = this.state.pos,
|
||||||
containsInvalid = false;
|
containsInvalid = false;
|
||||||
for (;;) {
|
for (;;) {
|
||||||
if (this.state.pos >= this.input.length) {
|
if (this.state.pos >= this.state.length) {
|
||||||
this.raise(this.state.start, "Unterminated template");
|
this.raise(this.state.start, "Unterminated template");
|
||||||
}
|
}
|
||||||
const ch = this.input.charCodeAt(this.state.pos);
|
const ch = this.state.input.charCodeAt(this.state.pos);
|
||||||
if (
|
if (
|
||||||
ch === charCodes.graveAccent ||
|
ch === charCodes.graveAccent ||
|
||||||
(ch === charCodes.dollarSign &&
|
(ch === charCodes.dollarSign &&
|
||||||
this.input.charCodeAt(this.state.pos + 1) ===
|
this.state.input.charCodeAt(this.state.pos + 1) ===
|
||||||
charCodes.leftCurlyBrace)
|
charCodes.leftCurlyBrace)
|
||||||
) {
|
) {
|
||||||
if (this.state.pos === this.state.start && this.match(tt.template)) {
|
if (this.state.pos === this.state.start && this.match(tt.template)) {
|
||||||
@ -1139,12 +1138,12 @@ export default class Tokenizer extends LocationParser {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
out += this.input.slice(chunkStart, this.state.pos);
|
out += this.state.input.slice(chunkStart, this.state.pos);
|
||||||
this.finishToken(tt.template, containsInvalid ? null : out);
|
this.finishToken(tt.template, containsInvalid ? null : out);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (ch === charCodes.backslash) {
|
if (ch === charCodes.backslash) {
|
||||||
out += this.input.slice(chunkStart, this.state.pos);
|
out += this.state.input.slice(chunkStart, this.state.pos);
|
||||||
const escaped = this.readEscapedChar(true);
|
const escaped = this.readEscapedChar(true);
|
||||||
if (escaped === null) {
|
if (escaped === null) {
|
||||||
containsInvalid = true;
|
containsInvalid = true;
|
||||||
@ -1153,11 +1152,13 @@ export default class Tokenizer extends LocationParser {
|
|||||||
}
|
}
|
||||||
chunkStart = this.state.pos;
|
chunkStart = this.state.pos;
|
||||||
} else if (isNewLine(ch)) {
|
} else if (isNewLine(ch)) {
|
||||||
out += this.input.slice(chunkStart, this.state.pos);
|
out += this.state.input.slice(chunkStart, this.state.pos);
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
switch (ch) {
|
switch (ch) {
|
||||||
case charCodes.carriageReturn:
|
case charCodes.carriageReturn:
|
||||||
if (this.input.charCodeAt(this.state.pos) === charCodes.lineFeed) {
|
if (
|
||||||
|
this.state.input.charCodeAt(this.state.pos) === charCodes.lineFeed
|
||||||
|
) {
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
}
|
}
|
||||||
case charCodes.lineFeed:
|
case charCodes.lineFeed:
|
||||||
@ -1180,7 +1181,7 @@ export default class Tokenizer extends LocationParser {
|
|||||||
|
|
||||||
readEscapedChar(inTemplate: boolean): string | null {
|
readEscapedChar(inTemplate: boolean): string | null {
|
||||||
const throwOnInvalid = !inTemplate;
|
const throwOnInvalid = !inTemplate;
|
||||||
const ch = this.input.charCodeAt(++this.state.pos);
|
const ch = this.state.input.charCodeAt(++this.state.pos);
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
switch (ch) {
|
switch (ch) {
|
||||||
case charCodes.lowercaseN:
|
case charCodes.lowercaseN:
|
||||||
@ -1204,7 +1205,9 @@ export default class Tokenizer extends LocationParser {
|
|||||||
case charCodes.lowercaseF:
|
case charCodes.lowercaseF:
|
||||||
return "\f";
|
return "\f";
|
||||||
case charCodes.carriageReturn:
|
case charCodes.carriageReturn:
|
||||||
if (this.input.charCodeAt(this.state.pos) === charCodes.lineFeed) {
|
if (
|
||||||
|
this.state.input.charCodeAt(this.state.pos) === charCodes.lineFeed
|
||||||
|
) {
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
}
|
}
|
||||||
case charCodes.lineFeed:
|
case charCodes.lineFeed:
|
||||||
@ -1215,7 +1218,7 @@ export default class Tokenizer extends LocationParser {
|
|||||||
if (ch >= charCodes.digit0 && ch <= charCodes.digit7) {
|
if (ch >= charCodes.digit0 && ch <= charCodes.digit7) {
|
||||||
const codePos = this.state.pos - 1;
|
const codePos = this.state.pos - 1;
|
||||||
// $FlowFixMe
|
// $FlowFixMe
|
||||||
let octalStr = this.input
|
let octalStr = this.state.input
|
||||||
.substr(this.state.pos - 1, 3)
|
.substr(this.state.pos - 1, 3)
|
||||||
.match(/^[0-7]+/)[0];
|
.match(/^[0-7]+/)[0];
|
||||||
let octal = parseInt(octalStr, 8);
|
let octal = parseInt(octalStr, 8);
|
||||||
@ -1266,23 +1269,28 @@ export default class Tokenizer extends LocationParser {
|
|||||||
// as a micro-optimization.
|
// as a micro-optimization.
|
||||||
|
|
||||||
readWord1(): string {
|
readWord1(): string {
|
||||||
|
let word = "";
|
||||||
this.state.containsEsc = false;
|
this.state.containsEsc = false;
|
||||||
let word = "",
|
const start = this.state.pos;
|
||||||
first = true,
|
let chunkStart = this.state.pos;
|
||||||
chunkStart = this.state.pos;
|
|
||||||
while (this.state.pos < this.input.length) {
|
while (this.state.pos < this.state.length) {
|
||||||
const ch = this.input.codePointAt(this.state.pos);
|
const ch = this.state.input.codePointAt(this.state.pos);
|
||||||
if (isIdentifierChar(ch)) {
|
if (isIdentifierChar(ch)) {
|
||||||
this.state.pos += ch <= 0xffff ? 1 : 2;
|
this.state.pos += ch <= 0xffff ? 1 : 2;
|
||||||
} else if (this.state.isIterator && ch === charCodes.atSign) {
|
} else if (this.state.isIterator && ch === charCodes.atSign) {
|
||||||
this.state.pos += 1;
|
++this.state.pos;
|
||||||
} else if (ch === charCodes.backslash) {
|
} else if (ch === charCodes.backslash) {
|
||||||
this.state.containsEsc = true;
|
this.state.containsEsc = true;
|
||||||
|
|
||||||
word += this.input.slice(chunkStart, this.state.pos);
|
word += this.state.input.slice(chunkStart, this.state.pos);
|
||||||
const escStart = this.state.pos;
|
const escStart = this.state.pos;
|
||||||
|
const identifierCheck =
|
||||||
|
this.state.pos === start ? isIdentifierStart : isIdentifierChar;
|
||||||
|
|
||||||
if (this.input.charCodeAt(++this.state.pos) !== charCodes.lowercaseU) {
|
if (
|
||||||
|
this.state.input.charCodeAt(++this.state.pos) !== charCodes.lowercaseU
|
||||||
|
) {
|
||||||
this.raise(
|
this.raise(
|
||||||
this.state.pos,
|
this.state.pos,
|
||||||
"Expecting Unicode escape sequence \\uXXXX",
|
"Expecting Unicode escape sequence \\uXXXX",
|
||||||
@ -1291,8 +1299,11 @@ export default class Tokenizer extends LocationParser {
|
|||||||
|
|
||||||
++this.state.pos;
|
++this.state.pos;
|
||||||
const esc = this.readCodePoint(true);
|
const esc = this.readCodePoint(true);
|
||||||
|
|
||||||
|
if (
|
||||||
// $FlowFixMe (thinks esc may be null, but throwOnInvalid is true)
|
// $FlowFixMe (thinks esc may be null, but throwOnInvalid is true)
|
||||||
if (!(first ? isIdentifierStart : isIdentifierChar)(esc, true)) {
|
!identifierCheck(esc, true)
|
||||||
|
) {
|
||||||
this.raise(escStart, "Invalid Unicode escape");
|
this.raise(escStart, "Invalid Unicode escape");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1302,9 +1313,8 @@ export default class Tokenizer extends LocationParser {
|
|||||||
} else {
|
} else {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
first = false;
|
|
||||||
}
|
}
|
||||||
return word + this.input.slice(chunkStart, this.state.pos);
|
return word + this.state.input.slice(chunkStart, this.state.pos);
|
||||||
}
|
}
|
||||||
|
|
||||||
isIterator(word: string): boolean {
|
isIterator(word: string): boolean {
|
||||||
@ -1316,16 +1326,12 @@ export default class Tokenizer extends LocationParser {
|
|||||||
|
|
||||||
readWord(): void {
|
readWord(): void {
|
||||||
const word = this.readWord1();
|
const word = this.readWord1();
|
||||||
let type = tt.name;
|
const type = keywordTypes[word] || tt.name;
|
||||||
|
|
||||||
if (this.isKeyword(word)) {
|
if (type.keyword && this.state.containsEsc) {
|
||||||
if (this.state.containsEsc) {
|
|
||||||
this.raise(this.state.pos, `Escape sequence in keyword ${word}`);
|
this.raise(this.state.pos, `Escape sequence in keyword ${word}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
type = keywordTypes[word];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Allow @@iterator and @@asyncIterator as a identifier only inside type
|
// Allow @@iterator and @@asyncIterator as a identifier only inside type
|
||||||
if (
|
if (
|
||||||
this.state.isIterator &&
|
this.state.isIterator &&
|
||||||
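Illustration only, not part of the diff: readWord now resolves the token type with a single keywordTypes lookup plus a tt.name fallback, instead of calling isKeyword(word) and then looking the type up a second time. The object shapes below are simplified assumptions.

  const tt = { name: { label: "name" } };
  const keywordTypes = Object.assign(Object.create(null), {
    if: { label: "if", keyword: "if" },
    return: { label: "return", keyword: "return" },
  });

  function typeForWord(word) {
    // One lookup: keywords hit their dedicated type, everything else is a name.
    return keywordTypes[word] || tt.name;
  }

  typeForWord("if").keyword;  // "if"
  typeForWord("foo").keyword; // undefined (plain identifier)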
@ -1358,7 +1364,7 @@ export default class Tokenizer extends LocationParser {
|
|||||||
(prevType === tt.name && this.state.exprAllowed)
|
(prevType === tt.name && this.state.exprAllowed)
|
||||||
) {
|
) {
|
||||||
return lineBreak.test(
|
return lineBreak.test(
|
||||||
this.input.slice(this.state.lastTokEnd, this.state.start),
|
this.state.input.slice(this.state.lastTokEnd, this.state.start),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -24,6 +24,7 @@ type TopicContextState = {
|
|||||||
export default class State {
|
export default class State {
|
||||||
strict: boolean;
|
strict: boolean;
|
||||||
input: string;
|
input: string;
|
||||||
|
length: number;
|
||||||
|
|
||||||
curLine: number;
|
curLine: number;
|
||||||
|
|
||||||
@ -37,6 +38,7 @@ export default class State {
|
|||||||
options.strictMode === false ? false : options.sourceType === "module";
|
options.strictMode === false ? false : options.sourceType === "module";
|
||||||
|
|
||||||
this.input = input;
|
this.input = input;
|
||||||
|
this.length = input.length;
|
||||||
|
|
||||||
this.curLine = options.startLine;
|
this.curLine = options.startLine;
|
||||||
this.startLoc = this.endLoc = this.curPosition();
|
this.startLoc = this.endLoc = this.curPosition();
|
||||||
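Illustration only, not part of the diff: the parser state now carries both the source string and its cached length, so the scanning loops above compare against this.state.length instead of re-reading this.input.length on every iteration. A minimal sketch, omitting the real constructor options:

  class State {
    init(input) {
      this.input = input;
      this.length = input.length; // cached once, read in every scanning loop
      this.pos = 0;
    }
  }

  const s = new State();
  s.init("let x = 1;");
  s.length; // 10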
@ -176,7 +178,9 @@ export default class State {
|
|||||||
|
|
||||||
clone(skipArrays?: boolean): State {
|
clone(skipArrays?: boolean): State {
|
||||||
const state = new State();
|
const state = new State();
|
||||||
Object.keys(this).forEach(key => {
|
const keys = Object.keys(this);
|
||||||
|
for (let i = 0, length = keys.length; i < length; i++) {
|
||||||
|
const key = keys[i];
|
||||||
// $FlowIgnore
|
// $FlowIgnore
|
||||||
let val = this[key];
|
let val = this[key];
|
||||||
|
|
||||||
@ -186,7 +190,8 @@ export default class State {
|
|||||||
|
|
||||||
// $FlowIgnore
|
// $FlowIgnore
|
||||||
state[key] = val;
|
state[key] = val;
|
||||||
});
|
}
|
||||||
|
|
||||||
return state;
|
return state;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
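Illustration only, not part of the diff: clone() now walks the keys with an indexed for loop instead of Object.keys(...).forEach, avoiding one callback invocation per property on a hot path. A generic sketch of the same pattern:

  function shallowClone(obj) {
    const copy = Object.create(Object.getPrototypeOf(obj));
    const keys = Object.keys(obj);
    for (let i = 0, length = keys.length; i < length; i++) {
      const key = keys[i];
      copy[key] = obj[key];
    }
    return copy;
  }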
|
|||||||
@ -133,20 +133,20 @@ export const types: { [name: string]: TokenType } = {
|
|||||||
incDec: new TokenType("++/--", { prefix, postfix, startsExpr }),
|
incDec: new TokenType("++/--", { prefix, postfix, startsExpr }),
|
||||||
bang: new TokenType("!", { beforeExpr, prefix, startsExpr }),
|
bang: new TokenType("!", { beforeExpr, prefix, startsExpr }),
|
||||||
tilde: new TokenType("~", { beforeExpr, prefix, startsExpr }),
|
tilde: new TokenType("~", { beforeExpr, prefix, startsExpr }),
|
||||||
pipeline: new BinopTokenType("|>", 0),
|
pipeline: BinopTokenType("|>", 0),
|
||||||
nullishCoalescing: new BinopTokenType("??", 1),
|
nullishCoalescing: BinopTokenType("??", 1),
|
||||||
logicalOR: new BinopTokenType("||", 1),
|
logicalOR: BinopTokenType("||", 1),
|
||||||
logicalAND: new BinopTokenType("&&", 2),
|
logicalAND: BinopTokenType("&&", 2),
|
||||||
bitwiseOR: new BinopTokenType("|", 3),
|
bitwiseOR: BinopTokenType("|", 3),
|
||||||
bitwiseXOR: new BinopTokenType("^", 4),
|
bitwiseXOR: BinopTokenType("^", 4),
|
||||||
bitwiseAND: new BinopTokenType("&", 5),
|
bitwiseAND: BinopTokenType("&", 5),
|
||||||
equality: new BinopTokenType("==/!=", 6),
|
equality: BinopTokenType("==/!=", 6),
|
||||||
relational: new BinopTokenType("</>", 7),
|
relational: BinopTokenType("</>", 7),
|
||||||
bitShift: new BinopTokenType("<</>>", 8),
|
bitShift: BinopTokenType("<</>>", 8),
|
||||||
plusMin: new TokenType("+/-", { beforeExpr, binop: 9, prefix, startsExpr }),
|
plusMin: new TokenType("+/-", { beforeExpr, binop: 9, prefix, startsExpr }),
|
||||||
modulo: new BinopTokenType("%", 10),
|
modulo: BinopTokenType("%", 10),
|
||||||
star: new BinopTokenType("*", 10),
|
star: BinopTokenType("*", 10),
|
||||||
slash: new BinopTokenType("/", 10),
|
slash: BinopTokenType("/", 10),
|
||||||
exponent: new TokenType("**", {
|
exponent: new TokenType("**", {
|
||||||
beforeExpr,
|
beforeExpr,
|
||||||
binop: 11,
|
binop: 11,
|
||||||
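Illustration only, and an assumption: dropping the `new` keyword in front of BinopTokenType suggests it is now a plain factory that returns a TokenType configured as a binary operator. The TokenType stand-in below is simplified; the real shapes live in types.js.

  class TokenType {
    constructor(label, conf = {}) {
      this.label = label;
      this.beforeExpr = !!conf.beforeExpr;
      this.binop = conf.binop === undefined ? null : conf.binop;
    }
  }

  function BinopTokenType(label, binop) {
    return new TokenType(label, { beforeExpr: true, binop });
  }

  const pipeline = BinopTokenType("|>", 0);
  pipeline.binop; // 0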
@ -154,45 +154,53 @@ export const types: { [name: string]: TokenType } = {
|
|||||||
}),
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
export const keywords = {
|
function makeKeywordProps(
|
||||||
break: new KeywordTokenType("break"),
|
name: string,
|
||||||
case: new KeywordTokenType("case", { beforeExpr }),
|
conf: any,
|
||||||
catch: new KeywordTokenType("catch"),
|
): PropertyDescriptor<TokenType> {
|
||||||
continue: new KeywordTokenType("continue"),
|
return { value: KeywordTokenType(name, conf), enumerable: true };
|
||||||
debugger: new KeywordTokenType("debugger"),
|
}
|
||||||
default: new KeywordTokenType("default", { beforeExpr }),
|
|
||||||
do: new KeywordTokenType("do", { isLoop, beforeExpr }),
|
// $FlowIssue
|
||||||
else: new KeywordTokenType("else", { beforeExpr }),
|
export const keywords = Object.create(null, {
|
||||||
finally: new KeywordTokenType("finally"),
|
break: makeKeywordProps("break"),
|
||||||
for: new KeywordTokenType("for", { isLoop }),
|
case: makeKeywordProps("case", { beforeExpr }),
|
||||||
function: new KeywordTokenType("function", { startsExpr }),
|
catch: makeKeywordProps("catch"),
|
||||||
if: new KeywordTokenType("if"),
|
continue: makeKeywordProps("continue"),
|
||||||
return: new KeywordTokenType("return", { beforeExpr }),
|
debugger: makeKeywordProps("debugger"),
|
||||||
switch: new KeywordTokenType("switch"),
|
default: makeKeywordProps("default", { beforeExpr }),
|
||||||
throw: new KeywordTokenType("throw", { beforeExpr, prefix, startsExpr }),
|
do: makeKeywordProps("do", { isLoop, beforeExpr }),
|
||||||
try: new KeywordTokenType("try"),
|
else: makeKeywordProps("else", { beforeExpr }),
|
||||||
var: new KeywordTokenType("var"),
|
finally: makeKeywordProps("finally"),
|
||||||
let: new KeywordTokenType("let"),
|
for: makeKeywordProps("for", { isLoop }),
|
||||||
const: new KeywordTokenType("const"),
|
function: makeKeywordProps("function", { startsExpr }),
|
||||||
while: new KeywordTokenType("while", { isLoop }),
|
if: makeKeywordProps("if"),
|
||||||
with: new KeywordTokenType("with"),
|
return: makeKeywordProps("return", { beforeExpr }),
|
||||||
new: new KeywordTokenType("new", { beforeExpr, startsExpr }),
|
switch: makeKeywordProps("switch"),
|
||||||
this: new KeywordTokenType("this", { startsExpr }),
|
throw: makeKeywordProps("throw", { beforeExpr, prefix, startsExpr }),
|
||||||
super: new KeywordTokenType("super", { startsExpr }),
|
try: makeKeywordProps("try"),
|
||||||
class: new KeywordTokenType("class", { startsExpr }),
|
var: makeKeywordProps("var"),
|
||||||
extends: new KeywordTokenType("extends", { beforeExpr }),
|
let: makeKeywordProps("let"),
|
||||||
export: new KeywordTokenType("export"),
|
const: makeKeywordProps("const"),
|
||||||
import: new KeywordTokenType("import", { startsExpr }),
|
while: makeKeywordProps("while", { isLoop }),
|
||||||
yield: new KeywordTokenType("yield", { beforeExpr, startsExpr }),
|
with: makeKeywordProps("with"),
|
||||||
null: new KeywordTokenType("null", { startsExpr }),
|
new: makeKeywordProps("new"),
|
||||||
true: new KeywordTokenType("true", { startsExpr }),
|
this: makeKeywordProps("this", { startsExpr }),
|
||||||
false: new KeywordTokenType("false", { startsExpr }),
|
super: makeKeywordProps("super", { startsExpr }),
|
||||||
in: new KeywordTokenType("in", { beforeExpr, binop: 7 }),
|
class: makeKeywordProps("class", { startsExpr }),
|
||||||
instanceof: new KeywordTokenType("instanceof", { beforeExpr, binop: 7 }),
|
extends: makeKeywordProps("extends", { beforeExpr }),
|
||||||
typeof: new KeywordTokenType("typeof", { beforeExpr, prefix, startsExpr }),
|
export: makeKeywordProps("export"),
|
||||||
void: new KeywordTokenType("void", { beforeExpr, prefix, startsExpr }),
|
import: makeKeywordProps("import", { startsExpr }),
|
||||||
delete: new KeywordTokenType("delete", { beforeExpr, prefix, startsExpr }),
|
yield: makeKeywordProps("yield", { beforeExpr, startsExpr }),
|
||||||
};
|
null: makeKeywordProps("null", { startsExpr }),
|
||||||
|
true: makeKeywordProps("true", { startsExpr }),
|
||||||
|
false: makeKeywordProps("false", { startsExpr }),
|
||||||
|
in: makeKeywordProps("in", { beforeExpr, binop: 7 }),
|
||||||
|
instanceof: makeKeywordProps("instanceof", { beforeExpr, binop: 7 }),
|
||||||
|
typeof: makeKeywordProps("typeof", { beforeExpr, prefix, startsExpr }),
|
||||||
|
void: makeKeywordProps("void", { beforeExpr, prefix, startsExpr }),
|
||||||
|
delete: makeKeywordProps("delete", { beforeExpr, prefix, startsExpr }),
|
||||||
|
});
|
||||||
|
|
||||||
// Map keyword names to token types.
|
// Map keyword names to token types.
|
||||||
Object.keys(keywords).forEach(name => {
|
Object.keys(keywords).forEach(name => {
|
||||||
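Illustration only, not part of the diff: building the keywords map with Object.create(null, descriptors) produces a prototype-free object, so lookups such as keywords["toString"] or keywords["constructor"] cannot accidentally resolve to Object.prototype members. A minimal sketch with a placeholder value:

  const keywords = Object.create(null, {
    break: { value: { label: "break" }, enumerable: true },
  });

  keywords.break;        // { label: "break" }
  keywords.toString;     // undefined (no prototype chain behind the map)
  Object.keys(keywords); // ["break"] (enumerable: true keeps iteration working)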
|
|||||||
@ -4,28 +4,72 @@
|
|||||||
|
|
||||||
import * as charCodes from "charcodes";
|
import * as charCodes from "charcodes";
|
||||||
|
|
||||||
function makePredicate(words: string): (str: string) => boolean {
|
export const isES2015ReservedWord = (word: string): boolean => {
|
||||||
const wordsArr = words.split(" ");
|
return word === "enum" || word === "await";
|
||||||
return function(str) {
|
|
||||||
return wordsArr.indexOf(str) >= 0;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reserved word lists for various dialects of the language
|
|
||||||
|
|
||||||
export const reservedWords = {
|
|
||||||
"6": makePredicate("enum await"),
|
|
||||||
strict: makePredicate(
|
|
||||||
"implements interface let package private protected public static yield",
|
|
||||||
),
|
|
||||||
strictBind: makePredicate("eval arguments"),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// And the keywords
|
const reservedWordsStrict = new Set([
|
||||||
|
"implements",
|
||||||
|
"interface",
|
||||||
|
"let",
|
||||||
|
"package",
|
||||||
|
"private",
|
||||||
|
"protected",
|
||||||
|
"public",
|
||||||
|
"static",
|
||||||
|
"yield",
|
||||||
|
]);
|
||||||
|
export function isStrictReservedWord(word: string): boolean {
|
||||||
|
return reservedWordsStrict.has(word);
|
||||||
|
}
|
||||||
|
|
||||||
export const isKeyword = makePredicate(
|
export function isStrictBindReservedWord(word: string): boolean {
|
||||||
"break case catch continue debugger default do else finally for function if return switch throw try var while with null true false instanceof typeof void delete new in this let const class extends export import yield super",
|
return word === "eval" || word === "arguments";
|
||||||
);
|
}
|
||||||
|
|
||||||
|
const keywords = new Set([
|
||||||
|
"break",
|
||||||
|
"case",
|
||||||
|
"catch",
|
||||||
|
"continue",
|
||||||
|
"debugger",
|
||||||
|
"default",
|
||||||
|
"do",
|
||||||
|
"else",
|
||||||
|
"finally",
|
||||||
|
"for",
|
||||||
|
"function",
|
||||||
|
"if",
|
||||||
|
"return",
|
||||||
|
"switch",
|
||||||
|
"throw",
|
||||||
|
"try",
|
||||||
|
"var",
|
||||||
|
"while",
|
||||||
|
"with",
|
||||||
|
"null",
|
||||||
|
"true",
|
||||||
|
"false",
|
||||||
|
"instanceof",
|
||||||
|
"typeof",
|
||||||
|
"void",
|
||||||
|
"delete",
|
||||||
|
"new",
|
||||||
|
"in",
|
||||||
|
"this",
|
||||||
|
"let",
|
||||||
|
"const",
|
||||||
|
"class",
|
||||||
|
"extends",
|
||||||
|
"export",
|
||||||
|
"import",
|
||||||
|
"yield",
|
||||||
|
"super",
|
||||||
|
]);
|
||||||
|
|
||||||
|
export function isKeyword(word: string): boolean {
|
||||||
|
return keywords.has(word);
|
||||||
|
}
|
||||||
|
|
||||||
// ## Character categories
|
// ## Character categories
|
||||||
|
|
||||||
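Illustration only, not part of the diff: the makePredicate string-splitting helper is replaced by explicit Sets and small named predicates. Typical call sites look like the following; the relative import path is the one used by the new unit test later in this diff.

  import {
    isKeyword,
    isStrictReservedWord,
    isStrictBindReservedWord,
    isES2015ReservedWord,
  } from "../../../src/util/identifier";

  isKeyword("instanceof");          // true
  isStrictReservedWord("yield");    // true
  isStrictBindReservedWord("eval"); // true
  isES2015ReservedWord("enum");     // true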
@ -64,7 +108,7 @@ const astralIdentifierCodes = [509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,1
|
|||||||
// rare.
|
// rare.
|
||||||
function isInAstralSet(code: number, set: $ReadOnlyArray<number>): boolean {
|
function isInAstralSet(code: number, set: $ReadOnlyArray<number>): boolean {
|
||||||
let pos = 0x10000;
|
let pos = 0x10000;
|
||||||
for (let i = 0; i < set.length; i += 2) {
|
for (let i = 0, length = set.length; i < length; i += 2) {
|
||||||
pos += set[i];
|
pos += set[i];
|
||||||
if (pos > code) return false;
|
if (pos > code) return false;
|
||||||
|
|
||||||
|
|||||||
@ -4,7 +4,7 @@ import * as charCodes from "charcodes";
|
|||||||
|
|
||||||
// Matches a whole line break (where CRLF is considered a single
|
// Matches a whole line break (where CRLF is considered a single
|
||||||
// line break). Used to count lines.
|
// line break). Used to count lines.
|
||||||
export const lineBreak = /\r\n?|\n|\u2028|\u2029/;
|
export const lineBreak = /\r\n?|[\n\u2028\u2029]/;
|
||||||
export const lineBreakG = new RegExp(lineBreak.source, "g");
|
export const lineBreakG = new RegExp(lineBreak.source, "g");
|
||||||
|
|
||||||
// https://tc39.github.io/ecma262/#sec-line-terminators
|
// https://tc39.github.io/ecma262/#sec-line-terminators
|
||||||
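Illustration only, not part of the diff: the rewritten lineBreak pattern accepts exactly the same strings as before; the three single-character alternatives are simply folded into one character class, which the regex engine can match without extra alternation.

  const oldLineBreak = /\r\n?|\n|\u2028|\u2029/;
  const newLineBreak = /\r\n?|[\n\u2028\u2029]/;

  for (const s of ["\r\n", "\r", "\n", "\u2028", "\u2029", "ab"]) {
    // Both patterns agree on every sample, including non-line-break input.
    console.assert(oldLineBreak.test(s) === newLineBreak.test(s));
  }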
|
|||||||
@ -1,4 +1,4 @@
|
|||||||
{
|
{
|
||||||
"sourceType": "module",
|
"sourceType": "module",
|
||||||
"throws": "Unexpected token, expected \"function\" (1:21)"
|
"throws": "Unexpected token, expected \"=>\" (1:31)"
|
||||||
}
|
}
|
||||||
|
|||||||
@ -126,7 +126,7 @@ function ppJSON(v) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function addPath(str, pt) {
|
function addPath(str, pt) {
|
||||||
if (str.charAt(str.length - 1) == ")") {
|
if (str.charAt(str.length - 1) === ")") {
|
||||||
return str.slice(0, str.length - 1) + "/" + pt + ")";
|
return str.slice(0, str.length - 1) + "/" + pt + ")";
|
||||||
} else {
|
} else {
|
||||||
return str + " (" + pt + ")";
|
return str + " (" + pt + ")";
|
||||||
|
|||||||
18
packages/babel-parser/test/unit/util/identifier.js
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
import { isKeyword } from "../../../src/util/identifier";
|
||||||
|
|
||||||
|
describe("identifier", () => {
|
||||||
|
describe("isKeyword", () => {
|
||||||
|
it("break is a keyword", () => {
|
||||||
|
expect(isKeyword("break")).toBe(true);
|
||||||
|
});
|
||||||
|
it("let is a keyword", () => {
|
||||||
|
expect(isKeyword("let")).toBe(true);
|
||||||
|
});
|
||||||
|
it("super is a keyword", () => {
|
||||||
|
expect(isKeyword("super")).toBe(true);
|
||||||
|
});
|
||||||
|
it("abc is not a keyword", () => {
|
||||||
|
expect(isKeyword("abc")).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
@ -26,7 +26,6 @@ decorators/migrated_0003.js
|
|||||||
decorators/migrated_0007.js
|
decorators/migrated_0007.js
|
||||||
private_class_properties/multiple.js
|
private_class_properties/multiple.js
|
||||||
private_class_properties/super.js
|
private_class_properties/super.js
|
||||||
types/annotations/void_is_reserved_param.js
|
|
||||||
types/member/reserved_words.js
|
types/member/reserved_words.js
|
||||||
types/parameter_defaults/migrated_0032.js
|
types/parameter_defaults/migrated_0032.js
|
||||||
class_method_kinds/polymorphic_getter.js
|
class_method_kinds/polymorphic_getter.js
|
||||||