type annotate babylon

parent c99a179401
commit 20f643b419

@@ -1,3 +1,5 @@
+/* @flow */
+
 import Parser, { plugins } from "./parser";
 import "./parser/util";
 import "./parser/statement";

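The change repeated at the top of every file below is the `/* @flow */` pragma, which opts a file into Flow's type checker; files without it are ignored by default. A minimal sketch of what the pragma enables (the `double` function is illustrative, not part of babylon):

    /* @flow */
    function double(n: number): number {
      return n * 2;
    }

    double(21);       // ok
    // double("21");  // reported as an error by `flow check`
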
@@ -1,3 +1,5 @@
+/* @flow */
+
 // A second optional argument can be given to further configure
 // the parser process. These options are recognized:
 

@@ -18,7 +20,7 @@ export const defaultOptions = {
 
 // Interpret and default an options object
 
-export function getOptions(opts) {
+export function getOptions(opts?: Object): Object {
   let options = {};
   for (let key in defaultOptions) {
     options[key] = opts && key in opts ? opts[key] : defaultOptions[key];

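In the `getOptions` signature above, `opts?: Object` marks the parameter as optional (callers may omit it or pass `undefined`) and the trailing `: Object` annotates the return type. A self-contained sketch of the same pattern, with an illustrative `defaults` table rather than babylon's real option list:

    /* @flow */
    const defaults = { sourceType: "script", strictMode: null };

    function mergeOptions(opts?: Object): Object {
      let options: Object = {};
      for (let key in defaults) {
        options[key] = opts && key in opts ? opts[key] : defaults[key];
      }
      return options;
    }

    mergeOptions();                          // ok: opts may be omitted
    mergeOptions({ sourceType: "module" });  // ok: overrides one default
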
@@ -1,3 +1,5 @@
+/* @flow */
+
 /**
  * Based on the comment attachment algorithm used in espree and estraverse.
  *

@@ -1,3 +1,5 @@
+/* @flow */
+
 // A recursive descent parser operates by defining functions for all
 // syntactic elements, and recursively calling those, each function
 // advancing the input stream and returning an AST node. Precedence

@@ -793,7 +795,7 @@ pp.parseFunctionBody = function (node, allowExpression) {
 
   // normal function
   if (!isExpression && node.body.directives.length) {
-    for (var directive of (node.body.directives: Array)) {
+    for (var directive of (node.body.directives: Array<Object>)) {
       if (directive.value === "use strict") {
         checkLVal = true;
         checkLValStrict = true;

@@ -809,7 +811,7 @@ pp.parseFunctionBody = function (node, allowExpression) {
     if (node.id) {
       this.checkLVal(node.id, true);
     }
-    for (let param of (node.params: Array)) {
+    for (let param of (node.params: Array<Object>)) {
      this.checkLVal(param, true, nameHash);
     }
     this.state.strict = oldStrict;

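The `(node.params: Array<Object>)` form is a Flow type cast: it asserts a static type for the expression and compiles away to the plain expression, so runtime behaviour is unchanged. The commit's change here is only to parameterize the bare `Array` casts as `Array<Object>`. A small standalone sketch (the `collectNames` helper is made up for illustration):

    /* @flow */
    function collectNames(params: Array<Object>): Array<string> {
      const names = [];
      // Same cast idiom as `(node.params: Array<Object>)` above.
      for (const param of (params: Array<Object>)) {
        names.push(param.name);
      }
      return names;
    }

    collectNames([{ name: "a" }, { name: "b" }]);  // ["a", "b"]
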
@@ -1,3 +1,5 @@
+/* @flow */
+
 import { reservedWords } from "../util/identifier";
 import { getOptions } from "../options";
 import Tokenizer from "../tokenizer";

@@ -7,7 +9,7 @@ import Tokenizer from "../tokenizer";
 export const plugins = {};
 
 export default class Parser extends Tokenizer {
-  constructor(options, input) {
+  constructor(options, input: string) {
     options = getOptions(options);
     super(options, input);
 

@@ -25,11 +27,11 @@ export default class Parser extends Tokenizer {
     }
   }
 
-  hasFeature(name) {
+  hasFeature(name: string): boolean {
     return !!this.options.features[name];
   }
 
-  extend(name, f) {
+  extend(name: string, f: Function) {
     this[name] = f(this[name]);
   }
 

@@ -41,7 +43,13 @@ export default class Parser extends Tokenizer {
     }
   }
 
-  parse() {
+  parse(): {
+    type: "File",
+    program: {
+      type: "Program",
+      body: Array<Object>
+    }
+  } {
     let file = this.startNode();
     let program = this.startNode();
     this.nextToken();

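`parse()` now declares its return type inline as a structural object type whose `type` fields are the string literals "File" and "Program", meaning only those exact strings are accepted. The same shape written as a named alias, purely for illustration (the alias and stub below are not part of the commit):

    /* @flow */
    type File = {
      type: "File",          // string literal type: only the value "File" is allowed
      program: {
        type: "Program",
        body: Array<Object>
      }
    };

    // Equivalent signature using the alias instead of the inline object type.
    function parse(): File {
      return { type: "File", program: { type: "Program", body: [] } };
    }
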
@@ -1,3 +1,5 @@
+/* @flow */
+
 import { getLineInfo } from "../util/location";
 import Parser from "./index";
 

@@ -1,3 +1,5 @@
+/* @flow */
+
 import { types as tt } from "../tokenizer/types";
 import Parser from "./index";
 import { reservedWords } from "../util/identifier";

@@ -18,7 +20,7 @@ pp.toAssignable = function (node, isBinding) {
 
     case "ObjectExpression":
       node.type = "ObjectPattern";
-      for (let prop of (node.properties: Array)) {
+      for (let prop of (node.properties: Array<Object>)) {
        if (prop.type === "SpreadProperty") continue;
        if (prop.kind !== "init") this.raise(prop.key.start, "Object pattern can't contain getter or setter");
        this.toAssignable(prop.value, isBinding);

@@ -184,14 +186,14 @@ pp.checkLVal = function (expr, isBinding, checkClashes) {
       break;
 
     case "ObjectPattern":
-      for (let prop of (expr.properties: Array)) {
+      for (let prop of (expr.properties: Array<Object>)) {
        if (prop.type === "Property") prop = prop.value;
        this.checkLVal(prop, isBinding, checkClashes);
       }
       break;
 
     case "ArrayPattern":
-      for (let elem of (expr.elements: Array)) {
+      for (let elem of (expr.elements: Array<Object>)) {
        if (elem) this.checkLVal(elem, isBinding, checkClashes);
       }
       break;

@@ -1,3 +1,5 @@
+/* @flow */
+
 import Parser from "./index";
 import { SourceLocation } from "../util/location";
 

@@ -5,15 +7,20 @@ import { SourceLocation } from "../util/location";
 
 const pp = Parser.prototype;
 
-export class Node {
-  constructor(parser, pos, loc) {
+class Node {
+  constructor(pos?: number, loc?: SourceLocation) {
     this.type = "";
     this.start = pos;
     this.end = 0;
     this.loc = new SourceLocation(loc);
   }
 
-  __clone() {
+  type: string;
+  start: ?number;
+  end: number;
+  loc: SourceLocation;
+
+  __clone(): Node {
     var node2 = new Node;
     for (var key in this) node2[key] = this[key];
     return node2;

@@ -21,11 +28,11 @@ export class Node {
 }
 
 pp.startNode = function () {
-  return new Node(this, this.state.start, this.state.startLoc);
+  return new Node(this.state.start, this.state.startLoc);
 };
 
 pp.startNodeAt = function (pos, loc) {
-  return new Node(this, pos, loc);
+  return new Node(pos, loc);
 };
 
 function finishNodeAt(node, type, pos, loc) {

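The `Node` class now lists its fields as Flow class property declarations (`type: string;`, `start: ?number;`, and so on). The leading `?` makes `start` a maybe type, so `null` and `undefined` are also allowed, which matches a constructor whose `pos` argument is optional. A compact sketch of the idiom with a hypothetical `Span` class:

    /* @flow */
    class Span {
      start: ?number;   // maybe type: number, null, or undefined
      end: number;
      label: string;

      constructor(start?: number, label?: string) {
        this.start = start;
        this.end = 0;
        this.label = label || "";
      }
    }

    const s = new Span();   // start stays undefined, which ?number permits
    s.end = 42;
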
@@ -1,3 +1,5 @@
+/* @flow */
+
 import { types as tt } from "../tokenizer/types";
 import Parser from "./index";
 import { lineBreak } from "../util/whitespace";

@@ -380,7 +382,7 @@ pp.parseEmptyStatement = function (node) {
 };
 
 pp.parseLabeledStatement = function (node, maybeName, expr) {
-  for (let label of (this.state.labels: Array)){
+  for (let label of (this.state.labels: Array<Object>)){
     if (label.name === maybeName) {
       this.raise(expr.start, `Label '${maybeName}' is already declared`);
     }

@@ -1,3 +1,5 @@
+/* @flow */
+
 import { types as tt } from "../tokenizer/types";
 import Parser from "./index";
 import { lineBreak } from "../util/whitespace";

@@ -1,3 +1,5 @@
+/* @flow */
+
 import { types as tt } from "../tokenizer/types";
 import Parser from "../parser";
 

@@ -1,3 +1,5 @@
+/* @flow */
+
 import XHTMLEntities from "./xhtml";
 import { TokenType, types as tt } from "../../tokenizer/types";
 import { TokContext, types as tc } from "../../tokenizer/context";

@@ -1,3 +1,5 @@
+/* @flow */
+
 // The algorithm used to determine whether a regexp can appear at a
 // given point in the program is loosely based on sweet.js' approach.
 // See https://github.com/mozilla/sweet.js/wiki/design

@@ -5,12 +7,22 @@
 import { types as tt } from "./types";
 
 export class TokContext {
-  constructor(token, isExpr, preserveSpace, override) {
+  constructor(
+    token: string,
+    isExpr?: boolean,
+    preserveSpace?: boolean,
+    override?: Function,
+  ) {
     this.token = token;
     this.isExpr = !!isExpr;
     this.preserveSpace = !!preserveSpace;
     this.override = override;
   }
+
+  token: string;
+  isExpr: boolean;
+  preserveSpace: boolean;
+  override: ?Function;
 }
 
 export const types = {

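Note the asymmetry introduced in `TokContext`: the constructor parameter is optional (`override?: Function`) while the stored field is a maybe type (`override: ?Function`), because an omitted argument is still assigned to the field as `undefined`. A stripped-down sketch of the distinction (the `Ctx` class is illustrative):

    /* @flow */
    class Ctx {
      override: ?Function;   // the field may hold a function, null, or undefined

      constructor(override?: Function) {   // the argument itself may be omitted
        this.override = override;
      }
    }

    new Ctx();              // ok: override stays undefined
    new Ctx(() => "jsx");   // ok: an actual callback
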
@@ -1,3 +1,6 @@
+/* @flow */
+
+import type { TokenType } from "./types";
 import { isIdentifierStart, isIdentifierChar, isKeyword } from "../util/identifier";
 import { types as tt, keywords as keywordTypes } from "./types";
 import { types as ct } from "./context";

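`import type { TokenType } from "./types";` is a type-only import: Flow uses it during checking, and it is erased when the annotations are stripped, so it adds no runtime dependency. A minimal two-file illustration (file names are hypothetical):

    // types.js
    /* @flow */
    export class TokenType {
      label: string;
      constructor(label: string) { this.label = label; }
    }

    // consumer.js
    /* @flow */
    import type { TokenType } from "./types";    // erased at compile time

    function describe(tok: TokenType): string {  // usable in annotations only
      return tok.label;
    }
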
@@ -18,9 +21,11 @@ export class Token {
     this.loc = new SourceLocation(state.startLoc, state.endLoc);
   }
 
-  get range() {
-    return [this.start, this.end];
-  }
+  type: TokenType;
+  value: any;
+  start: number;
+  end: number;
+  loc: SourceLocation;
 }
 
 // ## Tokenizer

@@ -162,8 +167,7 @@ export default class Tokenizer {
       value: text,
       start: start,
       end: end,
-      loc: new SourceLocation(startLoc, endLoc),
-      range: [start, end]
+      loc: new SourceLocation(startLoc, endLoc)
     };
 
     if (!this.isLookahead) {

@@ -1,70 +1,120 @@
+/* @flow */
+
+import type { TokContext } from "./context";
+import type { Token } from "./index";
 import { Position } from "../util/location";
 import { types as ct } from "./context";
 import { types as tt } from "./types";
 
 export default class State {
-  init(options, input) {
-    // strict
+  init(options: Object, input: string) {
     this.strict = options.strictMode === false ? false : options.sourceType === "module";
 
     this.input = input;
 
-    // Used to signify the start of a potential arrow function
     this.potentialArrowAt = -1;
 
-    // Flags to track whether we are in a function, a generator.
     this.inFunction = this.inGenerator = false;
 
-    // Labels in scope.
     this.labels = [];
 
-    // Leading decorators.
     this.decorators = [];
 
-    // Token store.
     this.tokens = [];
 
-    // Comment store.
     this.comments = [];
 
-    // Comment attachment store
     this.trailingComments = [];
     this.leadingComments = [];
     this.commentStack = [];
 
-    // The current position of the tokenizer in the input.
     this.pos = this.lineStart = 0;
     this.curLine = 1;
 
-    // Properties of the current token:
-    // Its type
     this.type = tt.eof;
-    // For tokens that include more information than their type, the value
     this.value = null;
-    // Its start and end offset
     this.start = this.end = this.pos;
-    // And, if locations are used, the {line, column} object
-    // corresponding to those offsets
     this.startLoc = this.endLoc = this.curPosition();
 
-    // Position information for the previous token
     this.lastTokEndLoc = this.lastTokStartLoc = null;
     this.lastTokStart = this.lastTokEnd = this.pos;
 
-    // The context stack is used to superficially track syntactic
-    // context to predict whether a regular expression is allowed in a
-    // given position.
     this.context = [ct.b_stat];
     this.exprAllowed = true;
 
-    // Used to signal to callers of `readWord1` whether the word
-    // contained any escape sequences. This is needed because words with
-    // escape sequences must not be interpreted as keywords.
     this.containsEsc = false;
 
     return this;
   }
 
+  // TODO
+  strict: boolean;
+
+  // TODO
+  input: string;
+
+  // Used to signify the start of a potential arrow function
+  potentialArrowAt: number;
+
+  // Flags to track whether we are in a function, a generator.
+  inFunction: boolean;
+  inGenerator: boolean;
+
+  // Labels in scope.
+  labels: Array<Object>;
+
+  // Leading decorators.
+  decorators: Array<Object>;
+
+  // Token store.
+  tokens: Array<Object>;
+
+  // Comment store.
+  comments: Array<Object>;
+
+  // Comment attachment store
+  trailingComments: Array<Object>;
+  leadingComments: Array<Object>;
+  commentStack: Array<Object>;
+
+  // The current position of the tokenizer in the input.
+  pos: number;
+  lineStart: number;
+  curLine: number;
+
+  // Properties of the current token:
+  // Its type
+  type: Token;
+
+  // For tokens that include more information than their type, the value
+  value: any;
+
+  // Its start and end offset
+  start: number;
+  end: number;
+
+  // And, if locations are used, the {line, column} object
+  // corresponding to those offsets
+  startLoc: Position;
+  endLoc: Position;
+
+  // Position information for the previous token
+  lastTokEndLoc: ?Position;
+  lastTokStartLoc: ?Position;
+  lastTokStart: number;
+  lastTokEnd: number;
+
+  // The context stack is used to superficially track syntactic
+  // context to predict whether a regular expression is allowed in a
+  // given position.
+  context: Array<TokContext>;
+  exprAllowed: boolean;
+
+  // Used to signal to callers of `readWord1` whether the word
+  // contained any escape sequences. This is needed because words with
+  // escape sequences must not be interpreted as keywords.
+  containsEsc: boolean;
+
   curPosition() {
     return new Position(this.curLine, this.pos - this.lineStart);
   }

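With the `State` fields declared up front, Flow can check reads and writes of parser state against a single declared type rather than whatever each assignment happens to imply. A toy version of the pay-off, using a made-up `Cursor` class in place of the real `State`:

    /* @flow */
    class Cursor {
      pos: number;
      lineStart: number;

      init(): Cursor {
        this.pos = this.lineStart = 0;
        return this;
      }
    }

    const c = new Cursor().init();
    c.pos = 10;         // ok
    // c.pos = "ten";   // Flow error: string is incompatible with number
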
@@ -1,3 +1,5 @@
+/* @flow */
+
 // ## Token types
 
 // The assignment of fine-grained, information-carrying type objects

@@ -1,3 +1,5 @@
+/* @flow */
+
 // This is a trick taken from Esprima. It turns out that, on
 // non-Chrome browsers, to check whether a string is in a set, a
 // predicate containing a big ugly `switch` statement is faster than

@@ -1,3 +1,5 @@
+/* @flow */
+
 import { lineBreakG } from "./whitespace";
 
 // These are used when `options.locations` is on, for the

@@ -1,3 +1,5 @@
+/* @flow */
+
 // Matches a whole line break (where CRLF is considered a single
 // line break). Used to count lines.
 