add some more flow types

commit c2d7e95e1a
parent effaf820c3
@@ -1,4 +1,5 @@
 [ignore]
+.*/packages/.*/lib
 
 [include]
 

@@ -10,6 +11,7 @@ lib/types.js
 [options]
 strip_root=true
 suppress_comment= \\(.\\|\n\\)*\\$FlowFixMe
+suppress_comment= \\(.\\|\n\\)*\\$FlowIssue
 
 [version]
-0.20.1
+0.21.0
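Note: `suppress_comment` entries in .flowconfig are regular expressions; a comment matching one of them silences the Flow error reported on the line that follows it. The hunk above allows `$FlowIssue` alongside the existing `$FlowFixMe`. A minimal sketch of how such a marker is used (the function here is invented for illustration):

    /* @flow */

    function stringLength(x: ?string): number {
      // $FlowIssue: suppresses the error Flow would otherwise report,
      // since `x` may be null or undefined at this point.
      return x.length;
    }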
@@ -14,7 +14,7 @@
 "derequire": "^2.0.2",
 "es5-shim": "^4.1.7",
 "eslint": "1.8.0",
-"flow-bin": "^0.20.0",
+"flow-bin": "^0.21.0",
 "fs-readdir-recursive": "^0.1.2",
 "gulp": "^3.9.0",
 "gulp-babel": "^5.3.0",
@@ -1,4 +1,4 @@
-/* @noflow */
+/* @flow */
 
 import * as t from "babel-types";
 

@@ -537,7 +537,7 @@ export default class File extends Store {
 }
 }
 
-makeResult({ code, map, ast, ignored } /*: BabelFileResult */): BabelFileResult {
+makeResult({ code, map, ast, ignored }: BabelFileResult): BabelFileResult {
 let result = {
 metadata: null,
 options: this.opts,
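The `makeResult` change above swaps Flow's comment-based annotation syntax (`/*: Type */`) for a regular inline annotation. Both spellings mean the same thing to the checker; the comment form just keeps the file valid plain JavaScript without a strip step. A small sketch with invented functions:

    /* @flow */

    // Comment-style annotation: ignored by the JS engine, read by Flow.
    function doubleA(n /*: number */) /*: number */ {
      return n * 2;
    }

    // Inline annotation: equivalent, but must be compiled away (e.g. by Babel).
    function doubleB(n: number): number {
      return n * 2;
    }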
@@ -1,17 +1,19 @@
-/* @noflow */
+/* @flow */
+/* global BabelFileResult */
+/* global BabelFileMetadata */
 
 import normalizeAst from "../helpers/normalize-ast";
 import Plugin from "./plugin";
 import File from "./file";
 
 export default class Pipeline {
-lint(code: string, opts?: Object = {}) {
+lint(code: string, opts?: Object = {}): BabelFileResult {
 opts.code = false;
 opts.mode = "lint";
 return this.transform(code, opts);
 }
 
-pretransform(code: string, opts?: Object) {
+pretransform(code: string, opts?: Object): BabelFileResult {
 let file = new File(opts, this);
 return file.wrap(code, function () {
 file.addCode(code);

@@ -20,7 +22,7 @@ export default class Pipeline {
 });
 }
 
-transform(code: string, opts?: Object) {
+transform(code: string, opts?: Object): BabelFileResult {
 let file = new File(opts, this);
 return file.wrap(code, function () {
 file.addCode(code);

@@ -29,7 +31,7 @@ export default class Pipeline {
 });
 }
 
-analyse(code: string, opts: Object = {}, visitor?) {
+analyse(code: string, opts: Object = {}, visitor?: Object): ?BabelFileMetadata {
 opts.code = false;
 if (visitor) {
 opts.plugins = opts.plugins || [];

@@ -38,7 +40,7 @@ export default class Pipeline {
 return this.transform(code, opts).metadata;
 }
 
-transformFromAst(ast, code: string, opts: Object) {
+transformFromAst(ast: Object, code: string, opts: Object): BabelFileResult {
 ast = normalizeAst(ast);
 
 let file = new File(opts, this);
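In the `analyse` signature above, `visitor?: Object` and `?BabelFileMetadata` use two different `?` forms: a trailing `?` on a parameter marks it optional (it may be omitted or `undefined`), while a leading `?` on a type is a maybe type (the value may also be `null`). A hedged sketch with made-up names:

    /* @flow */

    // `label` may be omitted entirely; the `?string` return type says the
    // result can be a string, null, or undefined.
    function describe(count: number, label?: string): ?string {
      if (count === 0) return null;
      return label ? label + ": " + String(count) : String(count);
    }

    describe(3);          // ok, label omitted
    describe(3, "items"); // ok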
@@ -1,4 +1,4 @@
-/* @noflow */
+/* @flow */
 
 import escapeRegExp from "lodash/string/escapeRegExp";
 import startsWith from "lodash/string/startsWith";

@@ -16,7 +16,7 @@ export { inherits, inspect } from "util";
 * Test if a filename ends with a compilable extension.
 */
 
-export function canCompile(filename: string, altExts?: Array<string>) {
+export function canCompile(filename: string, altExts?: Array<string>): boolean {
 let exts = altExts || canCompile.EXTENSIONS;
 let ext = path.extname(filename);
 return contains(exts, ext);
@@ -1,3 +1,5 @@
+/* @flow */
+
 import type Position from "./position";
 import repeating from "repeating";
 import trimRight from "trim-right";

@@ -21,17 +23,19 @@ export default class Buffer {
 this.last = "";
 }
 
+printedCommentStarts: Object;
 parenPushNewlineState: ?Object;
-buf: string;
 position: Position;
 _indent: number;
 format: Object;
+buf: string;
+last: string;
 
 /**
 * Description
 */
 
-catchUp(node) {
+catchUp(node: Object) {
 // catch up to this nodes newline if we're behind
 if (node.loc && this.format.retainLines && this.buf) {
 while (this.position.line < node.loc.start.line) {

@@ -44,7 +48,7 @@ export default class Buffer {
 * Get the current trimmed buffer.
 */
 
-get() {
+get(): string {
 return trimRight(this.buf);
 }
 

@@ -52,7 +56,7 @@ export default class Buffer {
 * Get the current indent.
 */
 
-getIndent() {
+getIndent(): string {
 if (this.format.compact || this.format.concise) {
 return "";
 } else {

@@ -64,7 +68,7 @@ export default class Buffer {
 * Get the current indent size.
 */
 
-indentSize() {
+indentSize(): number {
 return this.getIndent().length;
 }
 

@@ -222,7 +226,7 @@ export default class Buffer {
 
 _removeSpacesAfterLastNewline() {
 let lastNewlineIndex = this.buf.lastIndexOf("\n");
-if (lastNewlineIndex >= 0 && this.buf.trimRight().length <= lastNewlineIndex) {
+if (lastNewlineIndex >= 0 && this.get().length <= lastNewlineIndex) {
 this.buf = this.buf.substring(0, lastNewlineIndex + 1);
 this.last = "\n";
 }

@@ -251,7 +255,7 @@ export default class Buffer {
 * Push a string to the buffer.
 */
 
-_push(str) {
+_push(str: string): void {
 // see startTerminatorless() instance method
 let parenPushNewlineState = this.parenPushNewlineState;
 if (parenPushNewlineState) {

@@ -296,12 +300,12 @@ export default class Buffer {
 * Test if a character is last in the buffer.
 */
 
-isLast(cha: string) {
+isLast(cha: string): boolean {
 if (this.format.compact) return false;
 return this._isLast(cha);
 }
 
-_isLast(cha: string) {
+_isLast(cha: string): boolean {
 let last = this.last;
 
 if (Array.isArray(cha)) {
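The bare `name: Type;` lines added in the class hunk above are Flow class field declarations: they declare the types of instance properties that the constructor assigns, and they are erased at compile time. A minimal sketch, not Babel's actual Buffer class:

    /* @flow */

    class Counter {
      // Type-only field declarations, stripped from the compiled output.
      count: number;
      label: ?string;

      constructor(label?: string) {
        this.count = 0;
        this.label = label;
      }

      increment(): number {
        return ++this.count;
      }
    }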
@@ -1,3 +1,5 @@
+/* @flow */
+
 export function JSXAttribute(node: Object) {
 this.print(node.name, node);
 if (node.value) {

@@ -1,3 +1,5 @@
+/* @flow */
+
 import repeating from "repeating";
 import * as t from "babel-types";
 
@@ -1,8 +1,15 @@
+/* @noflow */
+
 import isBoolean from "lodash/lang/isBoolean";
 import each from "lodash/collection/each";
 import map from "lodash/collection/map";
 import * as t from "babel-types";
 
+type WhitespaceObject = {
+before?: boolean,
+after?: boolean
+};
+
 /**
 * Crawl a node to test if it contains a CallExpression, a Function, or a Helper.
 *

@@ -63,7 +70,7 @@ exports.nodes = {
 * Test if AssignmentExpression needs whitespace.
 */
 
-AssignmentExpression(node) {
+AssignmentExpression(node: Object): ?WhitespaceObject {
 let state = crawl(node.right);
 if ((state.hasCall && state.hasHelper) || state.hasFunction) {
 return {

@@ -77,7 +84,7 @@ exports.nodes = {
 * Test if SwitchCase needs whitespace.
 */
 
-SwitchCase(node, parent) {
+SwitchCase(node: Object, parent: Object): ?WhitespaceObject {
 return {
 before: node.consequent.length || parent.cases[0] === node
 };

@@ -87,7 +94,7 @@ exports.nodes = {
 * Test if LogicalExpression needs whitespace.
 */
 
-LogicalExpression(node) {
+LogicalExpression(node: Object): ?WhitespaceObject {
 if (t.isFunction(node.left) || t.isFunction(node.right)) {
 return {
 after: true

@@ -99,7 +106,7 @@ exports.nodes = {
 * Test if Literal needs whitespace.
 */
 
-Literal(node) {
+Literal(node: Object): ?WhitespaceObject {
 if (node.value === "use strict") {
 return {
 after: true

@@ -111,7 +118,7 @@ exports.nodes = {
 * Test if CallExpression needs whitespace.
 */
 
-CallExpression(node) {
+CallExpression(node: Object): ?WhitespaceObject {
 if (t.isFunction(node.callee) || isHelper(node)) {
 return {
 before: true,

@@ -124,7 +131,7 @@ exports.nodes = {
 * Test if VariableDeclaration needs whitespace.
 */
 
-VariableDeclaration(node) {
+VariableDeclaration(node: Object): ?WhitespaceObject {
 for (let i = 0; i < node.declarations.length; i++) {
 let declar = node.declarations[i];
 

@@ -147,7 +154,7 @@ exports.nodes = {
 * Test if IfStatement needs whitespace.
 */
 
-IfStatement(node) {
+IfStatement(node: Object): ?WhitespaceObject {
 if (t.isBlockStatement(node.consequent)) {
 return {
 before: true,

@@ -163,7 +170,7 @@ exports.nodes = {
 
 exports.nodes.ObjectProperty =
 exports.nodes.ObjectMethod =
-exports.nodes.SpreadProperty = function (node, parent) {
+exports.nodes.SpreadProperty = function (node: Object, parent): ?WhitespaceObject {
 if (parent.properties[0] === node) {
 return {
 before: true

@@ -181,7 +188,7 @@ exports.list = {
 * Return VariableDeclaration declarations init properties.
 */
 
-VariableDeclaration(node) {
+VariableDeclaration(node: Object): Array<Object> {
 return map(node.declarations, "init");
 },
 

@@ -189,7 +196,7 @@ exports.list = {
 * Return VariableDeclaration elements.
 */
 
-ArrayExpression(node) {
+ArrayExpression(node: Object): Array<Object> {
 return node.elements;
 },
 

@@ -197,7 +204,7 @@ exports.list = {
 * Return VariableDeclaration properties.
 */
 
-ObjectExpression(node) {
+ObjectExpression(node: Object): Array<Object> {
 return node.properties;
 }
 };
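The new `WhitespaceObject` alias uses optional object properties (`before?`, `after?`), and each handler now returns `?WhitespaceObject`, i.e. either such an object or nothing. Roughly, following the shape in the diff but with an invented handler:

    /* @flow */

    type WhitespaceObject = {
      before?: boolean,
      after?: boolean
    };

    // A handler in this style may return spacing hints or nothing at all.
    function exampleHandler(hasCall: boolean): ?WhitespaceObject {
      if (hasCall) {
        return { before: true, after: true };
      }
      // Falling through returns undefined, which ?WhitespaceObject permits.
    }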
@@ -28,14 +28,14 @@ export let ReferencedMemberExpression = {
 
 export let BindingIdentifier = {
 types: ["Identifier"],
-checkPath({ node, parent } /*: NodePath */): boolean {
+checkPath({ node, parent }: NodePath): boolean {
 return t.isIdentifier(node) && t.isBinding(node, parent);
 }
 };
 
 export let Statement = {
 types: ["Statement"],
-checkPath({ node, parent } /*: NodePath */): boolean {
+checkPath({ node, parent }: NodePath): boolean {
 if (t.isStatement(node)) {
 if (t.isVariableDeclaration(node)) {
 if (t.isForXStatement(parent, { left: node })) return false;

@@ -106,7 +106,7 @@ export let Pure = {
 
 export let Flow = {
 types: ["Flow", "ImportDeclaration", "ExportDeclaration"],
-checkPath({ node } /*: NodePath */): boolean {
+checkPath({ node }: NodePath): boolean {
 if (t.isFlow(node)) {
 return true;
 } else if (t.isImportDeclaration(node)) {
@@ -1,3 +1,5 @@
+/* @noflow */
+
 import isPlainObject from "lodash/lang/isPlainObject";
 import isNumber from "lodash/lang/isNumber";
 import isRegExp from "lodash/lang/isRegExp";

@@ -218,7 +220,7 @@ export function toExpression(node: Object): Object {
 }
 }
 
-export function toBlock(node, parent: Object): Object {
+export function toBlock(node: Object, parent: Object): Object {
 if (t.isBlockStatement(node)) {
 return node;
 }
@@ -1,3 +1,5 @@
+/* @flow */
+
 import * as t from "./index";
 
 /**

@@ -19,7 +21,7 @@ export function createUnionTypeAnnotation(types: Array<Object>) {
 * Dedupe type annotations.
 */
 
-export function removeTypeDuplicates(nodes: Array<Object>) {
+export function removeTypeDuplicates(nodes: Array<Object>): Array<Object> {
 let generics = {};
 let bases = {};
 
@@ -1,4 +1,4 @@
-/* @noflow */
+/* @flow */
 
 import toFastProperties from "to-fast-properties";
 import compact from "lodash/array/compact";

@@ -406,6 +406,7 @@ export function inherits(child: Object, parent: Object): Object {
 
 export function assertNode(node?) {
 if (!isNode(node)) {
+// $FlowFixMe
 throw new TypeError("Not a valid node " + (node && node.type));
 }
 }
@@ -1,3 +1,5 @@
+/* @flow */
+
 import { getBindingIdentifiers } from "./retrievers";
 import esutils from "esutils";
 import * as t from "./index";

@@ -67,7 +69,7 @@ export function isReferenced(node: Object, parent: Object): boolean {
 case "ArrowFunctionExpression":
 case "FunctionDeclaration":
 case "FunctionExpression":
-for (let param of (parent.params: Array)) {
+for (let param of (parent.params: Array<any>)) {
 if (param === node) return false;
 }
 
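`(parent.params: Array<any>)` in the hunk above is a Flow type cast expression: it annotates the wrapped expression in place and adds nothing at runtime. A small sketch with illustrative names:

    /* @flow */

    function paramCount(node: Object): number {
      // `node.params` comes out of a loosely typed Object, so the expression
      // is annotated inline, mirroring `(parent.params: Array<any>)` above.
      return (node.params: Array<any>).length;
    }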
@@ -118,7 +118,7 @@ pp.processComment = function (node) {
 // result in an empty array, and if so, the array must be
 // deleted.
 node.leadingComments = this.state.leadingComments.slice(0, i);
-if (node.leadingComments.length === 0) {
+if ((node.leadingComments: Array<any>).length === 0) {
 node.leadingComments = null;
 }
 
@@ -1,4 +1,4 @@
-/* @noflow */
+/* @flow */
 
 import { reservedWords } from "../util/identifier";
 import { getOptions } from "../options";

@@ -7,7 +7,7 @@ import Tokenizer from "../tokenizer";
 export const plugins = {};
 
 export default class Parser extends Tokenizer {
-constructor(options, input: string) {
+constructor(options: Object, input: string) {
 options = getOptions(options);
 super(options, input);
 

@@ -31,7 +31,7 @@ export default class Parser extends Tokenizer {
 this[name] = f(this[name]);
 }
 
-loadPlugins(plugins: Array<string>) {
+loadPlugins(plugins: Array<string>): Object {
 let pluginMap = {};
 
 if (plugins.indexOf("flow") >= 0) {
@@ -26,7 +26,9 @@ export class TokContext {
 override: ?Function;
 }
 
-export const types = {
+export const types: {
+[key: string]: TokContext;
+} = {
 b_stat: new TokContext("{", false),
 b_expr: new TokContext("{", true),
 b_tmpl: new TokContext("${", true),

@@ -57,7 +59,7 @@ tt.parenR.updateContext = tt.braceR.updateContext = function () {
 
 tt.name.updateContext = function (prevType) {
 this.state.exprAllowed = false;
 
 if (prevType === tt._let || prevType === tt._const || prevType === tt._var) {
 if (lineBreak.test(this.input.slice(this.state.end))) {
 this.state.exprAllowed = true;
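The annotation added to `types` above is an object type with an indexer property, `{ [key: string]: TokContext }`, meaning any string key maps to a `TokContext` value. A minimal sketch with an invented class:

    /* @flow */

    class Mode {
      token: string;
      constructor(token: string) {
        this.token = token;
      }
    }

    // Indexer property: every string key yields a Mode.
    const modes: { [key: string]: Mode } = {
      block: new Mode("{"),
      template: new Mode("${")
    };

    const current: Mode = modes.block; // reads are checked against Mode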
@@ -1,3 +1,5 @@
+/* @noflow */
+
 import type { TokenType } from "./types";
 import { isIdentifierStart, isIdentifierChar, isKeyword } from "../util/identifier";
 import { types as tt, keywords as keywordTypes } from "./types";