From 5f0ece0bdbe67fe2c4aa7ea1765547d0a7feb48b Mon Sep 17 00:00:00 2001
From: Henry Zhu
Date: Fri, 18 Dec 2015 01:19:06 -0500
Subject: [PATCH] add some more flow types

---
 src/parser/comments.js   | 2 +-
 src/parser/index.js      | 6 +++---
 src/tokenizer/context.js | 6 ++++--
 src/tokenizer/index.js   | 2 ++
 4 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/src/parser/comments.js b/src/parser/comments.js
index 344ba7e2c0..38f2081322 100644
--- a/src/parser/comments.js
+++ b/src/parser/comments.js
@@ -118,7 +118,7 @@ pp.processComment = function (node) {
       // result in an empty array, and if so, the array must be
       // deleted.
       node.leadingComments = this.state.leadingComments.slice(0, i);
-      if (node.leadingComments.length === 0) {
+      if ((node.leadingComments: Array).length === 0) {
         node.leadingComments = null;
       }
 
diff --git a/src/parser/index.js b/src/parser/index.js
index 1440ea1a41..14a3ff4b72 100644
--- a/src/parser/index.js
+++ b/src/parser/index.js
@@ -1,4 +1,4 @@
-/* @noflow */
+/* @flow */
 
 import { reservedWords } from "../util/identifier";
 import { getOptions } from "../options";
@@ -7,7 +7,7 @@ import Tokenizer from "../tokenizer";
 export const plugins = {};
 
 export default class Parser extends Tokenizer {
-  constructor(options, input: string) {
+  constructor(options: Object, input: string) {
     options = getOptions(options);
     super(options, input);
 
@@ -31,7 +31,7 @@ export default class Parser extends Tokenizer {
     this[name] = f(this[name]);
   }
 
-  loadPlugins(plugins: Array) {
+  loadPlugins(plugins: Array): Object {
     let pluginMap = {};
 
     if (plugins.indexOf("flow") >= 0) {
diff --git a/src/tokenizer/context.js b/src/tokenizer/context.js
index a8e592da6e..fb1bcc57f6 100644
--- a/src/tokenizer/context.js
+++ b/src/tokenizer/context.js
@@ -26,7 +26,9 @@ export class TokContext {
   override: ?Function;
 }
 
-export const types = {
+export const types: {
+  [key: string]: TokContext;
+} = {
   b_stat: new TokContext("{", false),
   b_expr: new TokContext("{", true),
   b_tmpl: new TokContext("${", true),
@@ -57,7 +59,7 @@ tt.parenR.updateContext = tt.braceR.updateContext = function () {
 
 tt.name.updateContext = function (prevType) {
   this.state.exprAllowed = false;
-  
+
   if (prevType === tt._let || prevType === tt._const || prevType === tt._var) {
     if (lineBreak.test(this.input.slice(this.state.end))) {
       this.state.exprAllowed = true;
diff --git a/src/tokenizer/index.js b/src/tokenizer/index.js
index 8e610f77dd..7203e30aac 100644
--- a/src/tokenizer/index.js
+++ b/src/tokenizer/index.js
@@ -1,3 +1,5 @@
+/* @noflow */
+
 import type { TokenType } from "./types";
 import { isIdentifierStart, isIdentifierChar, isKeyword } from "../util/identifier";
 import { types as tt, keywords as keywordTypes } from "./types";
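
Note on the Flow idioms the hunks above rely on: an inline type cast ((expr: Type)), annotated constructor/method parameters and return types, and an object indexer type ({ [key: string]: TokContext }). Below is a minimal standalone sketch of the same idioms; the names firstComment, Registry, and registry are hypothetical and do not appear in the Babylon source.

/* @flow */

// Inline type cast, in the style of (node.leadingComments: Array) above:
// wrap the expression in parentheses and annotate it in place.
function firstComment(comments: ?Array<string>): ?string {
  if (comments && (comments: Array<string>).length > 0) {
    return comments[0];
  }
  return null;
}

// Annotated parameters plus an indexer ("dictionary") type, in the style
// of the exported `types` map of TokContext instances in context.js.
class Registry {
  label: string;
  constructor(label: string) {
    this.label = label;
  }
}

const registry: { [key: string]: Registry } = {
  start: new Registry("start"),
  end: new Registry("end"),
};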