diff --git a/packages/babel-parser/src/parser/node.js b/packages/babel-parser/src/parser/node.js
index 5ed447018d..ea04076bf3 100644
--- a/packages/babel-parser/src/parser/node.js
+++ b/packages/babel-parser/src/parser/node.js
@@ -29,8 +29,10 @@ class Node implements NodeBase {
 
   __clone(): this {
     // $FlowIgnore
-    const node2: any = new Node();
-    Object.keys(this).forEach(key => {
+    const newNode: any = new Node();
+    const keys = Object.keys(this);
+    for (let i = 0, length = keys.length; i < length; i++) {
+      const key = keys[i];
       // Do not clone comments that are already attached to the node
       if (
         key !== "leadingComments" &&
@@ -38,11 +40,11 @@ class Node implements NodeBase {
         key !== "innerComments"
       ) {
         // $FlowIgnore
-        node2[key] = this[key];
+        newNode[key] = this[key];
       }
-    });
+    }
 
-    return node2;
+    return newNode;
   }
 }
 
diff --git a/packages/babel-parser/src/tokenizer/state.js b/packages/babel-parser/src/tokenizer/state.js
index 794bc32014..9bc2b81c02 100644
--- a/packages/babel-parser/src/tokenizer/state.js
+++ b/packages/babel-parser/src/tokenizer/state.js
@@ -178,7 +178,9 @@ export default class State {
 
   clone(skipArrays?: boolean): State {
     const state = new State();
-    Object.keys(this).forEach(key => {
+    const keys = Object.keys(this);
+    for (let i = 0, length = keys.length; i < length; i++) {
+      const key = keys[i];
       // $FlowIgnore
       let val = this[key];
 
@@ -188,7 +190,8 @@ export default class State {
 
      // $FlowIgnore
       state[key] = val;
-    });
+    }
+
    return state;
  }
 }
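
For context (not part of the patch): both hunks apply the same change, replacing `Object.keys(this).forEach(key => { ... })` with an indexed `for` loop over a cached key array, so the clone paths no longer invoke a callback per property. Below is a minimal standalone sketch of that pattern; the `shallowClone` and `sample` names are illustrative, not from the source files.

```js
// Minimal sketch of the pattern used in the diff: iterate a cached key array
// with an indexed for loop instead of Object.keys(...).forEach(...).
// The names here (shallowClone, sample) are hypothetical.
function shallowClone(source) {
  const copy = {};
  const keys = Object.keys(source);
  // Cache keys.length once; no callback is allocated or invoked per property.
  for (let i = 0, length = keys.length; i < length; i++) {
    const key = keys[i];
    copy[key] = source[key];
  }
  return copy;
}

const sample = { type: "Identifier", name: "foo", start: 0, end: 3 };
console.log(shallowClone(sample)); // { type: 'Identifier', name: 'foo', start: 0, end: 3 }
```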