Prepare @babel/core for asynchronicity (#10507)
* Prepare @babel/core for asynchronicity
* Include regenerator-runtime in standalone build
* Fix rollup build

Parent: 9fec528016, commit: 282f81bd67
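Illustrative sketch (not part of this commit's diff): the gensync pattern that the whole change builds on. One generator-based definition is compiled into synchronous, Promise-based, and callback-based entry points. Everything below is a made-up example; only the "gensync" package itself is real.

const gensync = require("gensync");
const fs = require("fs");

// A sync + errback pair becomes one composable "handler" factory.
const readJson = gensync({
  sync: fs.readFileSync,
  errback: fs.readFile,
});

const loadJson = gensync(function*(filename) {
  // yield* delegates to another gensync handler; gensync decides at runtime
  // whether to run it blocking (sync caller) or via callbacks (async caller).
  const source = yield* readJson(filename, "utf8");
  return JSON.parse(source);
});

loadJson.sync("./package.json");                                  // blocking
loadJson.async("./package.json").then(pkg => console.log(pkg.name)); // Promise
loadJson.errback("./package.json", (err, pkg) => {});             // Node-style callback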
@@ -183,6 +183,7 @@ function buildRollup(packages) {
    rollupCommonJs({
      include: [
        /node_modules/,
        "packages/babel-runtime/regenerator/**",
        "packages/babel-preset-env/data/*.js",
        // Rollup doesn't read export maps, so it loads the cjs fallback
        "packages/babel-compat-data/*.js",

@@ -14,6 +14,10 @@ module.exports = function(api) {

  let convertESM = true;
  let ignoreLib = true;
  let includeRegeneratorRuntime = false;

  let transformRuntimeOptions;

  const nodeVersion = "6.9";
  // The vast majority of our src files are modules, but we use
  // unambiguous to keep things simple until we get around to renaming
@@ -29,19 +33,19 @@ module.exports = function(api) {

  switch (env) {
    // Configs used during bundling builds.
    case "rollup":
      envOpts.targets = {
        node: nodeVersion,
      };
    case "standalone":
      includeRegeneratorRuntime = true;
      unambiguousSources.push("packages/babel-runtime/regenerator");
    case "rollup":
      convertESM = false;
      ignoreLib = false;
      // rollup-commonjs will convert node_modules to ESM
      unambiguousSources.push(
        "**/node_modules",
        "packages/babel-preset-env/data",
        "packages/babel-compat-data"
      );
      // targets to browserslists: defaults
      if (env === "rollup") envOpts.targets = { node: nodeVersion };
      break;
    case "production":
      // Config during builds before publish.
@@ -62,6 +66,16 @@ module.exports = function(api) {
      break;
  }

  if (includeRegeneratorRuntime) {
    const babelRuntimePkgPath = require.resolve("@babel/runtime/package.json");

    transformRuntimeOptions = {
      helpers: false, // Helpers are handled by rollup when needed
      regenerator: true,
      version: require(babelRuntimePkgPath).version,
    };
  }

  const config = {
    // Our dependencies are all standard CommonJS, along with all sorts of
    // other random files in Babel's codebase, so we use script as the default,
@@ -119,6 +133,10 @@ module.exports = function(api) {
      test: unambiguousSources,
      sourceType: "unambiguous",
    },
    includeRegeneratorRuntime && {
      exclude: /regenerator-runtime/,
      plugins: [["@babel/transform-runtime", transformRuntimeOptions]],
    },
  ].filter(Boolean),
  };

@@ -4,6 +4,7 @@

declare module "resolve" {
  declare export default {
    (string, {| basedir: string |}, (err: ?Error, res: string) => void): void;
    sync: (string, {| basedir: string |}) => string;
  };
}

@@ -49,6 +49,7 @@
    "@babel/types": "^7.7.4",
    "convert-source-map": "^1.7.0",
    "debug": "^4.1.0",
    "gensync": "^1.0.0-beta.1",
    "json5": "^2.1.0",
    "lodash": "^4.17.13",
    "resolve": "^1.3.2",

@@ -1,5 +1,17 @@
// @flow

import gensync, { type Handler } from "gensync";
import {
  maybeAsync,
  isAsync,
  onFirstPause,
  waitFor,
  isThenable,
} from "../gensync-utils/async";
import { isIterableIterator } from "./util";

export type { CacheConfigurator };

export type SimpleCacheConfigurator = SimpleCacheConfiguratorFn &
  SimpleCacheConfiguratorObj;

@@ -14,91 +26,223 @@ type SimpleCacheConfiguratorObj = {
  invalidate: <T>(handler: () => T) => T,
};

type CacheEntry<ResultT, SideChannel> = Array<{
export type CacheEntry<ResultT, SideChannel> = Array<{
  value: ResultT,
  valid: SideChannel => boolean,
  valid: SideChannel => Handler<boolean>,
}>;

export type { CacheConfigurator };
const synchronize = <ArgsT, ResultT>(
  gen: (...ArgsT) => Handler<ResultT>,
  // $FlowIssue https://github.com/facebook/flow/issues/7279
): ((...args: ArgsT) => ResultT) => {
  return gensync(gen).sync;
};

/**
 * Given a function with a single argument, cache its results based on its argument and how it
 * configures its caching behavior. Cached values are stored strongly.
 */
export function makeStrongCache<ArgT, ResultT, SideChannel>(
  handler: (ArgT, CacheConfigurator<SideChannel>) => ResultT,
): (ArgT, SideChannel) => ResultT {
  return makeCachedFunction(new Map(), handler);
// eslint-disable-next-line require-yield, no-unused-vars
function* genTrue(data: any) {
  return true;
}

/**
 * Given a function with a single argument, cache its results based on its argument and how it
 * configures its caching behavior. Cached values are stored weakly and the function argument must be
 * an object type.
 */
export function makeWeakCache<
  ArgT: {} | Array<*> | $ReadOnlyArray<*>,
  ResultT,
  SideChannel,
>(
export function makeWeakCache<ArgT, ResultT, SideChannel>(
  handler: (ArgT, CacheConfigurator<SideChannel>) => Handler<ResultT> | ResultT,
): (ArgT, SideChannel) => Handler<ResultT> {
  return makeCachedFunction<ArgT, ResultT, SideChannel, *>(WeakMap, handler);
}

export function makeWeakCacheSync<ArgT, ResultT, SideChannel>(
  handler: (ArgT, CacheConfigurator<SideChannel>) => ResultT,
): (ArgT, SideChannel) => ResultT {
  return makeCachedFunction(new WeakMap(), handler);
  return synchronize<[ArgT, SideChannel], ResultT>(
    makeWeakCache<ArgT, ResultT, SideChannel>(handler),
  );
}

export function makeStrongCache<ArgT, ResultT, SideChannel>(
  handler: (ArgT, CacheConfigurator<SideChannel>) => Handler<ResultT> | ResultT,
): (ArgT, SideChannel) => Handler<ResultT> {
  return makeCachedFunction<ArgT, ResultT, SideChannel, *>(Map, handler);
}

export function makeStrongCacheSync<ArgT, ResultT, SideChannel>(
  handler: (ArgT, CacheConfigurator<SideChannel>) => ResultT,
): (ArgT, SideChannel) => ResultT {
  return synchronize<[ArgT, SideChannel], ResultT>(
    makeStrongCache<ArgT, ResultT, SideChannel>(handler),
  );
}

/* NOTE: Part of the logic explained in this comment is explained in the
 * getCachedValueOrWait and setupAsyncLocks functions.
 *
 * > There are only two hard things in Computer Science: cache invalidation and naming things.
 * > -- Phil Karlton
 *
 * I don't know if Phil was also thinking about handling a cache whose invalidation function is
 * defined asynchronously, but it is REALLY hard to do correctly.
 *
 * The implemented logic (only when gensync is run asynchronously) is the following:
 *   1. If there is a valid cache associated to the current "arg" parameter,
 *      a. RETURN the cached value
 *   3. If there is a FinishLock associated to the current "arg" parameter representing a valid cache,
 *      a. Wait for that lock to be released
 *      b. RETURN the value associated with that lock
 *   5. Start executing the function to be cached
 *      a. If it pauses on a promise, then
 *         i. Let FinishLock be a new lock
 *         ii. Store FinishLock as associated to the current "arg" parameter
 *         iii. Wait for the function to finish executing
 *         iv. Release FinishLock
 *         v. Send the function result to anyone waiting on FinishLock
 *   6. Store the result in the cache
 *   7. RETURN the result
 */
function makeCachedFunction<ArgT, ResultT, SideChannel, Cache: *>(
  CallCache: Class<Cache>,
  handler: (ArgT, CacheConfigurator<SideChannel>) => Handler<ResultT> | ResultT,
): (ArgT, SideChannel) => Handler<ResultT> {
  const callCacheSync = new CallCache();
  const callCacheAsync = new CallCache();
  const futureCache = new CallCache();

  return function* cachedFunction(arg: ArgT, data: SideChannel) {
    const asyncContext = yield* isAsync();
    const callCache = asyncContext ? callCacheAsync : callCacheSync;

    const cached = yield* getCachedValueOrWait(
      asyncContext,
      callCache,
      futureCache,
      arg,
      data,
    );
    if (cached.valid) return cached.value;

    const cache = new CacheConfigurator(data);

    const handlerResult = handler(arg, cache);

    let finishLock: ?Lock<ResultT>;
    let value: ResultT;

    if (isIterableIterator(handlerResult)) {
      // Flow refines handlerResult to Generator<any, any, any>
      const gen = (handlerResult: Generator<*, ResultT, *>);

      value = yield* onFirstPause(gen, () => {
        finishLock = setupAsyncLocks(cache, futureCache, arg);
      });
    } else {
      // $FlowIgnore doesn't refine handlerResult to ResultT
      value = (handlerResult: ResultT);
    }

    updateFunctionCache(callCache, cache, arg, value);

    if (finishLock) {
      futureCache.delete(arg);
      finishLock.release(value);
    }

    return value;
  };
}

type CacheMap<ArgT, ResultT, SideChannel> =
  | Map<ArgT, CacheEntry<ResultT, SideChannel>>
  | WeakMap<ArgT, CacheEntry<ResultT, SideChannel>>;

function makeCachedFunction<
function* getCachedValue<
  ArgT,
  ResultT,
  SideChannel,
  // $FlowIssue https://github.com/facebook/flow/issues/4528
  Cache: CacheMap<ArgT, ResultT, SideChannel>,
>(
  callCache: Cache,
  handler: (ArgT, CacheConfigurator<SideChannel>) => ResultT,
): (ArgT, SideChannel) => ResultT {
  return function cachedFunction(arg, data) {
    let cachedValue: CacheEntry<ResultT, SideChannel> | void = callCache.get(
      arg,
    );
  cache: Cache,
  arg: ArgT,
  data: SideChannel,
): Handler<{ valid: true, value: ResultT } | { valid: false, value: null }> {
  const cachedValue: CacheEntry<ResultT, SideChannel> | void = cache.get(arg);

  if (cachedValue) {
    for (const { value, valid } of cachedValue) {
      if (valid(data)) return value;
      if (yield* valid(data)) return { valid: true, value };
    }
  }

    const cache = new CacheConfigurator(data);
  return { valid: false, value: null };
}

    const value = handler(arg, cache);
function* getCachedValueOrWait<ArgT, ResultT, SideChannel>(
  asyncContext: boolean,
  callCache: CacheMap<ArgT, ResultT, SideChannel>,
  futureCache: CacheMap<ArgT, Lock<ResultT>, SideChannel>,
  arg: ArgT,
  data: SideChannel,
): Handler<{ valid: true, value: ResultT } | { valid: false, value: null }> {
  const cached = yield* getCachedValue(callCache, arg, data);
  if (cached.valid) {
    return cached;
  }

    if (!cache.configured()) cache.forever();
  if (asyncContext) {
    const cached = yield* getCachedValue(futureCache, arg, data);
    if (cached.valid) {
      const value = yield* waitFor<ResultT>(cached.value.promise);
      return { valid: true, value };
    }
  }

    cache.deactivate();
  return { valid: false, value: null };
}

    switch (cache.mode()) {
function setupAsyncLocks<ArgT, ResultT, SideChannel>(
  config: CacheConfigurator<SideChannel>,
  futureCache: CacheMap<ArgT, Lock<ResultT>, SideChannel>,
  arg: ArgT,
): Lock<ResultT> {
  const finishLock = new Lock<ResultT>();

  updateFunctionCache(futureCache, config, arg, finishLock);

  return finishLock;
}

function updateFunctionCache<
  ArgT,
  ResultT,
  SideChannel,
  // $FlowIssue https://github.com/facebook/flow/issues/4528
  Cache: CacheMap<ArgT, ResultT, SideChannel>,
>(
  cache: Cache,
  config: CacheConfigurator<SideChannel>,
  arg: ArgT,
  value: ResultT,
) {
  if (!config.configured()) config.forever();

  let cachedValue: CacheEntry<ResultT, SideChannel> | void = cache.get(arg);

  config.deactivate();

  switch (config.mode()) {
    case "forever":
      cachedValue = [{ value, valid: () => true }];
      callCache.set(arg, cachedValue);
      cachedValue = [{ value, valid: genTrue }];
      cache.set(arg, cachedValue);
      break;
    case "invalidate":
      cachedValue = [{ value, valid: cache.validator() }];
      callCache.set(arg, cachedValue);
      cachedValue = [{ value, valid: config.validator() }];
      cache.set(arg, cachedValue);
      break;
    case "valid":
      if (cachedValue) {
        cachedValue.push({ value, valid: cache.validator() });
        cachedValue.push({ value, valid: config.validator() });
      } else {
        cachedValue = [{ value, valid: cache.validator() }];
        callCache.set(arg, cachedValue);
        cachedValue = [{ value, valid: config.validator() }];
        cache.set(arg, cachedValue);
      }
  }

    return value;
  };
}

class CacheConfigurator<SideChannel = void> {
@@ -109,7 +253,7 @@ class CacheConfigurator<SideChannel = void> {

  _configured: boolean = false;

  _pairs: Array<[mixed, (SideChannel) => mixed]> = [];
  _pairs: Array<[mixed, (SideChannel) => Handler<mixed>]> = [];

  _data: SideChannel;

@@ -162,30 +306,36 @@ class CacheConfigurator<SideChannel = void> {
    this._configured = true;

    const key = handler(this._data);
    this._pairs.push([key, handler]);

    const fn = maybeAsync(
      handler,
      `You appear to be using an async cache handler, but Babel has been called synchronously`,
    );

    if (isThenable(key)) {
      return key.then(key => {
        this._pairs.push([key, fn]);
        return key;
      });
    }

    this._pairs.push([key, fn]);
    return key;
  }

  invalidate<T>(handler: SideChannel => T): T {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }
    if (this._never || this._forever) {
      throw new Error(
        "Caching has already been configured with .never or .forever()",
      );
    }
    this._invalidate = true;
    this._configured = true;

    const key = handler(this._data);
    this._pairs.push([key, handler]);
    return key;
    return this.using(handler);
  }

  validator(): SideChannel => boolean {
  validator(): SideChannel => Handler<boolean> {
    const pairs = this._pairs;
    return (data: SideChannel) => pairs.every(([key, fn]) => key === fn(data));
    return function*(data: SideChannel) {
      for (const [key, fn] of pairs) {
        if (key !== (yield* fn(data))) return false;
      }
      return true;
    };
  }

  deactivate() {
@@ -219,8 +369,18 @@ function makeSimpleConfigurator(

// Types are limited here so that in the future these values can be used
// as part of Babel's caching logic.
type SimpleType = string | boolean | number | null | void;
type SimpleType = string | boolean | number | null | void | Promise<SimpleType>;
export function assertSimpleType(value: mixed): SimpleType {
  if (isThenable(value)) {
    throw new Error(
      `You appear to be using an async cache handler, ` +
        `which your current version of Babel does not support. ` +
        `We may add support for this in the future, ` +
        `but if you're on the most recent version of @babel/core and still ` +
        `seeing this error, then you'll need to synchronously handle your caching logic.`,
    );
  }

  if (
    value != null &&
    typeof value !== "string" &&
@@ -233,3 +393,20 @@ export function assertSimpleType(value: mixed): SimpleType {
  }
  return value;
}

class Lock<T> {
  released: boolean = false;
  promise: Promise<T>;
  _resolve: (value: T) => void;

  constructor() {
    this.promise = new Promise(resolve => {
      this._resolve = resolve;
    });
  }

  release(value: T) {
    this.released = true;
    this._resolve(value);
  }
}

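Illustrative sketch (not part of the diff) of what the reworked caching API allows: cache handlers may now be generators, and their invalidation callbacks become gensync handlers when Babel is run asynchronously. `statMtime`, `parseFile`, and `parseFileSync` are hypothetical helpers used only for this example.

const cachedParse = makeStrongCache(function*(filename, cache) {
  // Re-run the handler (and refresh the cached entry) whenever the key changes.
  cache.invalidate(() => statMtime(filename));
  // A generator handler can itself pause on other gensync handlers.
  return yield* parseFile(filename);
});

// Callers that must stay synchronous keep the old shape via the *Sync
// wrappers, which run the same machinery through gensync's .sync() runner.
const cachedParseSync = makeStrongCacheSync((filename, cache) => {
  cache.forever();
  return parseFileSync(filename);
});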
@@ -2,6 +2,7 @@

import path from "path";
import buildDebug from "debug";
import type { Handler } from "gensync";
import {
  validate,
  type ValidatedOptions,
@@ -24,7 +25,7 @@ import {
  type FilePackageData,
} from "./files";

import { makeWeakCache, makeStrongCache } from "./caching";
import { makeWeakCacheSync, makeStrongCacheSync } from "./caching";

import {
  createCachedDescriptors,
@@ -57,11 +58,11 @@ export type ConfigContext = {
/**
 * Build a config chain for a given preset.
 */
export function buildPresetChain(
export function* buildPresetChain(
  arg: PresetInstance,
  context: *,
): ConfigChain | null {
  const chain = buildPresetChainWalker(arg, context);
): Handler<ConfigChain | null> {
  const chain = yield* buildPresetChainWalker(arg, context);
  if (!chain) return null;

  return {
@@ -82,11 +83,11 @@ export const buildPresetChainWalker: (
  overridesEnv: (preset, index, envName) =>
    loadPresetOverridesEnvDescriptors(preset)(index)(envName),
});
const loadPresetDescriptors = makeWeakCache((preset: PresetInstance) =>
const loadPresetDescriptors = makeWeakCacheSync((preset: PresetInstance) =>
  buildRootDescriptors(preset, preset.alias, createUncachedDescriptors),
);
const loadPresetEnvDescriptors = makeWeakCache((preset: PresetInstance) =>
  makeStrongCache((envName: string) =>
const loadPresetEnvDescriptors = makeWeakCacheSync((preset: PresetInstance) =>
  makeStrongCacheSync((envName: string) =>
    buildEnvDescriptors(
      preset,
      preset.alias,
@@ -95,8 +96,9 @@ const loadPresetEnvDescriptors = makeWeakCache((preset: PresetInstance) =>
    ),
  ),
);
const loadPresetOverridesDescriptors = makeWeakCache((preset: PresetInstance) =>
  makeStrongCache((index: number) =>
const loadPresetOverridesDescriptors = makeWeakCacheSync(
  (preset: PresetInstance) =>
    makeStrongCacheSync((index: number) =>
      buildOverrideDescriptors(
        preset,
        preset.alias,
@@ -105,10 +107,10 @@ const loadPresetOverridesDescriptors = makeWeakCache((preset: PresetInstance) =>
      ),
    ),
);
const loadPresetOverridesEnvDescriptors = makeWeakCache(
const loadPresetOverridesEnvDescriptors = makeWeakCacheSync(
  (preset: PresetInstance) =>
    makeStrongCache((index: number) =>
      makeStrongCache((envName: string) =>
    makeStrongCacheSync((index: number) =>
      makeStrongCacheSync((envName: string) =>
        buildOverrideEnvDescriptors(
          preset,
          preset.alias,
@@ -129,11 +131,11 @@ export type RootConfigChain = ConfigChain & {
/**
 * Build a config chain for Babel's full root configuration.
 */
export function buildRootChain(
export function* buildRootChain(
  opts: ValidatedOptions,
  context: ConfigContext,
): RootConfigChain | null {
  const programmaticChain = loadProgrammaticChain(
): Handler<RootConfigChain | null> {
  const programmaticChain = yield* loadProgrammaticChain(
    {
      options: opts,
      dirname: context.cwd,
@@ -144,14 +146,18 @@ export function buildRootChain(

  let configFile;
  if (typeof opts.configFile === "string") {
    configFile = loadConfig(
    configFile = yield* loadConfig(
      opts.configFile,
      context.cwd,
      context.envName,
      context.caller,
    );
  } else if (opts.configFile !== false) {
    configFile = findRootConfig(context.root, context.envName, context.caller);
    configFile = yield* findRootConfig(
      context.root,
      context.envName,
      context.caller,
    );
  }

  let { babelrc, babelrcRoots } = opts;
@@ -160,7 +166,7 @@
  const configFileChain = emptyChain();
  if (configFile) {
    const validatedFile = validateConfigFile(configFile);
    const result = loadFileChain(validatedFile, context);
    const result = yield* loadFileChain(validatedFile, context);
    if (!result) return null;

    // Allow config files to toggle `.babelrc` resolution on and off and
@@ -178,7 +184,7 @@

  const pkgData =
    typeof context.filename === "string"
      ? findPackageData(context.filename)
      ? yield* findPackageData(context.filename)
      : null;

  let ignoreFile, babelrcFile;
@@ -189,7 +195,7 @@
    pkgData &&
    babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)
  ) {
    ({ ignore: ignoreFile, config: babelrcFile } = findRelativeConfig(
    ({ ignore: ignoreFile, config: babelrcFile } = yield* findRelativeConfig(
      pkgData,
      context.envName,
      context.caller,
@@ -203,7 +209,10 @@
  }

  if (babelrcFile) {
    const result = loadFileChain(validateBabelrcFile(babelrcFile), context);
    const result = yield* loadFileChain(
      validateBabelrcFile(babelrcFile),
      context,
    );
    if (!result) return null;

    mergeChain(fileChain, result);
@@ -268,13 +277,15 @@ function babelrcLoadEnabled(
  });
}

const validateConfigFile = makeWeakCache((file: ConfigFile): ValidatedFile => ({
const validateConfigFile = makeWeakCacheSync(
  (file: ConfigFile): ValidatedFile => ({
    filepath: file.filepath,
    dirname: file.dirname,
    options: validate("configfile", file.options),
}));
  }),
);

const validateBabelrcFile = makeWeakCache(
const validateBabelrcFile = makeWeakCacheSync(
  (file: ConfigFile): ValidatedFile => ({
    filepath: file.filepath,
    dirname: file.dirname,
@@ -282,11 +293,13 @@
  }),
);

const validateExtendFile = makeWeakCache((file: ConfigFile): ValidatedFile => ({
const validateExtendFile = makeWeakCacheSync(
  (file: ConfigFile): ValidatedFile => ({
    filepath: file.filepath,
    dirname: file.dirname,
    options: validate("extendsfile", file.options),
}));
  }),
);

/**
 * Build a config chain for just the programmatic options passed into Babel.
@@ -317,11 +330,11 @@ const loadFileChain = makeChainWalker({
  overridesEnv: (file, index, envName) =>
    loadFileOverridesEnvDescriptors(file)(index)(envName),
});
const loadFileDescriptors = makeWeakCache((file: ValidatedFile) =>
const loadFileDescriptors = makeWeakCacheSync((file: ValidatedFile) =>
  buildRootDescriptors(file, file.filepath, createUncachedDescriptors),
);
const loadFileEnvDescriptors = makeWeakCache((file: ValidatedFile) =>
  makeStrongCache((envName: string) =>
const loadFileEnvDescriptors = makeWeakCacheSync((file: ValidatedFile) =>
  makeStrongCacheSync((envName: string) =>
    buildEnvDescriptors(
      file,
      file.filepath,
@@ -330,8 +343,8 @@ const loadFileEnvDescriptors = makeWeakCache((file: ValidatedFile) =>
    ),
  ),
);
const loadFileOverridesDescriptors = makeWeakCache((file: ValidatedFile) =>
  makeStrongCache((index: number) =>
const loadFileOverridesDescriptors = makeWeakCacheSync((file: ValidatedFile) =>
  makeStrongCacheSync((index: number) =>
    buildOverrideDescriptors(
      file,
      file.filepath,
@@ -340,9 +353,10 @@ const loadFileOverridesDescriptors = makeWeakCache((file: ValidatedFile) =>
    ),
  ),
);
const loadFileOverridesEnvDescriptors = makeWeakCache((file: ValidatedFile) =>
  makeStrongCache((index: number) =>
    makeStrongCache((envName: string) =>
const loadFileOverridesEnvDescriptors = makeWeakCacheSync(
  (file: ValidatedFile) =>
    makeStrongCacheSync((index: number) =>
      makeStrongCacheSync((envName: string) =>
        buildOverrideEnvDescriptors(
          file,
          file.filepath,
@@ -410,8 +424,12 @@ function makeChainWalker<ArgT: { options: ValidatedOptions, dirname: string }>({
  env: (ArgT, string) => OptionsAndDescriptors | null,
  overrides: (ArgT, number) => OptionsAndDescriptors,
  overridesEnv: (ArgT, number, string) => OptionsAndDescriptors | null,
}): (ArgT, ConfigContext, Set<ConfigFile> | void) => ConfigChain | null {
  return (input, context, files = new Set()) => {
}): (
  ArgT,
  ConfigContext,
  Set<ConfigFile> | void,
) => Handler<ConfigChain | null> {
  return function*(input, context, files = new Set()) {
    const { dirname } = input;

    const flattenedConfigs = [];
@@ -455,7 +473,9 @@ function makeChainWalker<ArgT: { options: ValidatedOptions, dirname: string }>({
    const chain = emptyChain();

    for (const op of flattenedConfigs) {
      if (!mergeExtendsChain(chain, op.options, dirname, context, files)) {
      if (
        !(yield* mergeExtendsChain(chain, op.options, dirname, context, files))
      ) {
        return null;
      }

@@ -465,16 +485,16 @@ function makeChainWalker<ArgT: { options: ValidatedOptions, dirname: string }>({
  };
}

function mergeExtendsChain(
function* mergeExtendsChain(
  chain: ConfigChain,
  opts: ValidatedOptions,
  dirname: string,
  context: ConfigContext,
  files: Set<ConfigFile>,
): boolean {
): Handler<boolean> {
  if (opts.extends === undefined) return true;

  const file = loadConfig(
  const file = yield* loadConfig(
    opts.extends,
    dirname,
    context.envName,
@@ -490,7 +510,11 @@
  }

  files.add(file);
  const fileChain = loadFileChain(validateExtendFile(file), context, files);
  const fileChain = yield* loadFileChain(
    validateExtendFile(file),
    context,
    files,
  );
  files.delete(file);

  if (!fileChain) return false;

@@ -5,8 +5,8 @@ import { loadPlugin, loadPreset } from "./files";
import { getItemDescriptor } from "./item";

import {
  makeWeakCache,
  makeStrongCache,
  makeWeakCacheSync,
  makeStrongCacheSync,
  type CacheConfigurator,
} from "./caching";

@@ -130,11 +130,11 @@ export function createUncachedDescriptors(
}

const PRESET_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPresetDescriptors = makeWeakCache(
const createCachedPresetDescriptors = makeWeakCacheSync(
  (items: PluginList, cache: CacheConfigurator<string>) => {
    const dirname = cache.using(dir => dir);
    return makeStrongCache((alias: string) =>
      makeStrongCache((passPerPreset: boolean) =>
    return makeStrongCacheSync((alias: string) =>
      makeStrongCacheSync((passPerPreset: boolean) =>
        createPresetDescriptors(items, dirname, alias, passPerPreset).map(
          // Items are cached using the overall preset array identity when
          // possible, but individual descriptors are also cached if a match
@@ -147,10 +147,10 @@ const createCachedPresetDescriptors = makeWeakCache(
);

const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPluginDescriptors = makeWeakCache(
const createCachedPluginDescriptors = makeWeakCacheSync(
  (items: PluginList, cache: CacheConfigurator<string>) => {
    const dirname = cache.using(dir => dir);
    return makeStrongCache((alias: string) =>
    return makeStrongCacheSync((alias: string) =>
      createPluginDescriptors(items, dirname, alias).map(
        // Items are cached using the overall plugin array identity when
        // possible, but individual descriptors are also cached if a match

@@ -2,12 +2,11 @@

import buildDebug from "debug";
import path from "path";
import fs from "fs";
import json5 from "json5";
import resolve from "resolve";
import gensync, { type Handler } from "gensync";
import {
  makeStrongCache,
  makeWeakCache,
  makeWeakCacheSync,
  type CacheConfigurator,
} from "../caching";
import makeAPI, { type PluginAPI } from "../helpers/config-api";
@@ -16,6 +15,9 @@ import pathPatternToRegex from "../pattern-to-regex";
import type { FilePackageData, RelativeConfig, ConfigFile } from "./types";
import type { CallerMetadata } from "../validation/options";

import * as fs from "../../gensync-utils/fs";
import resolve from "../../gensync-utils/resolve";

const debug = buildDebug("babel:config:loading:files:configuration");

export const ROOT_CONFIG_FILENAMES = [
@@ -27,13 +29,14 @@ const RELATIVE_CONFIG_FILENAMES = [".babelrc", ".babelrc.js", ".babelrc.cjs"];

const BABELIGNORE_FILENAME = ".babelignore";

export function findConfigUpwards(rootDir: string): string | null {
export function* findConfigUpwards(rootDir: string): Handler<string | null> {
  let dirname = rootDir;
  while (true) {
    const configFileFound = ROOT_CONFIG_FILENAMES.some(filename =>
      fs.existsSync(path.join(dirname, filename)),
    );
    if (configFileFound) return dirname;
    for (const filename of ROOT_CONFIG_FILENAMES) {
      if (yield* fs.exists(path.join(dirname, filename))) {
        return dirname;
      }
    }

    const nextDir = path.dirname(dirname);
    if (dirname === nextDir) break;
@@ -43,11 +46,11 @@ export function findConfigUpwards(rootDir: string): string | null {
  return null;
}

export function findRelativeConfig(
export function* findRelativeConfig(
  packageData: FilePackageData,
  envName: string,
  caller: CallerMetadata | void,
): RelativeConfig {
): Handler<RelativeConfig> {
  let config = null;
  let ignore = null;

@@ -55,7 +58,7 @@

  for (const loc of packageData.directories) {
    if (!config) {
      config = loadOneConfig(
      config = yield* loadOneConfig(
        RELATIVE_CONFIG_FILENAMES,
        loc,
        envName,
@@ -68,7 +71,7 @@

    if (!ignore) {
      const ignoreLoc = path.join(loc, BABELIGNORE_FILENAME);
      ignore = readIgnoreConfig(ignoreLoc);
      ignore = yield* readIgnoreConfig(ignoreLoc);

      if (ignore) {
        debug("Found ignore %o from %o.", ignore.filepath, dirname);
@@ -83,26 +86,28 @@ export function findRootConfig(
  dirname: string,
  envName: string,
  caller: CallerMetadata | void,
): ConfigFile | null {
): Handler<ConfigFile | null> {
  return loadOneConfig(ROOT_CONFIG_FILENAMES, dirname, envName, caller);
}

function loadOneConfig(
function* loadOneConfig(
  names: string[],
  dirname: string,
  envName: string,
  caller: CallerMetadata | void,
  previousConfig?: ConfigFile | null = null,
): ConfigFile | null {
  const config = names.reduce((previousConfig: ConfigFile | null, name) => {
    const filepath = path.resolve(dirname, name);
    const config = readConfig(filepath, envName, caller);

): Handler<ConfigFile | null> {
  const configs = yield* gensync.all(
    names.map(filename =>
      readConfig(path.join(dirname, filename), envName, caller),
    ),
  );
  const config = configs.reduce((previousConfig: ConfigFile | null, config) => {
    if (config && previousConfig) {
      throw new Error(
        `Multiple configuration files found. Please remove one:\n` +
          ` - ${path.basename(previousConfig.filepath)}\n` +
          ` - ${name}\n` +
          ` - ${config.filepath}\n` +
          `from ${dirname}`,
      );
    }
|
||||
return config;
|
||||
}
|
||||
|
||||
export function loadConfig(
|
||||
export function* loadConfig(
|
||||
name: string,
|
||||
dirname: string,
|
||||
envName: string,
|
||||
caller: CallerMetadata | void,
|
||||
): ConfigFile {
|
||||
const filepath = resolve.sync(name, { basedir: dirname });
|
||||
): Handler<ConfigFile> {
|
||||
const filepath = yield* resolve(name, { basedir: dirname });
|
||||
|
||||
const conf = readConfig(filepath, envName, caller);
|
||||
const conf = yield* readConfig(filepath, envName, caller);
|
||||
if (!conf) {
|
||||
throw new Error(`Config file ${filepath} contains no configuration data`);
|
||||
}
|
||||
@@ -137,7 +142,7 @@
 * Read the given config file, returning the result. Returns null if no config was found, but will
 * throw if there are parsing errors while loading a config.
 */
function readConfig(filepath, envName, caller): ConfigFile | null {
function readConfig(filepath, envName, caller) {
  const ext = path.extname(filepath);
  return ext === ".js" || ext === ".cjs"
    ? readConfigJS(filepath, { envName, caller })
@@ -146,15 +151,14 @@ function readConfig(filepath, envName, caller): ConfigFile | null {

const LOADING_CONFIGS = new Set();

const readConfigJS = makeStrongCache(
  (
const readConfigJS = makeStrongCache(function* readConfigJS(
  filepath: string,
  cache: CacheConfigurator<{
    envName: string,
    caller: CallerMetadata | void,
  }>,
  ) => {
    if (!fs.existsSync(filepath)) {
): Handler<ConfigFile | null> {
  if (!fs.exists.sync(filepath)) {
    cache.forever();
    return null;
  }
@@ -177,6 +181,7 @@ const readConfigJS = makeStrongCache(
  try {
    LOADING_CONFIGS.add(filepath);

    yield* []; // If we want to allow mjs configs imported using `import()`
    // $FlowIssue
    const configModule = (require(filepath): mixed);
    options =
@@ -190,10 +195,12 @@ const readConfigJS = makeStrongCache(
    LOADING_CONFIGS.delete(filepath);
  }

  let assertCache = false;
  if (typeof options === "function") {
    yield* []; // if we want to make it possible to use async configs
    options = ((options: any): (api: PluginAPI) => {})(makeAPI(cache));

    if (!cache.configured()) throwConfigError();
    assertCache = true;
  }

  if (!options || typeof options !== "object" || Array.isArray(options)) {
@@ -212,15 +219,16 @@
    );
  }

  if (assertCache && !cache.configured()) throwConfigError();

  return {
    filepath,
    dirname: path.dirname(filepath),
    options,
  };
  },
);
});

const packageToBabelConfig = makeWeakCache(
const packageToBabelConfig = makeWeakCacheSync(
  (file: ConfigFile): ConfigFile | null => {
    const babel = file.options[("babel": string)];

@@ -1,5 +1,7 @@
// @flow

import type { Handler } from "gensync";

import type {
  ConfigFile,
  IgnoreFile,
@@ -11,13 +13,15 @@ import type { CallerMetadata } from "../validation/options";

export type { ConfigFile, IgnoreFile, RelativeConfig, FilePackageData };

export function findConfigUpwards(
// eslint-disable-next-line require-yield
export function* findConfigUpwards(
  rootDir: string, // eslint-disable-line no-unused-vars
): string | null {
): Handler<string | null> {
  return null;
}

export function findPackageData(filepath: string): FilePackageData {
// eslint-disable-next-line require-yield
export function* findPackageData(filepath: string): Handler<FilePackageData> {
  return {
    filepath,
    directories: [],
@@ -26,28 +30,31 @@ export function findPackageData(filepath: string): FilePackageData {
  };
}

export function findRelativeConfig(
// eslint-disable-next-line require-yield
export function* findRelativeConfig(
  pkgData: FilePackageData, // eslint-disable-line no-unused-vars
  envName: string, // eslint-disable-line no-unused-vars
  caller: CallerMetadata | void, // eslint-disable-line no-unused-vars
): RelativeConfig {
): Handler<RelativeConfig> {
  return { pkg: null, config: null, ignore: null };
}

export function findRootConfig(
// eslint-disable-next-line require-yield
export function* findRootConfig(
  dirname: string, // eslint-disable-line no-unused-vars
  envName: string, // eslint-disable-line no-unused-vars
  caller: CallerMetadata | void, // eslint-disable-line no-unused-vars
): ConfigFile | null {
): Handler<ConfigFile | null> {
  return null;
}

export function loadConfig(
// eslint-disable-next-line require-yield
export function* loadConfig(
  name: string,
  dirname: string,
  envName: string, // eslint-disable-line no-unused-vars
  caller: CallerMetadata | void, // eslint-disable-line no-unused-vars
): ConfigFile {
): Handler<ConfigFile> {
  throw new Error(`Cannot load ${name} relative to ${dirname} in a browser`);
}

@@ -1,6 +1,7 @@
// @flow

import path from "path";
import type { Handler } from "gensync";
import { makeStaticFileCache } from "./utils";

import type { ConfigFile, FilePackageData } from "./types";
@@ -12,7 +13,7 @@ const PACKAGE_FILENAME = "package.json";
 * of Babel's config requires general package information to decide when to
 * search for .babelrc files
 */
export function findPackageData(filepath: string): FilePackageData {
export function* findPackageData(filepath: string): Handler<FilePackageData> {
  let pkg = null;
  const directories = [];
  let isPackage = true;
@@ -21,7 +22,7 @@ export function findPackageData(filepath: string): FilePackageData {
  while (!pkg && path.basename(dirname) !== "node_modules") {
    directories.push(dirname);

    pkg = readConfigPackage(path.join(dirname, PACKAGE_FILENAME));
    pkg = yield* readConfigPackage(path.join(dirname, PACKAGE_FILENAME));

    const nextLoc = path.dirname(dirname);
    if (dirname === nextLoc) {

@@ -1,24 +1,32 @@
// @flow

import fs from "fs";
import { makeStrongCache } from "../caching";
import type { Gensync, Handler } from "gensync";

import { makeStrongCache, type CacheConfigurator } from "../caching";
import * as fs from "../../gensync-utils/fs";
import nodeFs from "fs";

export function makeStaticFileCache<T>(
  fn: (string, string) => T,
): string => T | null {
  return makeStrongCache((filepath, cache) => {
    if (cache.invalidate(() => fileMtime(filepath)) === null) {
): Gensync<[string], T | null> {
  return (makeStrongCache(function*(
    filepath: string,
    cache: CacheConfigurator<?void>,
  ): Handler<null | T> {
    const cached = cache.invalidate(() => fileMtime(filepath));

    if (cached === null) {
      cache.forever();
      return null;
    }

    return fn(filepath, fs.readFileSync(filepath, "utf8"));
  });
    return fn(filepath, yield* fs.readFile(filepath, "utf8"));
  }): Gensync<any, *>);
}

function fileMtime(filepath: string): number | null {
  try {
    return +fs.statSync(filepath).mtime;
    return +nodeFs.statSync(filepath).mtime;
  } catch (e) {
    if (e.code !== "ENOENT" && e.code !== "ENOTDIR") throw e;
  }

@@ -1,5 +1,8 @@
// @flow

import gensync, { type Handler } from "gensync";
import { forwardAsync } from "../gensync-utils/async";

import { mergeOptions } from "./util";
import * as context from "../index";
import Plugin from "./plugin";
@@ -12,7 +15,11 @@
} from "./config-chain";
import type { UnloadedDescriptor } from "./config-descriptors";
import traverse from "@babel/traverse";
import { makeWeakCache, type CacheConfigurator } from "./caching";
import {
  makeWeakCache,
  makeWeakCacheSync,
  type CacheConfigurator,
} from "./caching";
import { validate, type CallerMetadata } from "./validation/options";
import { validatePluginObject } from "./validation/plugins";
import makeAPI from "./helpers/config-api";
@@ -45,10 +52,10 @@ type SimpleContext = {
  caller: CallerMetadata | void,
};

export default function loadFullConfig(
export default gensync<[any], ResolvedConfig | null>(function* loadFullConfig(
  inputOpts: mixed,
): ResolvedConfig | null {
  const result = loadPrivatePartialConfig(inputOpts);
): Handler<ResolvedConfig | null> {
  const result = yield* loadPrivatePartialConfig(inputOpts);
  if (!result) {
    return null;
  }
@@ -63,28 +70,29 @@ export default function loadFullConfig(
    throw new Error("Assertion failure - plugins and presets exist");
  }

  const ignored = (function recurseDescriptors(
  const ignored = yield* (function* recurseDescriptors(
    config: {
      plugins: Array<UnloadedDescriptor>,
      presets: Array<UnloadedDescriptor>,
    },
    pass: Array<Plugin>,
  ) {
    const plugins = config.plugins.reduce((acc, descriptor) => {
    const plugins = [];
    for (const descriptor of config.plugins) {
      if (descriptor.options !== false) {
        acc.push(loadPluginDescriptor(descriptor, context));
        plugins.push(yield* loadPluginDescriptor(descriptor, context));
      }
      return acc;
    }, []);
    const presets = config.presets.reduce((acc, descriptor) => {
    }

    const presets = [];
    for (const descriptor of config.presets) {
      if (descriptor.options !== false) {
        acc.push({
          preset: loadPresetDescriptor(descriptor, context),
        presets.push({
          preset: yield* loadPresetDescriptor(descriptor, context),
          pass: descriptor.ownPass ? [] : pass,
        });
      }
      return acc;
    }, []);
    }

    // resolve presets
    if (presets.length > 0) {
@@ -99,7 +107,7 @@ export default function loadFullConfig(
      for (const { preset, pass } of presets) {
        if (!preset) return true;

        const ignored = recurseDescriptors(
        const ignored = yield* recurseDescriptors(
          {
            plugins: preset.plugins,
            presets: preset.presets,
@@ -165,16 +173,15 @@
    options: opts,
    passes: passes,
  };
}
});

/**
 * Load a generic plugin/preset from the given descriptor loaded from the config object.
 */
const loadDescriptor = makeWeakCache(
  (
const loadDescriptor = makeWeakCache(function*(
  { value, options, dirname, alias }: UnloadedDescriptor,
  cache: CacheConfigurator<SimpleContext>,
  ): LoadedDescriptor => {
): Handler<LoadedDescriptor> {
  // Disabled presets should already have been filtered out
  if (options === false) throw new Error("Assertion failure");

@@ -201,6 +208,8 @@ const loadDescriptor = makeWeakCache(
  }

  if (typeof item.then === "function") {
    yield* []; // if we want to support async plugins

    throw new Error(
      `You appear to be using an async plugin, ` +
        `which your current version of Babel does not support. ` +
@@ -210,16 +219,15 @@
  }

  return { value: item, options, dirname, alias };
  },
);
});

/**
 * Instantiate a plugin for the given descriptor, returning the plugin/options pair.
 */
function loadPluginDescriptor(
function* loadPluginDescriptor(
  descriptor: UnloadedDescriptor,
  context: SimpleContext,
): Plugin {
): Handler<Plugin> {
  if (descriptor.value instanceof Plugin) {
    if (descriptor.options) {
      throw new Error(
@@ -230,14 +238,16 @@ function loadPluginDescriptor(
    return descriptor.value;
  }

  return instantiatePlugin(loadDescriptor(descriptor, context), context);
  return yield* instantiatePlugin(
    yield* loadDescriptor(descriptor, context),
    context,
  );
}

const instantiatePlugin = makeWeakCache(
  (
const instantiatePlugin = makeWeakCache(function*(
  { value, options, dirname, alias }: LoadedDescriptor,
  cache: CacheConfigurator<SimpleContext>,
  ): Plugin => {
): Handler<Plugin> {
  const pluginObj = validatePluginObject(value);

  const plugin = {
@@ -258,10 +268,10 @@ const instantiatePlugin = makeWeakCache(
      dirname,
    };

    const inherits = yield* forwardAsync(loadPluginDescriptor, run => {
      // If the inherited plugin changes, reinstantiate this plugin.
      const inherits = cache.invalidate(data =>
        loadPluginDescriptor(inheritsDescriptor, data),
      );
      return cache.invalidate(data => run(inheritsDescriptor, data));
    });

    plugin.pre = chain(inherits.pre, plugin.pre);
    plugin.post = chain(inherits.post, plugin.post);
@@ -276,8 +286,7 @@
  }

  return new Plugin(plugin, options, alias);
  },
);
});

const validateIfOptionNeedsFilename = (
  options: ValidatedOptions,
@@ -318,16 +327,16 @@ const validatePreset = (
/**
 * Generate a config object that will act as the root of a new nested config.
 */
const loadPresetDescriptor = (
function* loadPresetDescriptor(
  descriptor: UnloadedDescriptor,
  context: ConfigContext,
): ConfigChain | null => {
  const preset = instantiatePreset(loadDescriptor(descriptor, context));
): Handler<ConfigChain | null> {
  const preset = instantiatePreset(yield* loadDescriptor(descriptor, context));
  validatePreset(preset, context, descriptor);
  return buildPresetChain(preset, context);
};
  return yield* buildPresetChain(preset, context);
}

const instantiatePreset = makeWeakCache(
const instantiatePreset = makeWeakCacheSync(
  ({ value, dirname, alias }: LoadedDescriptor): PresetInstance => {
    return {
      options: validate("preset", value),

@@ -1,6 +1,7 @@
// @flow

import loadFullConfig from "./full";
import gensync from "gensync";

export type {
  ResolvedConfig,
  InputOptions,
@@ -8,12 +9,29 @@
  Plugin,
} from "./full";

import loadFullConfig from "./full";
import { loadPartialConfig as loadPartialConfigRunner } from "./partial";

export { loadFullConfig as default };
export { loadPartialConfig } from "./partial";
export type { PartialConfig } from "./partial";

export function loadOptions(opts: {}): Object | null {
  const config = loadFullConfig(opts);

const loadOptionsRunner = gensync<[mixed], Object | null>(function*(opts) {
  const config = yield* loadFullConfig(opts);
  return config ? config.options : null;
}
});

const maybeErrback = runner => (opts: mixed, callback: Function) => {
  if (callback === undefined && typeof opts === "function") {
    callback = opts;
    opts = undefined;
  }
  return callback ? runner.errback(opts, callback) : runner.sync(opts);
};

export const loadPartialConfig = maybeErrback(loadPartialConfigRunner);
export const loadPartialConfigSync = loadPartialConfigRunner.sync;
export const loadPartialConfigAsync = loadPartialConfigRunner.async;

export const loadOptions = maybeErrback(loadOptionsRunner);
export const loadOptionsSync = loadOptionsRunner.sync;
export const loadOptionsAsync = loadOptionsRunner.async;

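Illustrative sketch (not part of the diff) of how the three flavors exported above look to a consumer of @babel/core; the options passed are arbitrary.

const babel = require("@babel/core");

// Synchronous, same behavior as before:
const optsSync = babel.loadOptionsSync({ configFile: false });

// Promise-based:
babel.loadOptionsAsync({ configFile: false }).then(opts => {
  /* ... */
});

// Errback-compatible wrapper: with a callback it runs asynchronously,
// without one it falls back to the synchronous runner.
babel.loadOptions({ configFile: false }, (err, opts) => {
  /* ... */
});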
@@ -1,6 +1,7 @@
// @flow

import path from "path";
import gensync, { type Handler } from "gensync";
import Plugin from "./plugin";
import { mergeOptions } from "./util";
import { createItemFromDescriptor } from "./item";
@@ -19,18 +20,21 @@ import {
  type IgnoreFile,
} from "./files";

function resolveRootMode(rootDir: string, rootMode: RootMode): string {
function* resolveRootMode(
  rootDir: string,
  rootMode: RootMode,
): Handler<string> {
  switch (rootMode) {
    case "root":
      return rootDir;

    case "upward-optional": {
      const upwardRootDir = findConfigUpwards(rootDir);
      const upwardRootDir = yield* findConfigUpwards(rootDir);
      return upwardRootDir === null ? rootDir : upwardRootDir;
    }

    case "upward": {
      const upwardRootDir = findConfigUpwards(rootDir);
      const upwardRootDir = yield* findConfigUpwards(rootDir);
      if (upwardRootDir !== null) return upwardRootDir;

      throw Object.assign(
@@ -51,15 +55,17 @@ function resolveRootMode(rootDir: string, rootMode: RootMode): string {
  }
}

export default function loadPrivatePartialConfig(
  inputOpts: mixed,
): {
type PrivPartialConfig = {
  options: ValidatedOptions,
  context: ConfigContext,
  ignore: IgnoreFile | void,
  babelrc: ConfigFile | void,
  config: ConfigFile | void,
} | null {
};

export default function* loadPrivatePartialConfig(
  inputOpts: mixed,
): Handler<PrivPartialConfig | null> {
  if (
    inputOpts != null &&
    (typeof inputOpts !== "object" || Array.isArray(inputOpts))
@@ -77,7 +83,7 @@
    caller,
  } = args;
  const absoluteCwd = path.resolve(cwd);
  const absoluteRootDir = resolveRootMode(
  const absoluteRootDir = yield* resolveRootMode(
    path.resolve(absoluteCwd, rootDir),
    rootMode,
  );
@@ -93,7 +99,7 @@
    caller,
  };

  const configChain = buildRootChain(args, context);
  const configChain = yield* buildRootChain(args, context);
  if (!configChain) return null;

  const options = {};
@@ -129,8 +135,10 @@
  };
}

export function loadPartialConfig(inputOpts: mixed): PartialConfig | null {
  const result = loadPrivatePartialConfig(inputOpts);
export const loadPartialConfig = gensync<[any], PartialConfig | null>(function*(
  inputOpts: mixed,
): Handler<PartialConfig | null> {
  const result: ?PrivPartialConfig = yield* loadPrivatePartialConfig(inputOpts);
  if (!result) return null;

  const { options, babelrc, ignore, config } = result;
@@ -150,7 +158,7 @@
    ignore ? ignore.filepath : undefined,
    config ? config.filepath : undefined,
  );
}
});

export type { PartialConfig };

@@ -28,3 +28,14 @@ function mergeDefaultFields<T: {}>(target: T, source: T) {
    if (val !== undefined) target[k] = (val: any);
  }
}

export function isIterableIterator(value: mixed): boolean %checks {
  return (
    /*:: value instanceof Generator && */
    // /*:: "@@iterator" in value && */
    !!value &&
    typeof value.next === "function" &&
    // $FlowIgnore
    typeof value[Symbol.iterator] === "function"
  );
}

packages/babel-core/src/gensync-utils/async.js (new file)
@ -0,0 +1,110 @@
// @flow

import gensync, { type Gensync, type Handler } from "gensync";

type MaybePromise<T> = T | Promise<T>;

const id = x => x;

const runGenerator = gensync(function*(item) {
  return yield* item;
});

// This Gensync returns true if the current execution context is
// asynchronous, otherwise it returns false.
export const isAsync = gensync<[], boolean>({
  sync: () => false,
  errback: cb => cb(null, true),
});

// This function wraps any function (which could be either synchronous or
// asynchronous) with a Gensync. If the wrapped function returns a promise
// but the current execution context is synchronous, it will throw the
// provided error.
// This is used to handle user-provided functions which could be asynchronous.
export function maybeAsync<T, Args: any[]>(
  fn: (...args: Args) => T,
  message: string,
): Gensync<Args, T> {
  return gensync({
    sync(...args) {
      const result = fn.apply(this, args);
      if (isThenable(result)) throw new Error(message);
      return result;
    },
    async(...args) {
      return Promise.resolve(fn.apply(this, args));
    },
  });
}

const withKind = (gensync<[any], any>({
  sync: cb => cb("sync"),
  async: cb => cb("async"),
}): <T>(cb: (kind: "sync" | "async") => MaybePromise<T>) => Handler<T>);

// This function wraps a generator (or a Gensync) into another function which,
// when called, will run the provided generator in a sync or async way, depending
// on the execution context where this forwardAsync function is called.
// This is useful, for example, when passing a callback to a function which isn't
// aware of gensync but only knows about synchronous and asynchronous functions.
// An example is cache.using, which, being exposed to the user, must be as simple
// as possible:
//   yield* forwardAsync(gensyncFn, wrappedFn =>
//     cache.using(x => {
//       // Here we don't know about gensync. wrappedFn is a
//       // normal sync or async function
//       return wrappedFn(x);
//     })
//   )
export function forwardAsync<ActionArgs: mixed[], ActionReturn, Return>(
  action: (...args: ActionArgs) => Handler<ActionReturn>,
  cb: (
    adapted: (...args: ActionArgs) => MaybePromise<ActionReturn>,
  ) => MaybePromise<Return>,
): Handler<Return> {
  const g = gensync<ActionArgs, ActionReturn>(action);
  return withKind<Return>(kind => {
    const adapted = g[kind];
    return cb(adapted);
  });
}

// If the given generator is executed asynchronously, the first time that it
// is paused (i.e. when it yields a gensync generator which can't be run
// synchronously), call the "firstPause" callback.
export const onFirstPause = (gensync<[any, any], any>({
  name: "onFirstPause",
  arity: 2,
  sync: function(item) {
    return runGenerator.sync(item);
  },
  errback: function(item, firstPause, cb) {
    let completed = false;

    runGenerator.errback(item, (err, value) => {
      completed = true;
      cb(err, value);
    });

    if (!completed) {
      firstPause();
    }
  },
}): <T>(gen: Generator<*, T, *>, cb: Function) => Handler<T>);

// Wait for the given promise to be resolved
export const waitFor = (gensync<[any], any>({
  sync: id,
  async: id,
}): <T>(p: T | Promise<T>) => Handler<T>);

export function isThenable(val: mixed): boolean %checks {
  return (
    /*:: val instanceof Promise && */
    !!val &&
    (typeof val === "object" || typeof val === "function") &&
    !!val.then &&
    typeof val.then === "function"
  );
}
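For reference, a minimal sketch of the pattern these helpers build on: one gensync definition exposes .sync, .async and .errback entry points, and composed generators stay agnostic of how they are run. This is illustrative only and not part of the diff; the `wait` and `greet` names are hypothetical.

import gensync from "gensync";

const wait = gensync({
  sync: () => {},                          // nothing to wait for in a sync run
  errback: (ms, cb) => setTimeout(cb, ms), // async runs actually wait
});

const greet = gensync(function*(name) {
  yield* wait(10);
  return "hello " + name;
});

console.log(greet.sync("babel"));                 // "hello babel", no timer involved
greet.async("babel").then(console.log);           // resolves after ~10ms
greet.errback("babel", (err, res) => console.log(err, res));

maybeAsync above applies the same idea to user-provided callbacks: the sync path throws the given message if the callback returned a promise, while the async path simply awaits it.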
packages/babel-core/src/gensync-utils/fs.js (new file)
@ -0,0 +1,21 @@
// @flow

import fs from "fs";
import gensync from "gensync";

export const readFile = gensync<[string, "utf8"], string>({
  sync: fs.readFileSync,
  errback: fs.readFile,
});

export const exists = gensync<[string], boolean>({
  sync(path) {
    try {
      fs.accessSync(path);
      return true;
    } catch {
      return false;
    }
  },
  errback: (path, cb) => fs.access(path, undefined, err => cb(null, !err)),
});
packages/babel-core/src/gensync-utils/resolve.js (new file)
@ -0,0 +1,9 @@
// @flow

import resolve from "resolve";
import gensync from "gensync";

export default gensync<[string, {| basedir: string |}], string>({
  sync: resolve.sync,
  errback: resolve,
});
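A hedged sketch of how these filesystem wrappers are meant to be consumed from another gensync generator, using yield* so the caller inherits the sync or async execution mode. The `loadFileIfExists` helper and the import path are assumptions for illustration, not part of this commit.

import gensync from "gensync";
import { exists, readFile } from "./gensync-utils/fs";

// Hypothetical helper: read a file only if it exists, in whatever mode the
// caller is running in (blocking fs calls for .sync, callback-based for .async).
const loadFileIfExists = gensync(function*(filename) {
  if (!(yield* exists(filename))) return null;
  return yield* readFile(filename, "utf8");
});

loadFileIfExists.sync("./babel.config.js");                 // string | null
loadFileIfExists.async("./babel.config.js").then(console.log);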
@ -15,7 +15,14 @@ export { default as template } from "@babel/template";

export { createConfigItem } from "./config/item";

export { loadPartialConfig, loadOptions } from "./config";
export {
  loadPartialConfig,
  loadPartialConfigSync,
  loadPartialConfigAsync,
  loadOptions,
  loadOptionsSync,
  loadOptionsAsync,
} from "./config";

export { transform, transformSync, transformAsync } from "./transform";
export {
@ -1,5 +1,7 @@
// @flow

import gensync from "gensync";

import loadConfig, { type InputOptions } from "./config";
import parser from "./parser";
import type { ParseResult } from "./parser";
@ -19,6 +21,18 @@ type Parse = {
  (code: string, opts: ?InputOptions): ParseResult | null,
};

const parseRunner = gensync<[string, ?InputOptions], ParseResult | null>(
  function* parse(code, opts) {
    const config = yield* loadConfig(opts);

    if (config === null) {
      return null;
    }

    return yield* parser(config.passes, normalizeOptions(config), code);
  },
);

export const parse: Parse = (function parse(code, opts, callback) {
  if (typeof opts === "function") {
    callback = opts;
@ -27,55 +41,10 @@ export const parse: Parse = (function parse(code, opts, callback) {

  // For backward-compat with Babel 7's early betas, we allow sync parsing when
  // no callback is given. Will be dropped in some future Babel major version.
  if (callback === undefined) return parseSync(code, opts);
  if (callback === undefined) return parseRunner.sync(code, opts);

  const config = loadConfig(opts);

  if (config === null) {
    return null;
  }

  // Reassign to keep Flowtype happy.
  const cb = callback;

  // Just delaying the transform one tick for now to simulate async behavior
  // but more async logic may land here eventually.
  process.nextTick(() => {
    let ast = null;
    try {
      const cfg = loadConfig(opts);
      if (cfg === null) return cb(null, null);

      ast = parser(cfg.passes, normalizeOptions(cfg), code);
    } catch (err) {
      return cb(err);
    }

    cb(null, ast);
  });
  parseRunner.errback(code, opts, callback);
}: Function);

export function parseSync(
  code: string,
  opts?: InputOptions,
): ParseResult | null {
  const config = loadConfig(opts);

  if (config === null) {
    return null;
  }

  return parser(config.passes, normalizeOptions(config), code);
}

export function parseAsync(
  code: string,
  opts?: InputOptions,
): Promise<ParseResult | null> {
  return new Promise((res, rej) => {
    parse(code, opts, (err, result) => {
      if (err == null) res(result);
      else rej(err);
    });
  });
}
export const parseSync = parseRunner.sync;
export const parseAsync = parseRunner.async;
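From a caller's point of view, the three parse entry points now share one generator and differ only in calling convention. A hedged usage sketch (the snippet and option values are illustrative):

const babel = require("@babel/core");

// Synchronous: throws if the resolved config requires async work.
const file = babel.parseSync("1 + 1", { configFile: false });
console.log(file.type); // "File"

// Promise-based: resolves with the same result, or rejects on error.
babel.parseAsync("1 + 1", { configFile: false }).then(ast => {
  console.log(ast.program.body.length); // 1
});

// Errback form, routed through the same runner underneath.
babel.parse("1 + 1", { configFile: false }, (err, ast) => {
  if (err) throw err;
  console.log(ast.type); // "File"
});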
@ -1,3 +1,4 @@
import type { Handler } from "gensync";
import { parse } from "@babel/parser";
import { codeFrameColumns } from "@babel/code-frame";
import generateMissingPluginMessage from "./util/missing-plugin-helper";
@ -6,11 +7,11 @@ type AstRoot = BabelNodeFile | BabelNodeProgram;

export type ParseResult = AstRoot;

export default function parser(
export default function* parser(
  pluginPasses: PluginPasses,
  { parserOpts, highlightCode = true, filename = "unknown" }: Object,
  code: string,
): ParseResult {
): Handler<ParseResult> {
  try {
    const results = [];
    for (const plugins of pluginPasses) {
@ -27,6 +28,7 @@ export default function parser(
    if (results.length === 0) {
      return parse(code, parserOpts);
    } else if (results.length === 1) {
      yield* []; // If we want to allow async parsers
      if (typeof results[0].then === "function") {
        throw new Error(
          `You appear to be using an async parser plugin, ` +
@ -1,9 +1,10 @@
// @flow

import loadConfig, { type InputOptions } from "./config";
import gensync from "gensync";

import loadConfig, { type InputOptions, type ResolvedConfig } from "./config";
import {
  runSync,
  runAsync,
  run,
  type FileResult,
  type FileResultCallback,
} from "./transformation";
@ -24,6 +25,18 @@ type TransformFromAst = {
  (ast: AstRoot, code: string, opts: ?InputOptions): FileResult | null,
};

const transformFromAstRunner = gensync<
  [AstRoot, string, ?InputOptions],
  FileResult | null,
>(function*(ast, code, opts) {
  const config: ResolvedConfig | null = yield* loadConfig(opts);
  if (config === null) return null;

  if (!ast) throw new Error("No AST given");

  return yield* run(config, code, ast);
});

export const transformFromAst: TransformFromAst = (function transformFromAst(
  ast,
  code,
@ -37,50 +50,12 @@ export const transformFromAst: TransformFromAst = (function transformFromAst(

  // For backward-compat with Babel 6, we allow sync transformation when
  // no callback is given. Will be dropped in some future Babel major version.
  if (callback === undefined) return transformFromAstSync(ast, code, opts);

  // Reassign to keep Flowtype happy.
  const cb = callback;

  // Just delaying the transform one tick for now to simulate async behavior
  // but more async logic may land here eventually.
  process.nextTick(() => {
    let cfg;
    try {
      cfg = loadConfig(opts);
      if (cfg === null) return cb(null, null);
    } catch (err) {
      return cb(err);
  if (callback === undefined) {
    return transformFromAstRunner.sync(ast, code, opts);
  }

  if (!ast) return cb(new Error("No AST given"));

  runAsync(cfg, code, ast, cb);
  });
  transformFromAstRunner.errback(ast, code, opts, callback);
}: Function);

export function transformFromAstSync(
  ast: AstRoot,
  code: string,
  opts: ?InputOptions,
): FileResult | null {
  const config = loadConfig(opts);
  if (config === null) return null;

  if (!ast) throw new Error("No AST given");

  return runSync(config, code, ast);
}

export function transformFromAstAsync(
  ast: AstRoot,
  code: string,
  opts: ?InputOptions,
): Promise<FileResult | null> {
  return new Promise((res, rej) => {
    transformFromAst(ast, code, opts, (err, result) => {
      if (err == null) res(result);
      else rej(err);
    });
  });
}
export const transformFromAstSync = transformFromAstRunner.sync;
export const transformFromAstAsync = transformFromAstRunner.async;
@ -1,13 +1,14 @@
// @flow
import fs from "fs";

import loadConfig, { type InputOptions } from "./config";
import gensync from "gensync";

import loadConfig, { type InputOptions, type ResolvedConfig } from "./config";
import {
  runSync,
  runAsync,
  run,
  type FileResult,
  type FileResultCallback,
} from "./transformation";
import * as fs from "./gensync-utils/fs";

import typeof * as transformFileBrowserType from "./transform-file-browser";
import typeof * as transformFileType from "./transform-file";
@ -22,50 +23,8 @@ type TransformFile = {
  (filename: string, opts: ?InputOptions, callback: FileResultCallback): void,
};

export const transformFile: TransformFile = (function transformFile(
  filename,
  opts,
  callback,
) {
  let options;
  if (typeof opts === "function") {
    callback = opts;
    opts = undefined;
  }

  if (opts == null) {
    options = { filename };
  } else if (opts && typeof opts === "object") {
    options = {
      ...opts,
      filename,
    };
  }

  process.nextTick(() => {
    let cfg;
    try {
      cfg = loadConfig(options);
      if (cfg === null) return callback(null, null);
    } catch (err) {
      return callback(err);
    }

    // Reassignment to keep Flow happy.
    const config = cfg;

    fs.readFile(filename, "utf8", function(err, code: string) {
      if (err) return callback(err, null);

      runAsync(config, code, null, callback);
    });
  });
}: Function);

export function transformFileSync(
  filename: string,
  opts: ?InputOptions,
): FileResult | null {
const transformFileRunner = gensync<[string, ?InputOptions], FileResult | null>(
  function*(filename, opts) {
    let options;
    if (opts == null) {
      options = { filename };
@ -76,20 +35,14 @@ export function transformFileSync(
      };
    }

    const config = loadConfig(options);
    const config: ResolvedConfig | null = yield* loadConfig(options);
    if (config === null) return null;

    return runSync(config, fs.readFileSync(filename, "utf8"));
}
    const code = yield* fs.readFile(filename, "utf8");
    return yield* run(config, code);
  },
);

export function transformFileAsync(
  filename: string,
  opts: ?InputOptions,
): Promise<FileResult | null> {
  return new Promise((res, rej) => {
    transformFile(filename, opts, (err, result) => {
      if (err == null) res(result);
      else rej(err);
    });
  });
}
export const transformFile: TransformFile = transformFileRunner.errback;
export const transformFileSync = transformFileRunner.sync;
export const transformFileAsync = transformFileRunner.async;
@ -1,8 +1,10 @@
// @flow
import loadConfig, { type InputOptions } from "./config";

import gensync from "gensync";

import loadConfig, { type InputOptions, type ResolvedConfig } from "./config";
import {
  runSync,
  runAsync,
  run,
  type FileResult,
  type FileResultCallback,
} from "./transformation";
@ -16,6 +18,15 @@ type Transform = {
  (code: string, opts: ?InputOptions): FileResult | null,
};

const transformRunner = gensync<[string, ?InputOptions], FileResult | null>(
  function* transform(code, opts) {
    const config: ResolvedConfig | null = yield* loadConfig(opts);
    if (config === null) return null;

    return yield* run(config, code);
  },
);

export const transform: Transform = (function transform(code, opts, callback) {
  if (typeof opts === "function") {
    callback = opts;
@ -24,44 +35,10 @@ export const transform: Transform = (function transform(code, opts, callback) {

  // For backward-compat with Babel 6, we allow sync transformation when
  // no callback is given. Will be dropped in some future Babel major version.
  if (callback === undefined) return transformSync(code, opts);
  if (callback === undefined) return transformRunner.sync(code, opts);

  // Reassign to keep Flowtype happy.
  const cb = callback;

  // Just delaying the transform one tick for now to simulate async behavior
  // but more async logic may land here eventually.
  process.nextTick(() => {
    let cfg;
    try {
      cfg = loadConfig(opts);
      if (cfg === null) return cb(null, null);
    } catch (err) {
      return cb(err);
    }

    runAsync(cfg, code, null, cb);
  });
  transformRunner.errback(code, opts, callback);
}: Function);

export function transformSync(
  code: string,
  opts: ?InputOptions,
): FileResult | null {
  const config = loadConfig(opts);
  if (config === null) return null;

  return runSync(config, code);
}

export function transformAsync(
  code: string,
  opts: ?InputOptions,
): Promise<FileResult | null> {
  return new Promise((res, rej) => {
    transform(code, opts, (err, result) => {
      if (err == null) res(result);
      else rej(err);
    });
  });
}
export const transformSync = transformRunner.sync;
export const transformAsync = transformRunner.async;
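The same runner pattern backs all three public transform entry points; only the calling convention differs. A brief hedged sketch of the caller-side equivalences (the snippet and options are illustrative):

const babel = require("@babel/core");
const opts = { configFile: false, presets: [] };

// Synchronous path: transformRunner.sync under the hood.
const res1 = babel.transformSync("const x = 1;", opts);

// Promise path: transformRunner.async; no process.nextTick deferral anymore.
babel.transformAsync("const x = 1;", opts).then(res2 => {
  console.log(res2.code === res1.code); // true for a purely sync config
});

// Errback path: transformRunner.errback.
babel.transform("const x = 1;", opts, (err, res3) => {
  if (err) throw err;
  console.log(res3.code);
});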
@ -11,7 +11,7 @@ export default function loadBlockHoistPlugin(): Plugin {
    // Lazy-init the internal plugin to remove the init-time circular
    // dependency between plugins being passed @babel/core's export object,
    // which loads this file, and this 'loadConfig' loading plugins.
    const config = loadConfig({
    const config = loadConfig.sync({
      babelrc: false,
      configFile: false,
      plugins: [blockHoistPlugin],
@ -1,6 +1,7 @@
// @flow
import traverse from "@babel/traverse";
import typeof { SourceMap } from "convert-source-map";
import type { Handler } from "gensync";

import type { ResolvedConfig, PluginPasses } from "../config";

@ -25,30 +26,12 @@ export type FileResult = {
  map: SourceMap | null,
};

export function runAsync(
export function* run(
  config: ResolvedConfig,
  code: string,
  ast: ?(BabelNodeFile | BabelNodeProgram),
  callback: Function,
) {
  let result;
  try {
    result = runSync(config, code, ast);
  } catch (err) {
    return callback(err);
  }

  // We don't actually care about calling this synchronously here because it is
  // already running within a .nextTick handler from the transform calls above.
  return callback(null, result);
}

export function runSync(
  config: ResolvedConfig,
  code: string,
  ast: ?(BabelNodeFile | BabelNodeProgram),
): FileResult {
  const file = normalizeFile(
): Handler<FileResult> {
  const file = yield* normalizeFile(
    config.passes,
    normalizeOptions(config),
    code,
@ -57,7 +40,7 @@ export function runSync(

  const opts = file.opts;
  try {
    transformFile(file, config.passes);
    yield* transformFile(file, config.passes);
  } catch (e) {
    e.message = `${opts.filename ?? "unknown"}: ${e.message}`;
    if (!e.code) {
@ -89,7 +72,7 @@ export function runSync(
  };
}

function transformFile(file: File, pluginPasses: PluginPasses): void {
function* transformFile(file: File, pluginPasses: PluginPasses): Handler<void> {
  for (const pluginPairs of pluginPasses) {
    const passPairs = [];
    const passes = [];
@ -108,6 +91,7 @@ function transformFile(file: File, pluginPasses: PluginPasses): void {
      if (fn) {
        const result = fn.call(pass, file);

        yield* [];
        if (isThenable(result)) {
          throw new Error(
            `You appear to be using an plugin with an async .pre, ` +
@ -132,6 +116,7 @@ function transformFile(file: File, pluginPasses: PluginPasses): void {
      if (fn) {
        const result = fn.call(pass, file);

        yield* [];
        if (isThenable(result)) {
          throw new Error(
            `You appear to be using an plugin with an async .post, ` +
@ -4,6 +4,7 @@ import fs from "fs";
import path from "path";
import buildDebug from "debug";
import cloneDeep from "lodash/cloneDeep";
import type { Handler } from "gensync";
import * as t from "@babel/types";
import type { PluginPasses } from "../config";
import convertSourceMap, { typeof Converter } from "convert-source-map";
@ -19,12 +20,12 @@ export type NormalizedFile = {
  inputMap: Converter | null,
};

export default function normalizeFile(
export default function* normalizeFile(
  pluginPasses: PluginPasses,
  options: Object,
  code: string,
  ast: ?(BabelNodeFile | BabelNodeProgram),
): File {
): Handler<File> {
  code = `${code || ""}`;

  if (ast) {
@ -35,7 +36,7 @@ export default function normalizeFile(
    }
    ast = cloneDeep(ast);
  } else {
    ast = parser(pluginPasses, options, code);
    ast = yield* parser(pluginPasses, options, code);
  }

  let inputMap = null;
214
packages/babel-core/test/async.js
Normal file
214
packages/babel-core/test/async.js
Normal file
@ -0,0 +1,214 @@
|
||||
import path from "path";
|
||||
import * as babel from "..";
|
||||
|
||||
const nodeGte8 = (...args) => {
|
||||
// "minNodeVersion": "8.0.0" <-- For Ctrl+F when dropping node 6
|
||||
const testFn = process.version.slice(0, 3) === "v6." ? it.skip : it;
|
||||
testFn(...args);
|
||||
};
|
||||
|
||||
describe("asynchronicity", () => {
|
||||
const base = path.join(__dirname, "fixtures", "async");
|
||||
let cwd;
|
||||
|
||||
beforeEach(function() {
|
||||
cwd = process.cwd();
|
||||
process.chdir(base);
|
||||
});
|
||||
|
||||
afterEach(function() {
|
||||
process.chdir(cwd);
|
||||
});
|
||||
|
||||
describe("config file", () => {
|
||||
describe("async function", () => {
|
||||
nodeGte8("called synchronously", () => {
|
||||
process.chdir("config-file-async-function");
|
||||
|
||||
expect(() =>
|
||||
babel.transformSync(""),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"You appear to be using an async configuration, which your current version of Babel does` +
|
||||
` not support. We may add support for this in the future, but if you're on the most recent` +
|
||||
` version of @babel/core and still seeing this error, then you'll need to synchronously` +
|
||||
` return your config."`,
|
||||
);
|
||||
});
|
||||
|
||||
nodeGte8("called asynchronously", async () => {
|
||||
process.chdir("config-file-async-function");
|
||||
|
||||
await expect(
|
||||
babel.transformAsync(""),
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"You appear to be using an async configuration, which your current version of Babel does` +
|
||||
` not support. We may add support for this in the future, but if you're on the most recent` +
|
||||
` version of @babel/core and still seeing this error, then you'll need to synchronously` +
|
||||
` return your config."`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("promise", () => {
|
||||
it("called synchronously", () => {
|
||||
process.chdir("config-file-promise");
|
||||
|
||||
expect(() =>
|
||||
babel.transformSync(""),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"You appear to be using an async configuration, which your current version of Babel does` +
|
||||
` not support. We may add support for this in the future, but if you're on the most recent` +
|
||||
` version of @babel/core and still seeing this error, then you'll need to synchronously` +
|
||||
` return your config."`,
|
||||
);
|
||||
});
|
||||
|
||||
it("called asynchronously", async () => {
|
||||
process.chdir("config-file-promise");
|
||||
|
||||
await expect(
|
||||
babel.transformAsync(""),
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"You appear to be using an async configuration, which your current version of Babel does` +
|
||||
` not support. We may add support for this in the future, but if you're on the most recent` +
|
||||
` version of @babel/core and still seeing this error, then you'll need to synchronously` +
|
||||
` return your config."`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("cache.using", () => {
|
||||
nodeGte8("called synchronously", () => {
|
||||
process.chdir("config-cache");
|
||||
|
||||
expect(() =>
|
||||
babel.transformSync(""),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"You appear to be using an async cache handler, which your current version of Babel does` +
|
||||
` not support. We may add support for this in the future, but if you're on the most recent` +
|
||||
` version of @babel/core and still seeing this error, then you'll need to synchronously` +
|
||||
` handle your caching logic."`,
|
||||
);
|
||||
});
|
||||
|
||||
nodeGte8("called asynchronously", async () => {
|
||||
process.chdir("config-cache");
|
||||
|
||||
await expect(
|
||||
babel.transformAsync(""),
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"You appear to be using an async cache handler, which your current version of Babel does` +
|
||||
` not support. We may add support for this in the future, but if you're on the most recent` +
|
||||
` version of @babel/core and still seeing this error, then you'll need to synchronously` +
|
||||
` handle your caching logic."`,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("plugin", () => {
|
||||
describe("factory function", () => {
|
||||
nodeGte8("called synchronously", () => {
|
||||
process.chdir("plugin");
|
||||
|
||||
expect(() =>
|
||||
babel.transformSync(""),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"[BABEL] unknown: You appear to be using an async plugin, which your current version of Babel` +
|
||||
` does not support. If you're using a published plugin, you may need to upgrade your` +
|
||||
` @babel/core version."`,
|
||||
);
|
||||
});
|
||||
|
||||
nodeGte8("called asynchronously", async () => {
|
||||
process.chdir("plugin");
|
||||
|
||||
await expect(
|
||||
babel.transformAsync(""),
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"[BABEL] unknown: You appear to be using an async plugin, which your current version of Babel` +
|
||||
` does not support. If you're using a published plugin, you may need to upgrade your` +
|
||||
` @babel/core version."`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe(".pre", () => {
|
||||
nodeGte8("called synchronously", () => {
|
||||
process.chdir("plugin-pre");
|
||||
|
||||
expect(() =>
|
||||
babel.transformSync(""),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"unknown: You appear to be using an plugin with an async .pre, which your current version` +
|
||||
` of Babel does not support. If you're using a published plugin, you may need to upgrade your` +
|
||||
` @babel/core version."`,
|
||||
);
|
||||
});
|
||||
|
||||
nodeGte8("called asynchronously", async () => {
|
||||
process.chdir("plugin-pre");
|
||||
|
||||
await expect(
|
||||
babel.transformAsync(""),
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"unknown: You appear to be using an plugin with an async .pre, which your current version` +
|
||||
` of Babel does not support. If you're using a published plugin, you may need to upgrade your` +
|
||||
` @babel/core version."`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe(".post", () => {
|
||||
nodeGte8("called synchronously", () => {
|
||||
process.chdir("plugin-post");
|
||||
|
||||
expect(() =>
|
||||
babel.transformSync(""),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"unknown: You appear to be using an plugin with an async .post, which your current version` +
|
||||
` of Babel does not support. If you're using a published plugin, you may need to upgrade your` +
|
||||
` @babel/core version."`,
|
||||
);
|
||||
});
|
||||
|
||||
nodeGte8("called asynchronously", async () => {
|
||||
process.chdir("plugin-post");
|
||||
|
||||
await expect(
|
||||
babel.transformAsync(""),
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"unknown: You appear to be using an plugin with an async .post, which your current version` +
|
||||
` of Babel does not support. If you're using a published plugin, you may need to upgrade your` +
|
||||
` @babel/core version."`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("inherits", () => {
|
||||
nodeGte8("called synchronously", () => {
|
||||
process.chdir("plugin-inherits");
|
||||
|
||||
expect(() =>
|
||||
babel.transformSync(""),
|
||||
).toThrowErrorMatchingInlineSnapshot(
|
||||
`"[BABEL] unknown: You appear to be using an async plugin, which your current version of Babel` +
|
||||
` does not support. If you're using a published plugin, you may need to upgrade your` +
|
||||
` @babel/core version."`,
|
||||
);
|
||||
});
|
||||
|
||||
nodeGte8("called asynchronously", async () => {
|
||||
process.chdir("plugin-inherits");
|
||||
|
||||
await expect(
|
||||
babel.transformAsync(""),
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"[BABEL] unknown: You appear to be using an async plugin, which your current version of Babel` +
|
||||
` does not support. If you're using a published plugin, you may need to upgrade your` +
|
||||
` @babel/core version."`,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -1,10 +1,12 @@
|
||||
import { makeStrongCache } from "../lib/config/caching";
|
||||
import gensync from "gensync";
|
||||
import { makeStrongCacheSync, makeStrongCache } from "../lib/config/caching";
|
||||
import { waitFor } from "../lib/gensync-utils/async";
|
||||
|
||||
describe("caching API", () => {
|
||||
it("should allow permacaching with .forever()", () => {
|
||||
let count = 0;
|
||||
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache.forever();
|
||||
return { arg, count: count++ };
|
||||
});
|
||||
@ -21,7 +23,7 @@ describe("caching API", () => {
|
||||
it("should allow disabling caching with .never()", () => {
|
||||
let count = 0;
|
||||
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache.never();
|
||||
return { arg, count: count++ };
|
||||
});
|
||||
@ -41,7 +43,7 @@ describe("caching API", () => {
|
||||
let count = 0;
|
||||
let other = "default";
|
||||
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
const val = cache.using(() => other);
|
||||
|
||||
return { arg, val, count: count++ };
|
||||
@ -82,7 +84,7 @@ describe("caching API", () => {
|
||||
let count = 0;
|
||||
let other = "default";
|
||||
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
const val = cache.invalidate(() => other);
|
||||
|
||||
return { arg, val, count: count++ };
|
||||
@ -124,7 +126,7 @@ describe("caching API", () => {
|
||||
let other = "default";
|
||||
let another = "another";
|
||||
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
const val = cache.using(() => other);
|
||||
const val2 = cache.invalidate(() => another);
|
||||
|
||||
@ -223,7 +225,7 @@ describe("caching API", () => {
|
||||
it("should auto-permacache by default", () => {
|
||||
let count = 0;
|
||||
|
||||
const fn = makeStrongCache(arg => ({ arg, count: count++ }));
|
||||
const fn = makeStrongCacheSync(arg => ({ arg, count: count++ }));
|
||||
|
||||
expect(fn("one")).toEqual({ arg: "one", count: 0 });
|
||||
expect(fn("one")).toBe(fn("one"));
|
||||
@ -235,7 +237,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you set permacaching and use .using", () => {
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache.forever();
|
||||
|
||||
cache.using(() => null);
|
||||
@ -245,7 +247,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you set permacaching and use .invalidate", () => {
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache.forever();
|
||||
|
||||
cache.invalidate(() => null);
|
||||
@ -255,7 +257,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you set permacaching and use .never", () => {
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache.forever();
|
||||
|
||||
cache.never();
|
||||
@ -265,7 +267,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you set no caching and use .using", () => {
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache.never();
|
||||
|
||||
cache.using(() => null);
|
||||
@ -275,7 +277,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you set no caching and use .invalidate", () => {
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache.never();
|
||||
|
||||
cache.invalidate(() => null);
|
||||
@ -285,7 +287,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you set no caching and use .never", () => {
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache.never();
|
||||
|
||||
cache.using(() => null);
|
||||
@ -295,7 +297,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you configure .forever after exiting", () => {
|
||||
const fn = makeStrongCache((arg, cache) => cache);
|
||||
const fn = makeStrongCacheSync((arg, cache) => cache);
|
||||
|
||||
expect(() => fn().forever()).toThrow(
|
||||
/Cannot change caching after evaluation/,
|
||||
@ -303,7 +305,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you configure .never after exiting", () => {
|
||||
const fn = makeStrongCache((arg, cache) => cache);
|
||||
const fn = makeStrongCacheSync((arg, cache) => cache);
|
||||
|
||||
expect(() => fn().never()).toThrow(
|
||||
/Cannot change caching after evaluation/,
|
||||
@ -311,7 +313,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you configure .using after exiting", () => {
|
||||
const fn = makeStrongCache((arg, cache) => cache);
|
||||
const fn = makeStrongCacheSync((arg, cache) => cache);
|
||||
|
||||
expect(() => fn().using(() => null)).toThrow(
|
||||
/Cannot change caching after evaluation/,
|
||||
@ -319,7 +321,7 @@ describe("caching API", () => {
|
||||
});
|
||||
|
||||
it("should throw if you configure .invalidate after exiting", () => {
|
||||
const fn = makeStrongCache((arg, cache) => cache);
|
||||
const fn = makeStrongCacheSync((arg, cache) => cache);
|
||||
|
||||
expect(() => fn().invalidate(() => null)).toThrow(
|
||||
/Cannot change caching after evaluation/,
|
||||
@ -330,7 +332,7 @@ describe("caching API", () => {
|
||||
it("should allow permacaching with cache(true)", () => {
|
||||
let count = 0;
|
||||
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache = cache.simple();
|
||||
|
||||
cache(true);
|
||||
@ -349,7 +351,7 @@ describe("caching API", () => {
|
||||
it("should allow disabling caching with cache(false)", () => {
|
||||
let count = 0;
|
||||
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache = cache.simple();
|
||||
|
||||
cache(false);
|
||||
@ -371,7 +373,7 @@ describe("caching API", () => {
|
||||
let count = 0;
|
||||
let other = "default";
|
||||
|
||||
const fn = makeStrongCache((arg, cache) => {
|
||||
const fn = makeStrongCacheSync((arg, cache) => {
|
||||
cache = cache.simple();
|
||||
|
||||
const val = cache(() => other);
|
||||
@ -410,4 +412,60 @@ describe("caching API", () => {
|
||||
expect(fn("two")).toBe(fn("two"));
|
||||
});
|
||||
});
|
||||
|
||||
describe("async", () => {
|
||||
const wait = gensync({
|
||||
sync: () => {},
|
||||
errback: (t, cb) => setTimeout(cb, t),
|
||||
});
|
||||
|
||||
it("should throw if the cache is configured asynchronously", async () => {
|
||||
const fn = gensync(
|
||||
makeStrongCache(function*(arg, cache) {
|
||||
yield* wait(1000);
|
||||
cache.never();
|
||||
return { arg };
|
||||
}),
|
||||
).async;
|
||||
|
||||
await expect(fn("bar")).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"Cannot change caching after evaluation has completed."`,
|
||||
);
|
||||
});
|
||||
|
||||
it("should allow asynchronous cache invalidation functions", async () => {
|
||||
const fn = gensync(
|
||||
makeStrongCache(function*(arg, cache) {
|
||||
yield* waitFor(
|
||||
cache.using(async () => {
|
||||
await wait.async(50);
|
||||
return "x";
|
||||
}),
|
||||
);
|
||||
return { arg };
|
||||
}),
|
||||
).async;
|
||||
|
||||
const [res1, res2] = await Promise.all([fn("foo"), fn("foo")]);
|
||||
|
||||
expect(res1).toBe(res2);
|
||||
});
|
||||
|
||||
it("should allow synchronous yield before cache configuration", async () => {
|
||||
const fn = gensync(
|
||||
makeStrongCache(function*(arg, cache) {
|
||||
yield* gensync({
|
||||
sync: () => 2,
|
||||
errback: cb => cb(null, 2),
|
||||
})();
|
||||
cache.forever();
|
||||
return { arg };
|
||||
}),
|
||||
).async;
|
||||
|
||||
const [res1, res2] = await Promise.all([fn("foo"), fn("foo")]);
|
||||
|
||||
expect(res1).toBe(res2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,6 +1,8 @@
import loadConfig, { loadPartialConfig } from "../lib/config";
import loadConfigRunner, { loadPartialConfig } from "../lib/config";
import path from "path";

const loadConfig = loadConfigRunner.sync;

describe("@babel/core config loading", () => {
  const FILEPATH = path.join(
    __dirname,
12
packages/babel-core/test/fixtures/async/config-cache/babel.config.js
vendored
Normal file
12
packages/babel-core/test/fixtures/async/config-cache/babel.config.js
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
const wait = t => new Promise(r => setTimeout(r, t));
|
||||
|
||||
module.exports = function(api) {
|
||||
api.cache.using(async () => {
|
||||
await wait(50);
|
||||
return 2;
|
||||
})
|
||||
|
||||
return {
|
||||
plugins: ["./plugin"],
|
||||
};
|
||||
};
|
||||
9
packages/babel-core/test/fixtures/async/config-cache/plugin.js
vendored
Normal file
9
packages/babel-core/test/fixtures/async/config-cache/plugin.js
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
module.exports = function plugin({ types: t }) {
|
||||
return {
|
||||
visitor: {
|
||||
Program(path) {
|
||||
path.pushContainer("body", t.stringLiteral("success"));
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
11
packages/babel-core/test/fixtures/async/config-file-async-function/babel.config.js
vendored
Normal file
11
packages/babel-core/test/fixtures/async/config-file-async-function/babel.config.js
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
const wait = t => new Promise(r => setTimeout(r, t));
|
||||
|
||||
module.exports = async function(api) {
|
||||
await wait(50);
|
||||
|
||||
api.cache.never();
|
||||
|
||||
return {
|
||||
plugins: ["./plugin"],
|
||||
};
|
||||
};
|
||||
9
packages/babel-core/test/fixtures/async/config-file-async-function/plugin.js
vendored
Normal file
9
packages/babel-core/test/fixtures/async/config-file-async-function/plugin.js
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
module.exports = function plugin({ types: t }) {
|
||||
return {
|
||||
visitor: {
|
||||
Program(path) {
|
||||
path.pushContainer("body", t.stringLiteral("success"));
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
5
packages/babel-core/test/fixtures/async/config-file-promise/babel.config.js
vendored
Normal file
5
packages/babel-core/test/fixtures/async/config-file-promise/babel.config.js
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
const wait = t => new Promise(r => setTimeout(r, t));
|
||||
|
||||
module.exports = wait(50).then(() => ({
|
||||
plugins: ["./plugin"],
|
||||
}));
|
||||
9
packages/babel-core/test/fixtures/async/config-file-promise/plugin.js
vendored
Normal file
9
packages/babel-core/test/fixtures/async/config-file-promise/plugin.js
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
module.exports = function plugin({ types: t }) {
|
||||
return {
|
||||
visitor: {
|
||||
Program(path) {
|
||||
path.pushContainer("body", t.stringLiteral("success"));
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
3
packages/babel-core/test/fixtures/async/plugin-inherits/babel.config.js
vendored
Normal file
3
packages/babel-core/test/fixtures/async/plugin-inherits/babel.config.js
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
plugins: ["./plugin"],
|
||||
};
|
||||
10
packages/babel-core/test/fixtures/async/plugin-inherits/plugin.js
vendored
Normal file
10
packages/babel-core/test/fixtures/async/plugin-inherits/plugin.js
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
module.exports = function plugin({ types: t }) {
|
||||
return {
|
||||
inherits: require("./plugin2"),
|
||||
visitor: {
|
||||
Program(path) {
|
||||
path.pushContainer("body", t.stringLiteral("success"));
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
13
packages/babel-core/test/fixtures/async/plugin-inherits/plugin2.js
vendored
Normal file
13
packages/babel-core/test/fixtures/async/plugin-inherits/plugin2.js
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
const wait = t => new Promise(r => setTimeout(r, t));
|
||||
|
||||
module.exports = async function plugin({ types: t }) {
|
||||
await wait(50);
|
||||
|
||||
return {
|
||||
visitor: {
|
||||
Program(path) {
|
||||
path.pushContainer("body", t.stringLiteral("success 2"));
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
3
packages/babel-core/test/fixtures/async/plugin-post/babel.config.js
vendored
Normal file
3
packages/babel-core/test/fixtures/async/plugin-post/babel.config.js
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
plugins: ["./plugin"],
|
||||
};
|
||||
15
packages/babel-core/test/fixtures/async/plugin-post/plugin.js
vendored
Normal file
15
packages/babel-core/test/fixtures/async/plugin-post/plugin.js
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
const wait = t => new Promise(r => setTimeout(r, t));
|
||||
|
||||
module.exports = function plugin({ types: t }) {
|
||||
return {
|
||||
async post() {
|
||||
await wait(50);
|
||||
},
|
||||
|
||||
visitor: {
|
||||
Program(path) {
|
||||
path.pushContainer("body", t.stringLiteral("success"));
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
3
packages/babel-core/test/fixtures/async/plugin-pre/babel.config.js
vendored
Normal file
3
packages/babel-core/test/fixtures/async/plugin-pre/babel.config.js
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
plugins: ["./plugin"],
|
||||
};
|
||||
15
packages/babel-core/test/fixtures/async/plugin-pre/plugin.js
vendored
Normal file
15
packages/babel-core/test/fixtures/async/plugin-pre/plugin.js
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
const wait = t => new Promise(r => setTimeout(r, t));
|
||||
|
||||
module.exports = function plugin({ types: t }) {
|
||||
return {
|
||||
async pre() {
|
||||
await wait(50);
|
||||
},
|
||||
|
||||
visitor: {
|
||||
Program(path) {
|
||||
path.pushContainer("body", t.stringLiteral("success"));
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
3
packages/babel-core/test/fixtures/async/plugin/babel.config.js
vendored
Normal file
3
packages/babel-core/test/fixtures/async/plugin/babel.config.js
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
plugins: ["./plugin"],
|
||||
};
|
||||
13
packages/babel-core/test/fixtures/async/plugin/plugin.js
vendored
Normal file
13
packages/babel-core/test/fixtures/async/plugin/plugin.js
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
const wait = t => new Promise(r => setTimeout(r, t));
|
||||
|
||||
module.exports = async function plugin({ types: t }) {
|
||||
await wait(50);
|
||||
|
||||
return {
|
||||
visitor: {
|
||||
Program(path) {
|
||||
path.pushContainer("body", t.stringLiteral("success"));
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
@ -37,13 +37,20 @@ module.exports = function() {
      return null;
    },
    resolveId(importee) {
      let packageFolderName;
      const matches = importee.match(/^@babel\/([^/]+)$/);
      if (matches) {
        packageFolderName = `babel-${matches[1]}`;
      if (importee === "@babel/runtime/regenerator") {
        return path.join(
          dirname,
          "packages",
          "babel-runtime",
          "regenerator",
          "index.js"
        );
      }

      if (packageFolderName) {
      const matches = importee.match(/^@babel\/([^/]+)$/);
      if (matches) {
        const packageFolderName = `babel-${matches[1]}`;

        // resolve babel package names to their src index file
        const packageFolder = path.join(dirname, "packages", packageFolderName);
        const packageJson = require(path.join(packageFolder, "package.json"));