larry babby and threejs for glsl

This commit is contained in:
Sam
2024-06-24 21:24:00 +12:00
parent 87d5dc634d
commit 907ebae4c0
6474 changed files with 1279596 additions and 8 deletions

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
{
"name": "@parcel/bundler-default",
"version": "2.12.0",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"main": "lib/DefaultBundler.js",
"source": "src/DefaultBundler.js",
"engines": {
"node": ">= 12.0.0",
"parcel": "^2.12.0"
},
"dependencies": {
"@parcel/diagnostic": "2.12.0",
"@parcel/graph": "3.2.0",
"@parcel/plugin": "2.12.0",
"@parcel/rust": "2.12.0",
"@parcel/utils": "2.12.0",
"nullthrows": "^1.1.1"
},
"gitHead": "2059029ee91e5f03a273b0954d3e629d7375f986"
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,11 @@
import type {FilePath} from '@parcel/types';
import type {Cache} from './lib/types';
export type {Cache} from './lib/types';
export const FSCache: {
new (cacheDir: FilePath): Cache
};
export const LMDBCache: {
new (cacheDir: FilePath): Cache
};

View File

@@ -0,0 +1,167 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.FSCache = void 0;
function _stream() {
const data = _interopRequireDefault(require("stream"));
_stream = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _util() {
const data = require("util");
_util = function () {
return data;
};
return data;
}
function _logger() {
const data = _interopRequireDefault(require("@parcel/logger"));
_logger = function () {
return data;
};
return data;
}
function _core() {
const data = require("@parcel/core");
_core = function () {
return data;
};
return data;
}
var _package = _interopRequireDefault(require("../package.json"));
var _constants = require("./constants");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classPrivateMethodInitSpec(obj, privateSet) { _checkPrivateRedeclaration(obj, privateSet); privateSet.add(obj); }
function _checkPrivateRedeclaration(obj, privateCollection) { if (privateCollection.has(obj)) { throw new TypeError("Cannot initialize the same private elements twice on an object"); } }
function _classPrivateMethodGet(receiver, privateSet, fn) { if (!privateSet.has(receiver)) { throw new TypeError("attempted to get private field on non-instance"); } return fn; } // flowlint-next-line untyped-import:off
const pipeline = (0, _util().promisify)(_stream().default.pipeline);
var _getFilePath = /*#__PURE__*/new WeakSet();
var _unlinkChunks = /*#__PURE__*/new WeakSet();
class FSCache {
constructor(fs, cacheDir) {
_classPrivateMethodInitSpec(this, _unlinkChunks);
_classPrivateMethodInitSpec(this, _getFilePath);
this.fs = fs;
this.dir = cacheDir;
}
async ensure() {
// First, create the main cache directory if necessary.
await this.fs.mkdirp(this.dir);
// In parallel, create sub-directories for every possible hex value
// This speeds up large caches on many file systems since there are fewer files in a single directory.
let dirPromises = [];
for (let i = 0; i < 256; i++) {
dirPromises.push(this.fs.mkdirp(_path().default.join(this.dir, ('00' + i.toString(16)).slice(-2))));
}
await Promise.all(dirPromises);
}
_getCachePath(cacheId) {
return _path().default.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
}
getStream(key) {
return this.fs.createReadStream(this._getCachePath(`${key}-large`));
}
setStream(key, stream) {
return pipeline(stream, this.fs.createWriteStream(this._getCachePath(`${key}-large`)));
}
has(key) {
return this.fs.exists(this._getCachePath(key));
}
getBlob(key) {
return this.fs.readFile(this._getCachePath(key));
}
async setBlob(key, contents) {
await this.fs.writeFile(this._getCachePath(key), contents);
}
async getBuffer(key) {
try {
return await this.fs.readFile(this._getCachePath(key));
} catch (err) {
if (err.code === 'ENOENT') {
return null;
} else {
throw err;
}
}
}
hasLargeBlob(key) {
return this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0));
}
async getLargeBlob(key) {
const buffers = [];
for (let i = 0; await this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i)); i += 1) {
const file = this.fs.readFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i));
buffers.push(file);
}
return Buffer.concat(await Promise.all(buffers));
}
async setLargeBlob(key, contents, options) {
const chunks = Math.ceil(contents.length / _constants.WRITE_LIMIT_CHUNK);
const writePromises = [];
if (chunks === 1) {
// If there's one chunk, don't slice the content
writePromises.push(this.fs.writeFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0), contents, {
signal: options === null || options === void 0 ? void 0 : options.signal
}));
} else {
for (let i = 0; i < chunks; i += 1) {
writePromises.push(this.fs.writeFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i), typeof contents === 'string' ? contents.slice(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK) : contents.subarray(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK), {
signal: options === null || options === void 0 ? void 0 : options.signal
}));
}
}
// If there's already a files following this chunk, it's old and should be removed
writePromises.push(_classPrivateMethodGet(this, _unlinkChunks, _unlinkChunks2).call(this, key, chunks));
await Promise.all(writePromises);
}
async get(key) {
try {
let data = await this.fs.readFile(this._getCachePath(key));
return (0, _core().deserialize)(data);
} catch (err) {
if (err.code === 'ENOENT') {
return null;
} else {
throw err;
}
}
}
async set(key, value) {
try {
let blobPath = this._getCachePath(key);
let data = (0, _core().serialize)(value);
await this.fs.writeFile(blobPath, data);
} catch (err) {
_logger().default.error(err, '@parcel/cache');
}
}
refresh() {
// NOOP
}
}
exports.FSCache = FSCache;
function _getFilePath2(key, index) {
return _path().default.join(this.dir, `${key}-${index}`);
}
async function _unlinkChunks2(key, index) {
try {
await this.fs.unlink(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, index));
await _classPrivateMethodGet(this, _unlinkChunks, _unlinkChunks2).call(this, key, index + 1);
} catch (err) {
// If there's an error, no more chunks are left to delete
}
}
(0, _core().registerSerializableClass)(`${_package.default.version}:FSCache`, FSCache);

View File

@@ -0,0 +1,145 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.IDBCache = void 0;
function _stream() {
const data = require("stream");
_stream = function () {
return data;
};
return data;
}
function _core() {
const data = require("@parcel/core");
_core = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _package = _interopRequireDefault(require("../package.json"));
function _idb() {
const data = require("idb");
_idb = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// $FlowFixMe[untyped-import]
// $FlowFixMe[untyped-import]
const STORE_NAME = 'cache';
class IDBCache {
// $FlowFixMe
constructor() {
this.store = (0, _idb().openDB)('REPL-parcel-cache', 1, {
upgrade(db) {
db.createObjectStore(STORE_NAME);
},
blocked() {},
blocking() {},
terminated() {}
});
}
ensure() {
return Promise.resolve();
}
serialize() {
return {
/*::...null*/
};
}
static deserialize() {
return new IDBCache();
}
has(key) {
return Promise.resolve(this.store.get(key) != null);
}
async get(key) {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return null;
}
return Promise.resolve((0, _core().deserialize)(data));
}
async set(key, value) {
await (await this.store).put(STORE_NAME, (0, _core().serialize)(value), key);
}
getStream(key) {
let dataPromise = this.store.then(s => s.get(STORE_NAME, key)).then(d => Buffer.from(d)).catch(e => e);
const stream = new (_stream().Readable)({
// $FlowFixMe(incompatible-call)
async read() {
let data = await dataPromise;
if (data instanceof Error) {
stream.emit('error', data);
} else {
stream.push(Buffer.from(data));
stream.push(null);
}
}
});
return stream;
}
async setStream(key, stream) {
let buf = await (0, _utils().bufferStream)(stream);
await (await this.store).put(STORE_NAME, buf, key);
}
async getBlob(key) {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return Promise.reject(new Error(`Key ${key} not found in cache`));
}
return Buffer.from(data.buffer);
}
async setBlob(key, contents) {
let data = contents instanceof Uint8Array ? contents : Buffer.from(contents);
await (await this.store).put(STORE_NAME, data, key);
}
// async setBlobs(
// entries: $ReadOnlyArray<[string, Buffer | string]>,
// ): Promise<void> {
// const tx = (await this.store).transaction(STORE_NAME, 'readwrite');
// await Promise.all([
// ...entries.map(([key, value]) =>
// tx.store.put(
// value instanceof Uint8Array ? value : Buffer.from(value),
// key,
// ),
// ),
// tx.done,
// ]);
// }
async getBuffer(key) {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return null;
}
return Buffer.from(data.buffer);
}
hasLargeBlob(key) {
return this.has(key);
}
getLargeBlob(key) {
return this.getBlob(key);
}
setLargeBlob(key, contents) {
return this.setBlob(key, contents);
}
refresh() {
// NOOP
}
}
exports.IDBCache = IDBCache;
(0, _core().registerSerializableClass)(`${_package.default.version}:IDBCache`, IDBCache);

View File

@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.IDBCache = void 0;
// $FlowFixMe
class IDBCache {
constructor() {
throw new Error('IDBCache is only supported in the browser');
}
}
exports.IDBCache = IDBCache;

View File

@@ -0,0 +1,137 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.LMDBCache = void 0;
function _stream() {
const data = _interopRequireDefault(require("stream"));
_stream = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _util() {
const data = require("util");
_util = function () {
return data;
};
return data;
}
function _core() {
const data = require("@parcel/core");
_core = function () {
return data;
};
return data;
}
function _fs() {
const data = require("@parcel/fs");
_fs = function () {
return data;
};
return data;
}
var _package = _interopRequireDefault(require("../package.json"));
function _lmdb() {
const data = _interopRequireDefault(require("lmdb"));
_lmdb = function () {
return data;
};
return data;
}
var _FSCache = require("./FSCache");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classPrivateMethodInitSpec(obj, privateSet) { _checkPrivateRedeclaration(obj, privateSet); privateSet.add(obj); }
function _checkPrivateRedeclaration(obj, privateCollection) { if (privateCollection.has(obj)) { throw new TypeError("Cannot initialize the same private elements twice on an object"); } }
function _classPrivateMethodGet(receiver, privateSet, fn) { if (!privateSet.has(receiver)) { throw new TypeError("attempted to get private field on non-instance"); } return fn; } // flowlint-next-line untyped-import:off
// $FlowFixMe
const pipeline = (0, _util().promisify)(_stream().default.pipeline);
var _getFilePath = /*#__PURE__*/new WeakSet();
class LMDBCache {
// $FlowFixMe
constructor(cacheDir) {
_classPrivateMethodInitSpec(this, _getFilePath);
this.fs = new (_fs().NodeFS)();
this.dir = cacheDir;
this.fsCache = new _FSCache.FSCache(this.fs, cacheDir);
this.store = _lmdb().default.open(cacheDir, {
name: 'parcel-cache',
encoding: 'binary',
compression: true
});
}
ensure() {
return Promise.resolve();
}
serialize() {
return {
dir: this.dir
};
}
static deserialize(opts) {
return new LMDBCache(opts.dir);
}
has(key) {
return Promise.resolve(this.store.get(key) != null);
}
get(key) {
let data = this.store.get(key);
if (data == null) {
return Promise.resolve(null);
}
return Promise.resolve((0, _core().deserialize)(data));
}
async set(key, value) {
await this.setBlob(key, (0, _core().serialize)(value));
}
getStream(key) {
return this.fs.createReadStream(_path().default.join(this.dir, key));
}
setStream(key, stream) {
return pipeline(stream, this.fs.createWriteStream(_path().default.join(this.dir, key)));
}
getBlob(key) {
let buffer = this.store.get(key);
return buffer != null ? Promise.resolve(buffer) : Promise.reject(new Error(`Key ${key} not found in cache`));
}
async setBlob(key, contents) {
await this.store.put(key, contents);
}
getBuffer(key) {
return Promise.resolve(this.store.get(key));
}
hasLargeBlob(key) {
return this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0));
}
// eslint-disable-next-line require-await
async getLargeBlob(key) {
return this.fsCache.getLargeBlob(key);
}
// eslint-disable-next-line require-await
async setLargeBlob(key, contents, options) {
return this.fsCache.setLargeBlob(key, contents, options);
}
refresh() {
// Reset the read transaction for the store. This guarantees that
// the next read will see the latest changes to the store.
// Useful in scenarios where reads and writes are multi-threaded.
// See https://github.com/kriszyp/lmdb-js#resetreadtxn-void
this.store.resetReadTxn();
}
}
exports.LMDBCache = LMDBCache;
function _getFilePath2(key, index) {
return _path().default.join(this.dir, `${key}-${index}`);
}
(0, _core().registerSerializableClass)(`${_package.default.version}:LMDBCache`, LMDBCache);

View File

@@ -0,0 +1,8 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.WRITE_LIMIT_CHUNK = void 0;
// Node has a file size limit of 2 GB
const WRITE_LIMIT_CHUNK = exports.WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;

View File

@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _LMDBCache = require("./LMDBCache");
Object.keys(_LMDBCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _LMDBCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _LMDBCache[key];
}
});
});
var _FSCache = require("./FSCache");
Object.keys(_FSCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _FSCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _FSCache[key];
}
});
});
var _IDBCache = require("./IDBCache");
Object.keys(_IDBCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _IDBCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _IDBCache[key];
}
});
});

View File

@@ -0,0 +1,25 @@
import type { Readable } from "stream";
import type { AbortSignal } from "abortcontroller-polyfill/dist/cjs-ponyfill";
/**
 * Common interface implemented by every cache backend
 * (FSCache, LMDBCache, IDBCache).
 */
export interface Cache {
  /** Prepare the backing store (e.g. create directories); call before use. */
  ensure(): Promise<void>;
  has(key: string): Promise<boolean>;
  /** Deserialize and return the value stored by set(); nullish when missing. */
  get<T>(key: string): Promise<T | null | undefined>;
  /** Serialize and store a value. */
  set(key: string, value: unknown): Promise<void>;
  getStream(key: string): Readable;
  setStream(key: string, stream: Readable): Promise<void>;
  /** Raw bytes for a key; rejects when the key is missing. */
  getBlob(key: string): Promise<Buffer>;
  setBlob(key: string, contents: Buffer | string): Promise<void>;
  hasLargeBlob(key: string): Promise<boolean>;
  getLargeBlob(key: string): Promise<Buffer>;
  /** Store contents that may exceed single-write size limits; cancellable. */
  setLargeBlob(key: string, contents: Buffer | string, options?: {
    signal?: AbortSignal;
  }): Promise<void>;
  /** Like getBlob, but nullish for a missing key instead of rejecting. */
  getBuffer(key: string): Promise<Buffer | null | undefined>;
  /**
   * In a multi-threaded environment, where there are potentially multiple Cache
   * instances writing to the cache, ensure that this instance has the latest view
   * of the changes that may have been written to the cache in other threads.
   */
  refresh(): void;
}

View File

@@ -0,0 +1 @@
"use strict";

View File

@@ -0,0 +1,43 @@
{
"name": "@parcel/cache",
"version": "2.12.0",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"main": "lib/index.js",
"source": "src/index.js",
"types": "index.d.ts",
"engines": {
"node": ">= 12.0.0"
},
"scripts": {
"build-ts": "mkdir -p lib && flow-to-ts src/types.js > lib/types.d.ts",
"check-ts": "tsc --noEmit index.d.ts"
},
"dependencies": {
"@parcel/fs": "2.12.0",
"@parcel/logger": "2.12.0",
"@parcel/utils": "2.12.0",
"lmdb": "2.8.5"
},
"peerDependencies": {
"@parcel/core": "^2.12.0"
},
"devDependencies": {
"idb": "^5.0.8"
},
"browser": {
"./src/IDBCache.js": "./src/IDBCache.browser.js",
"./src/LMDBCache.js": false
},
"gitHead": "2059029ee91e5f03a273b0954d3e629d7375f986"
}

View File

@@ -0,0 +1,184 @@
// @flow strict-local
import type {Readable, Writable} from 'stream';
import type {FilePath} from '@parcel/types';
import type {FileSystem} from '@parcel/fs';
import type {Cache} from './types';
import type {AbortSignal} from 'abortcontroller-polyfill/dist/cjs-ponyfill';
import stream from 'stream';
import path from 'path';
import {promisify} from 'util';
import logger from '@parcel/logger';
import {serialize, deserialize, registerSerializableClass} from '@parcel/core';
// flowlint-next-line untyped-import:off
import packageJson from '../package.json';
import {WRITE_LIMIT_CHUNK} from './constants';
// Promisified stream.pipeline, used by setStream below.
const pipeline: (Readable, Writable) => Promise<void> = promisify(
  stream.pipeline,
);

/**
 * Cache that stores every entry as a file on disk. Regular entries are
 * sharded across 256 two-hex-digit sub-directories; "large blobs" are split
 * into numbered chunk files named `${key}-${index}` in the root directory.
 */
export class FSCache implements Cache {
  fs: FileSystem;
  dir: FilePath;
  constructor(fs: FileSystem, cacheDir: FilePath) {
    this.fs = fs;
    this.dir = cacheDir;
  }
  async ensure(): Promise<void> {
    // First, create the main cache directory if necessary.
    await this.fs.mkdirp(this.dir);
    // In parallel, create sub-directories for every possible hex value
    // This speeds up large caches on many file systems since there are fewer files in a single directory.
    let dirPromises = [];
    for (let i = 0; i < 256; i++) {
      dirPromises.push(
        this.fs.mkdirp(path.join(this.dir, ('00' + i.toString(16)).slice(-2))),
      );
    }
    await Promise.all(dirPromises);
  }
  // Shard path: the first two characters of the id pick the sub-directory.
  _getCachePath(cacheId: string): FilePath {
    return path.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
  }
  // Streams are stored under a `${key}-large` entry path.
  getStream(key: string): Readable {
    return this.fs.createReadStream(this._getCachePath(`${key}-large`));
  }
  setStream(key: string, stream: Readable): Promise<void> {
    return pipeline(
      stream,
      this.fs.createWriteStream(this._getCachePath(`${key}-large`)),
    );
  }
  has(key: string): Promise<boolean> {
    return this.fs.exists(this._getCachePath(key));
  }
  getBlob(key: string): Promise<Buffer> {
    return this.fs.readFile(this._getCachePath(key));
  }
  async setBlob(key: string, contents: Buffer | string): Promise<void> {
    await this.fs.writeFile(this._getCachePath(key), contents);
  }
  // Like getBlob, but resolves to null for a missing key instead of rejecting.
  async getBuffer(key: string): Promise<?Buffer> {
    try {
      return await this.fs.readFile(this._getCachePath(key));
    } catch (err) {
      if (err.code === 'ENOENT') {
        return null;
      } else {
        throw err;
      }
    }
  }
  // Chunk files live directly under this.dir (not sharded).
  #getFilePath(key: string, index: number): string {
    return path.join(this.dir, `${key}-${index}`);
  }
  // Recursively delete chunk files from `index` upward until an unlink fails
  // (treated as "no more chunks left").
  async #unlinkChunks(key: string, index: number): Promise<void> {
    try {
      await this.fs.unlink(this.#getFilePath(key, index));
      await this.#unlinkChunks(key, index + 1);
    } catch (err) {
      // If there's an error, no more chunks are left to delete
    }
  }
  // A large blob exists iff its first chunk file (`${key}-0`) exists.
  hasLargeBlob(key: string): Promise<boolean> {
    return this.fs.exists(this.#getFilePath(key, 0));
  }
  // Read consecutive chunk files until one is missing, then concatenate.
  async getLargeBlob(key: string): Promise<Buffer> {
    const buffers: Promise<Buffer>[] = [];
    for (let i = 0; await this.fs.exists(this.#getFilePath(key, i)); i += 1) {
      const file: Promise<Buffer> = this.fs.readFile(this.#getFilePath(key, i));
      buffers.push(file);
    }
    return Buffer.concat(await Promise.all(buffers));
  }
  // Split contents into WRITE_LIMIT_CHUNK-sized chunk files written in
  // parallel, then delete any stale chunk files past the new chunk count.
  // NOTE(review): empty contents gives chunks === 0, so nothing is written and
  // any existing chunk files are removed — confirm empty blobs never occur.
  async setLargeBlob(
    key: string,
    contents: Buffer | string,
    options?: {|signal?: AbortSignal|},
  ): Promise<void> {
    const chunks = Math.ceil(contents.length / WRITE_LIMIT_CHUNK);
    const writePromises: Promise<void>[] = [];
    if (chunks === 1) {
      // If there's one chunk, don't slice the content
      writePromises.push(
        this.fs.writeFile(this.#getFilePath(key, 0), contents, {
          signal: options?.signal,
        }),
      );
    } else {
      for (let i = 0; i < chunks; i += 1) {
        writePromises.push(
          this.fs.writeFile(
            this.#getFilePath(key, i),
            typeof contents === 'string'
              ? contents.slice(
                  i * WRITE_LIMIT_CHUNK,
                  (i + 1) * WRITE_LIMIT_CHUNK,
                )
              : contents.subarray(
                  i * WRITE_LIMIT_CHUNK,
                  (i + 1) * WRITE_LIMIT_CHUNK,
                ),
            {signal: options?.signal},
          ),
        );
      }
    }
    // If there are already chunk files following the new last chunk, they are stale and should be removed
    writePromises.push(this.#unlinkChunks(key, chunks));
    await Promise.all(writePromises);
  }
  // Deserialize a value stored by set(); resolves to null for a missing key.
  async get<T>(key: string): Promise<?T> {
    try {
      let data = await this.fs.readFile(this._getCachePath(key));
      return deserialize(data);
    } catch (err) {
      if (err.code === 'ENOENT') {
        return null;
      } else {
        throw err;
      }
    }
  }
  // Serialize and write a value; failures are logged, not rethrown.
  async set(key: string, value: mixed): Promise<void> {
    try {
      let blobPath = this._getCachePath(key);
      let data = serialize(value);
      await this.fs.writeFile(blobPath, data);
    } catch (err) {
      logger.error(err, '@parcel/cache');
    }
  }
  refresh(): void {
    // NOOP
  }
}
registerSerializableClass(`${packageJson.version}:FSCache`, FSCache);

View File

@@ -0,0 +1,141 @@
// @flow strict-local
import type {Cache} from './types';
import {Readable} from 'stream';
import {serialize, deserialize, registerSerializableClass} from '@parcel/core';
import {bufferStream} from '@parcel/utils';
// $FlowFixMe[untyped-import]
import packageJson from '../package.json';
// $FlowFixMe[untyped-import]
import {openDB} from 'idb';
const STORE_NAME = 'cache';
export class IDBCache implements Cache {
// $FlowFixMe
store: any;
constructor() {
this.store = openDB('REPL-parcel-cache', 1, {
upgrade(db) {
db.createObjectStore(STORE_NAME);
},
blocked() {},
blocking() {},
terminated() {},
});
}
ensure(): Promise<void> {
return Promise.resolve();
}
serialize(): {||} {
return {
/*::...null*/
};
}
static deserialize(): IDBCache {
return new IDBCache();
}
has(key: string): Promise<boolean> {
return Promise.resolve(this.store.get(key) != null);
}
async get<T>(key: string): Promise<?T> {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return null;
}
return Promise.resolve(deserialize(data));
}
async set(key: string, value: mixed): Promise<void> {
await (await this.store).put(STORE_NAME, serialize(value), key);
}
getStream(key: string): Readable {
let dataPromise = this.store
.then(s => s.get(STORE_NAME, key))
.then(d => Buffer.from(d))
.catch(e => e);
const stream = new Readable({
// $FlowFixMe(incompatible-call)
async read() {
let data = await dataPromise;
if (data instanceof Error) {
stream.emit('error', data);
} else {
stream.push(Buffer.from(data));
stream.push(null);
}
},
});
return stream;
}
async setStream(key: string, stream: Readable): Promise<void> {
let buf = await bufferStream(stream);
await (await this.store).put(STORE_NAME, buf, key);
}
async getBlob(key: string): Promise<Buffer> {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return Promise.reject(new Error(`Key ${key} not found in cache`));
}
return Buffer.from(data.buffer);
}
async setBlob(key: string, contents: Buffer | string): Promise<void> {
let data =
contents instanceof Uint8Array ? contents : Buffer.from(contents);
await (await this.store).put(STORE_NAME, data, key);
}
// async setBlobs(
// entries: $ReadOnlyArray<[string, Buffer | string]>,
// ): Promise<void> {
// const tx = (await this.store).transaction(STORE_NAME, 'readwrite');
// await Promise.all([
// ...entries.map(([key, value]) =>
// tx.store.put(
// value instanceof Uint8Array ? value : Buffer.from(value),
// key,
// ),
// ),
// tx.done,
// ]);
// }
async getBuffer(key: string): Promise<?Buffer> {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return null;
}
return Buffer.from(data.buffer);
}
hasLargeBlob(key: string): Promise<boolean> {
return this.has(key);
}
getLargeBlob(key: string): Promise<Buffer> {
return this.getBlob(key);
}
setLargeBlob(key: string, contents: Buffer | string): Promise<void> {
return this.setBlob(key, contents);
}
refresh(): void {
// NOOP
}
}
registerSerializableClass(`${packageJson.version}:IDBCache`, IDBCache);

View File

@@ -0,0 +1,9 @@
// @flow strict-local
import type {Cache} from './types';
// $FlowFixMe
export class IDBCache implements Cache {
constructor() {
throw new Error('IDBCache is only supported in the browser');
}
}

View File

@@ -0,0 +1,130 @@
// @flow strict-local
import type {FilePath} from '@parcel/types';
import type {Cache} from './types';
import type {Readable, Writable} from 'stream';
import type {AbortSignal} from 'abortcontroller-polyfill/dist/cjs-ponyfill';
import stream from 'stream';
import path from 'path';
import {promisify} from 'util';
import {serialize, deserialize, registerSerializableClass} from '@parcel/core';
import {NodeFS} from '@parcel/fs';
// flowlint-next-line untyped-import:off
import packageJson from '../package.json';
// $FlowFixMe
import lmdb from 'lmdb';
import {FSCache} from './FSCache';
// Promisified stream.pipeline, used by setStream below.
const pipeline: (Readable, Writable) => Promise<void> = promisify(
  stream.pipeline,
);

/**
 * Cache that keeps regular entries in an LMDB database. Streams are stored as
 * plain files keyed directly under the cache dir, and large blobs are
 * delegated to an FSCache built over the same directory.
 */
export class LMDBCache implements Cache {
  fs: NodeFS;
  dir: FilePath;
  // $FlowFixMe
  store: any;
  fsCache: FSCache;
  constructor(cacheDir: FilePath) {
    this.fs = new NodeFS();
    this.dir = cacheDir;
    this.fsCache = new FSCache(this.fs, cacheDir);
    this.store = lmdb.open(cacheDir, {
      name: 'parcel-cache',
      encoding: 'binary',
      compression: true,
    });
  }
  ensure(): Promise<void> {
    return Promise.resolve();
  }
  // Only the directory is needed to reconstruct this cache in another thread.
  serialize(): {|dir: FilePath|} {
    return {
      dir: this.dir,
    };
  }
  static deserialize(opts: {|dir: FilePath|}): LMDBCache {
    return new LMDBCache(opts.dir);
  }
  // lmdb reads are synchronous; results are wrapped in resolved promises to
  // satisfy the async Cache interface.
  has(key: string): Promise<boolean> {
    return Promise.resolve(this.store.get(key) != null);
  }
  get<T>(key: string): Promise<?T> {
    let data = this.store.get(key);
    if (data == null) {
      return Promise.resolve(null);
    }
    return Promise.resolve(deserialize(data));
  }
  async set(key: string, value: mixed): Promise<void> {
    await this.setBlob(key, serialize(value));
  }
  // Streams bypass LMDB: they are stored as plain files named by the raw key.
  getStream(key: string): Readable {
    return this.fs.createReadStream(path.join(this.dir, key));
  }
  setStream(key: string, stream: Readable): Promise<void> {
    return pipeline(
      stream,
      this.fs.createWriteStream(path.join(this.dir, key)),
    );
  }
  // Rejects for a missing key (unlike getBuffer, which resolves to undefined).
  getBlob(key: string): Promise<Buffer> {
    let buffer = this.store.get(key);
    return buffer != null
      ? Promise.resolve(buffer)
      : Promise.reject(new Error(`Key ${key} not found in cache`));
  }
  async setBlob(key: string, contents: Buffer | string): Promise<void> {
    await this.store.put(key, contents);
  }
  getBuffer(key: string): Promise<?Buffer> {
    return Promise.resolve(this.store.get(key));
  }
  // Mirrors FSCache's chunk-file naming over the same directory, so this
  // agrees with the fsCache delegation in get/setLargeBlob below.
  #getFilePath(key: string, index: number): string {
    return path.join(this.dir, `${key}-${index}`);
  }
  hasLargeBlob(key: string): Promise<boolean> {
    return this.fs.exists(this.#getFilePath(key, 0));
  }
  // eslint-disable-next-line require-await
  async getLargeBlob(key: string): Promise<Buffer> {
    return this.fsCache.getLargeBlob(key);
  }
  // eslint-disable-next-line require-await
  async setLargeBlob(
    key: string,
    contents: Buffer | string,
    options?: {|signal?: AbortSignal|},
  ): Promise<void> {
    return this.fsCache.setLargeBlob(key, contents, options);
  }
  refresh(): void {
    // Reset the read transaction for the store. This guarantees that
    // the next read will see the latest changes to the store.
    // Useful in scenarios where reads and writes are multi-threaded.
    // See https://github.com/kriszyp/lmdb-js#resetreadtxn-void
    this.store.resetReadTxn();
  }
}
registerSerializableClass(`${packageJson.version}:LMDBCache`, LMDBCache);

View File

@@ -0,0 +1,4 @@
// @flow strict-local
// Node has a file size limit of 2 GB
// NOTE(review): 2 * 1024 ** 3 === 2147483648 bytes — exactly 2 GiB, one byte
// above 2**31 - 1; confirm single writes of this exact size succeed in Node.
export const WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;

View File

@@ -0,0 +1,5 @@
// @flow
// Public entry point for @parcel/cache: re-export the Cache interface type
// and every concrete backend implementation.
export type {Cache} from './types';
export * from './LMDBCache';
export * from './FSCache';
export * from './IDBCache';

View File

@@ -0,0 +1,28 @@
// @flow
import type {Readable} from 'stream';
import type {AbortSignal} from 'abortcontroller-polyfill/dist/cjs-ponyfill';

/**
 * Common interface implemented by every cache backend
 * (FSCache, LMDBCache, IDBCache).
 */
export interface Cache {
  // Prepare the backing store (e.g. create directories); call before use.
  ensure(): Promise<void>;
  has(key: string): Promise<boolean>;
  // Deserialize and return the value stored by set(); nullish when missing.
  get<T>(key: string): Promise<?T>;
  // Serialize and store a value.
  set(key: string, value: mixed): Promise<void>;
  getStream(key: string): Readable;
  setStream(key: string, stream: Readable): Promise<void>;
  // Raw bytes for a key; rejects when the key is missing.
  getBlob(key: string): Promise<Buffer>;
  setBlob(key: string, contents: Buffer | string): Promise<void>;
  hasLargeBlob(key: string): Promise<boolean>;
  getLargeBlob(key: string): Promise<Buffer>;
  // Store contents that may exceed single-write size limits; cancellable.
  setLargeBlob(
    key: string,
    contents: Buffer | string,
    options?: {|signal?: AbortSignal|},
  ): Promise<void>;
  // Like getBlob, but nullish for a missing key instead of rejecting.
  getBuffer(key: string): Promise<?Buffer>;
  /**
   * In a multi-threaded environment, where there are potentially multiple Cache
   * instances writing to the cache, ensure that this instance has the latest view
   * of the changes that may have been written to the cache in other threads.
   */
  refresh(): void;
}

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,38 @@
{
"name": "@parcel/codeframe",
"version": "2.12.0",
"description": "Blazing fast, zero configuration web application bundler",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"main": "lib/codeframe.js",
"source": "src/codeframe.js",
"engines": {
"node": ">= 12.0.0"
},
"targets": {
"main": {
"includeNodeModules": {
"chalk": false
}
}
},
"dependencies": {
"chalk": "^4.1.0"
},
"devDependencies": {
"emphasize": "^4.2.0",
"slice-ansi": "^4.0.0",
"string-width": "^4.2.0"
},
"gitHead": "2059029ee91e5f03a273b0954d3e629d7375f986"
}

View File

@@ -0,0 +1,302 @@
// @flow
import type {DiagnosticCodeHighlight} from '@parcel/diagnostic';
import chalk from 'chalk';
import emphasize from 'emphasize';
import stringWidth from 'string-width';
import sliceAnsi from 'slice-ansi';
type CodeFramePadding = {|
  before: number, // context lines rendered before the first highlight
  after: number, // context lines rendered after the last highlight
|};

// Caller-facing options: any subset of CodeFrameOptions may be supplied.
type CodeFrameOptionsInput = $Shape<CodeFrameOptions>;

type CodeFrameOptions = {|
  useColor: boolean, // wrap markers/messages in ANSI red via chalk
  syntaxHighlighting: boolean, // run the source through `emphasize`
  maxLines: number, // cap on rendered source lines per frame
  padding: CodeFramePadding,
  terminalWidth: number, // column budget used when truncating long lines
  language?: string, // explicit language hint for syntax highlighting
|};

// Matches every common line terminator: \r\n, \n, \r, U+2028, U+2029.
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
const TAB_REPLACE_REGEX = /\t/g;
const TAB_REPLACEMENT = ' ';
const DEFAULT_TERMINAL_WIDTH = 80;
const highlightSyntax = (txt: string, lang?: string): string => {
if (lang) {
try {
return emphasize.highlight(lang, txt).value;
} catch (e) {
// fallback for unknown languages...
}
}
return emphasize.highlightAuto(txt).value;
};
/**
 * Render a terminal "code frame" for `code`: the relevant source lines with
 * line-number gutters, caret (`^`) underlines for each highlight, and
 * optional per-highlight inline messages.
 *
 * @param code       Full source text to excerpt.
 * @param highlights Highlight ranges (1-based lines/columns) with optional
 *                   `message` strings; an empty array yields ''.
 * @param inputOpts  Partial options; missing fields fall back to defaults
 *                   (no color, no syntax highlighting, 12 max lines,
 *                   80-column terminal, 1 padding line before / 2 after).
 * @returns The formatted code frame as a single string.
 */
export default function codeFrame(
  code: string,
  highlights: Array<DiagnosticCodeHighlight>,
  inputOpts: CodeFrameOptionsInput = {},
): string {
  if (highlights.length < 1) return '';

  let opts: CodeFrameOptions = {
    useColor: !!inputOpts.useColor,
    syntaxHighlighting: !!inputOpts.syntaxHighlighting,
    language: inputOpts.language,
    maxLines: inputOpts.maxLines ?? 12,
    terminalWidth: inputOpts.terminalWidth || DEFAULT_TERMINAL_WIDTH,
    padding: inputOpts.padding || {
      before: 1,
      after: 2,
    },
  };

  // Highlights messages and prefixes when colors are enabled
  const highlighter = (s: string, bold?: boolean) => {
    if (opts.useColor) {
      let redString = chalk.red(s);
      return bold ? chalk.bold(redString) : redString;
    }
    return s;
  };

  // Prefix lines with the line number
  const lineNumberPrefixer = (params: {|
    lineNumber?: string,
    lineNumberLength: number,
    isHighlighted: boolean,
  |}) => {
    let {lineNumber, lineNumberLength, isHighlighted} = params;
    return `${isHighlighted ? highlighter('>') : ' '} ${
      lineNumber
        ? lineNumber.padStart(lineNumberLength, ' ')
        : ' '.repeat(lineNumberLength)
    } | `;
  };

  // Convert the 1-based input coordinates to 0-based internal coordinates
  // (the original array is kept so the maxLines "tail" below can reuse it).
  let originalHighlights = highlights;
  highlights = highlights.map(h => {
    return {
      start: {
        column: h.start.column - 1,
        line: h.start.line - 1,
      },
      end: {
        column: h.end.column - 1,
        line: h.end.line - 1,
      },
      message: h.message,
    };
  });

  // Find first and last highlight
  let firstHighlight =
    highlights.length > 1
      ? highlights.sort((a, b) => a.start.line - b.start.line)[0]
      : highlights[0];
  let lastHighlight =
    highlights.length > 1
      ? highlights.sort((a, b) => b.end.line - a.end.line)[0]
      : highlights[0];

  // Calculate first and last line index of codeframe
  let startLine = firstHighlight.start.line - opts.padding.before;
  startLine = startLine < 0 ? 0 : startLine;
  let endLineIndex = lastHighlight.end.line + opts.padding.after;
  let tail;
  if (endLineIndex - startLine > opts.maxLines) {
    // Too many lines: clamp this frame and collect the highlights that fall
    // past the cutoff into `tail`, rendered as a separate frame at the end.
    let maxLine = startLine + opts.maxLines - 1;
    highlights = highlights.filter(h => h.start.line < maxLine);
    lastHighlight = highlights[0];
    endLineIndex = Math.min(
      maxLine,
      lastHighlight.end.line + opts.padding.after,
    );
    tail = originalHighlights.filter(h => h.start.line > endLineIndex);
  }
  let lineNumberLength = (endLineIndex + 1).toString(10).length;

  // Split input into lines and highlight syntax
  let lines = code.split(NEWLINE);
  let syntaxHighlightedLines = (
    opts.syntaxHighlighting ? highlightSyntax(code, opts.language) : code
  )
    .replace(TAB_REPLACE_REGEX, TAB_REPLACEMENT)
    .split(NEWLINE);

  // Loop over all lines and create codeframe
  let resultLines = [];
  for (
    let currentLineIndex = startLine;
    currentLineIndex < syntaxHighlightedLines.length;
    currentLineIndex++
  ) {
    if (currentLineIndex > endLineIndex) break;
    if (currentLineIndex > syntaxHighlightedLines.length - 1) break;

    // Find highlights that need to get rendered on the current line
    let lineHighlights = highlights
      .filter(
        highlight =>
          highlight.start.line <= currentLineIndex &&
          highlight.end.line >= currentLineIndex,
      )
      .sort(
        (a, b) =>
          (a.start.line < currentLineIndex ? 0 : a.start.column) -
          (b.start.line < currentLineIndex ? 0 : b.start.column),
      );

    // Check if this line has a full line highlight
    let isWholeLine =
      lineHighlights.length &&
      !!lineHighlights.find(
        h => h.start.line < currentLineIndex && h.end.line > currentLineIndex,
      );

    let lineLengthLimit =
      opts.terminalWidth > lineNumberLength + 7
        ? opts.terminalWidth - (lineNumberLength + 5)
        : 10;

    // Split the line into line parts that will fit the provided terminal width
    let colOffset = 0;
    let lineEndCol = lineLengthLimit;
    let syntaxHighlightedLine = syntaxHighlightedLines[currentLineIndex];
    if (stringWidth(syntaxHighlightedLine) > lineLengthLimit) {
      // Line is too wide: keep a window around the first highlight on it,
      // starting 5 columns before the highlight for context.
      if (lineHighlights.length > 0) {
        if (lineHighlights[0].start.line === currentLineIndex) {
          colOffset = lineHighlights[0].start.column - 5;
        } else if (lineHighlights[0].end.line === currentLineIndex) {
          colOffset = lineHighlights[0].end.column - 5;
        }
      }
      colOffset = colOffset > 0 ? colOffset : 0;
      lineEndCol = colOffset + lineLengthLimit;
      syntaxHighlightedLine = sliceAnsi(
        syntaxHighlightedLine,
        colOffset,
        lineEndCol,
      );
    }

    // Write the syntax highlighted line part
    resultLines.push(
      lineNumberPrefixer({
        lineNumber: (currentLineIndex + 1).toString(10),
        lineNumberLength,
        isHighlighted: lineHighlights.length > 0,
      }) + syntaxHighlightedLine,
    );

    let lineWidth = stringWidth(syntaxHighlightedLine);

    let highlightLine = '';
    if (isWholeLine) {
      highlightLine = highlighter('^'.repeat(lineWidth));
    } else if (lineHighlights.length > 0) {
      let lastCol = 0;
      let highlight = null;
      let highlightHasEnded = false;
      for (
        let highlightIndex = 0;
        highlightIndex < lineHighlights.length;
        highlightIndex++
      ) {
        // Set highlight to current highlight
        highlight = lineHighlights[highlightIndex];
        highlightHasEnded = false;

        // Calculate the startColumn and get the real width by doing a substring of the original
        // line and replacing tabs with our tab replacement to support tab handling
        let startCol = 0;
        if (
          highlight.start.line === currentLineIndex &&
          highlight.start.column > colOffset
        ) {
          startCol = lines[currentLineIndex]
            .substring(colOffset, highlight.start.column)
            .replace(TAB_REPLACE_REGEX, TAB_REPLACEMENT).length;
        }

        // Calculate the endColumn and get the real width by doing a substring of the original
        // line and replacing tabs with our tab replacement to support tab handling
        let endCol = lineWidth - 1;
        if (highlight.end.line === currentLineIndex) {
          endCol = lines[currentLineIndex]
            .substring(colOffset, highlight.end.column)
            .replace(TAB_REPLACE_REGEX, TAB_REPLACEMENT).length;
          // If the endCol is too big for this line part, trim it so we can handle it in the next one
          if (endCol > lineWidth) {
            endCol = lineWidth - 1;
          }
          highlightHasEnded = true;
        }

        // If endcol is smaller than lastCol it overlaps with another highlight and is no longer visible, we can skip those
        if (endCol >= lastCol) {
          let characters = endCol - startCol + 1;
          if (startCol > lastCol) {
            // startCol is before lastCol, so add spaces as padding before the highlight indicators
            highlightLine += ' '.repeat(startCol - lastCol);
          } else if (lastCol > startCol) {
            // If last column is larger than the start, there's overlap in highlights
            // This line adjusts the characters count to ensure we don't add too many characters
            characters += startCol - lastCol;
          }
          // Don't crash (and swallow the original message) if the diagnostic is malformed (end is before start).
          characters = Math.max(1, characters);

          // Append the highlight indicators
          highlightLine += highlighter('^'.repeat(characters));

          // Set the lastCol equal to character count between start of line part and highlight end-column
          lastCol = endCol + 1;
        }

        // There's no point in processing more highlights if we reached the end of the line
        if (endCol >= lineEndCol - 1) {
          break;
        }
      }

      // Append the highlight message if the current highlights ends on this line part
      if (highlight && highlight.message && highlightHasEnded) {
        highlightLine += ' ' + highlighter(highlight.message, true);
      }
    }

    if (highlightLine) {
      resultLines.push(
        lineNumberPrefixer({
          lineNumberLength,
          isHighlighted: true,
        }) + highlightLine,
      );
    }
  }

  let result = resultLines.join('\n');

  // Highlights that fell past the maxLines cutoff are rendered as a separate,
  // recursive frame appended below this one.
  if (tail && tail.length > 0) {
    result += '\n\n' + codeFrame(code, tail, inputOpts);
  }

  return result;
}

View File

@@ -0,0 +1,822 @@
import assert from 'assert';
import {readFileSync} from 'fs';
import {join as joinPath} from 'path';
import codeframe from '../src/codeframe';
const LINE_END = '\n';
describe('codeframe', () => {
it('should create a codeframe', () => {
let codeframeString = codeframe(
'hello world',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 1,
line: 1,
},
},
{
start: {
column: 3,
line: 1,
},
end: {
column: 5,
line: 1,
},
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^ ^^^');
});
it('should create a codeframe with multiple lines', () => {
let codeframeString = codeframe(
'hello world\nEnjoy this nice codeframe',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 1,
line: 1,
},
},
{
start: {
column: 7,
line: 1,
},
end: {
column: 10,
line: 2,
},
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^ ^^^^^');
assert.equal(lines[2], '> 2 | Enjoy this nice codeframe');
assert.equal(lines[3], '> | ^^^^^^^^^^');
});
it('should handle unordered overlapping highlights properly', () => {
let codeframeString = codeframe(
'hello world\nEnjoy this nice codeframe',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 1,
line: 1,
},
},
{
start: {
column: 7,
line: 1,
},
end: {
column: 10,
line: 2,
},
},
{
start: {
column: 4,
line: 2,
},
end: {
column: 7,
line: 2,
},
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^ ^^^^^');
assert.equal(lines[2], '> 2 | Enjoy this nice codeframe');
assert.equal(lines[3], '> | ^^^^^^^^^^');
});
it('should handle partial overlapping highlights properly', () => {
let codeframeString = codeframe(
'hello world\nEnjoy this nice codeframe',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 1,
line: 1,
},
},
{
start: {
column: 7,
line: 1,
},
end: {
column: 10,
line: 2,
},
},
{
start: {
column: 4,
line: 2,
},
end: {
column: 12,
line: 2,
},
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^ ^^^^^');
assert.equal(lines[2], '> 2 | Enjoy this nice codeframe');
assert.equal(lines[3], '> | ^^^^^^^^^^^^');
});
it('should be able to render inline messages', () => {
let codeframeString = codeframe(
'hello world\nEnjoy this nice codeframe',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 6,
line: 1,
},
message: 'test',
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^^^^^^ test');
assert.equal(lines[2], ' 2 | Enjoy this nice codeframe');
});
it('should only render last inline message of a column', () => {
let codeframeString = codeframe(
'hello world\nEnjoy this nice codeframe',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 3,
line: 1,
},
message: 'test',
},
{
start: {
column: 1,
line: 1,
},
end: {
column: 6,
line: 1,
},
message: 'this should be printed',
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^^^^^^ this should be printed');
assert.equal(lines[2], ' 2 | Enjoy this nice codeframe');
});
it('should only render last inline message of a column with space', () => {
let codeframeString = codeframe(
'hello world\nEnjoy this nice codeframe',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 1,
line: 1,
},
message: 'test',
},
{
start: {
column: 3,
line: 1,
},
end: {
column: 7,
line: 1,
},
message: 'this should be printed',
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^ ^^^^^ this should be printed');
assert.equal(lines[2], ' 2 | Enjoy this nice codeframe');
});
it('should only render last inline message of a column with multiple lines and space', () => {
let codeframeString = codeframe(
'hello world\nEnjoy this nice codeframe\nThis is another line',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 1,
line: 1,
},
message: 'test',
},
{
start: {
column: 3,
line: 1,
},
end: {
column: 7,
line: 1,
},
message: 'this should be printed',
},
{
start: {
column: 3,
line: 2,
},
end: {
column: 7,
line: 3,
},
message: 'message line 2',
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^ ^^^^^ this should be printed');
assert.equal(lines[2], '> 2 | Enjoy this nice codeframe');
assert.equal(lines[3], '> | ^^^^^^^^^^^^^^^^^^^^^^^');
assert.equal(lines[4], '> 3 | This is another line');
assert.equal(lines[5], '> | ^^^^^^^ message line 2');
});
it('should only render last inline message of a column with multiple lines and space', () => {
let codeframeString = codeframe(
'hello world\nEnjoy this nice codeframe\nThis is another line',
[
{
start: {
column: 1,
line: 1,
},
end: {
column: 1,
line: 1,
},
message: 'test',
},
{
start: {
column: 3,
line: 1,
},
end: {
column: 7,
line: 1,
},
message: 'this should be printed',
},
{
start: {
column: 3,
line: 2,
},
end: {
column: 7,
line: 3,
},
message: 'message line 2',
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^ ^^^^^ this should be printed');
assert.equal(lines[2], '> 2 | Enjoy this nice codeframe');
assert.equal(lines[3], '> | ^^^^^^^^^^^^^^^^^^^^^^^');
assert.equal(lines[4], '> 3 | This is another line');
assert.equal(lines[5], '> | ^^^^^^^ message line 2');
});
it('should properly use padding', () => {
let codeframeString = codeframe(
'test\n'.repeat(100),
[
{
start: {
column: 2,
line: 5,
},
end: {
column: 2,
line: 5,
},
message: 'test',
},
],
{
useColor: false,
padding: {
before: 2,
after: 4,
},
},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 8);
assert.equal(lines[0], ' 3 | test');
assert.equal(lines[2], '> 5 | test');
assert.equal(lines[3], '> | ^ test');
assert.equal(lines[7], ' 9 | test');
});
it('should properly pad numbers for large files', () => {
let codeframeString = codeframe('test\n'.repeat(1000), [
{
start: {
column: 2,
line: 99,
},
end: {
column: 2,
line: 99,
},
message: 'test',
},
{
start: {
column: 2,
line: 100,
},
end: {
column: 2,
line: 100,
},
message: 'test 2',
},
]);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 7);
assert.equal(lines[0], ' 98 | test');
assert.equal(lines[1], '> 99 | test');
assert.equal(lines[2], '> | ^ test');
assert.equal(lines[3], '> 100 | test');
assert.equal(lines[4], '> | ^ test 2');
assert.equal(lines[5], ' 101 | test');
assert.equal(lines[6], ' 102 | test');
});
it('should properly pad numbers for short files', () => {
let codeframeString = codeframe('test\n'.repeat(1000), [
{
start: {
column: 2,
line: 7,
},
end: {
column: 2,
line: 7,
},
message: 'test',
},
{
start: {
column: 2,
line: 12,
},
end: {
column: 2,
line: 12,
},
message: 'test',
},
]);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 11);
assert.equal(lines[0], ' 6 | test');
assert.equal(lines[4], ' 9 | test');
assert.equal(lines[5], ' 10 | test');
assert.equal(lines[6], ' 11 | test');
assert.equal(lines[10], ' 14 | test');
});
it('should properly use maxLines', () => {
let line = 'test '.repeat(100);
let codeframeString = codeframe(
`${line}\n`.repeat(100),
[
{
start: {
column: 2,
line: 5,
},
end: {
column: 2,
line: 5,
},
message: 'test',
},
{
start: {
column: 2,
line: 12,
},
end: {
column: 2,
line: 20,
},
message: 'test',
},
],
{
useColor: false,
maxLines: 10,
terminalWidth: 5,
},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 13);
assert.equal(lines[0], ' 4 | test test ');
assert.equal(lines[7], ' 10 | test test ');
assert.equal(lines[11], '> 13 | test test ');
assert.equal(lines[12], '> | ^^^^^^^^^^');
});
it('should be able to handle tabs', () => {
let codeframeString = codeframe(
'hel\tlo wor\tld\nEnjoy thi\ts nice cod\teframe',
[
{
start: {
column: 5,
line: 1,
},
end: {
column: 8,
line: 1,
},
message: 'test',
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hel lo wor ld');
assert.equal(lines[1], '> | ^^^^ test');
assert.equal(lines[2], ' 2 | Enjoy thi s nice cod eframe');
});
it('should be able to handle tabs with multiple highlights', () => {
let codeframeString = codeframe(
'hel\tlo wor\tld\nEnjoy thi\ts nice cod\teframe',
[
{
start: {
column: 3,
line: 1,
},
end: {
column: 5,
line: 1,
},
message: 'test',
},
{
start: {
column: 7,
line: 1,
},
end: {
column: 8,
line: 1,
},
message: 'test',
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hel lo wor ld');
assert.equal(lines[1], '> | ^^^^ ^^ test');
assert.equal(lines[2], ' 2 | Enjoy thi s nice cod eframe');
});
it('multiline highlights with tabs', () => {
let codeframeString = codeframe(
'hel\tlo wor\tld\nEnjoy thi\ts nice cod\teframe\ntest',
[
{
start: {
column: 3,
line: 1,
},
end: {
column: 2,
line: 3,
},
message: 'test',
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hel lo wor ld');
assert.equal(lines[1], '> | ^^^^^^^^^^^^^');
assert.equal(lines[2], '> 2 | Enjoy thi s nice cod eframe');
assert.equal(lines[3], '> | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^');
assert.equal(lines[4], '> 3 | test');
assert.equal(lines[5], '> | ^^ test');
});
it('Should truncate long lines and print message', () => {
let originalLine = 'hello world '.repeat(1000);
let codeframeString = codeframe(
originalLine,
[
{
start: {
column: 1000,
line: 1,
},
end: {
column: 1200,
line: 1,
},
message: 'This is a message',
},
],
{useColor: false, terminalWidth: 25},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 2);
assert.equal(lines[0], '> 1 | d hello world hello');
assert.equal(lines[1], '> | ^^^^^^^^^^^^^^ This is a message');
});
it('Truncation across multiple lines', () => {
let originalLine =
'hello world '.repeat(100) + '\n' + 'new line '.repeat(100);
let codeframeString = codeframe(
originalLine,
[
{
start: {
column: 15,
line: 1,
},
end: {
column: 400,
line: 1,
},
message: 'This is the first line',
},
{
start: {
column: 2,
line: 2,
},
end: {
column: 100,
line: 2,
},
message: 'This is the second line',
},
],
{useColor: false, terminalWidth: 25},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 4);
assert.equal(lines[0], '> 1 | ld hello world hell');
assert.equal(lines[1], '> | ^^^^^^^^^^^^^^ This is the first line');
assert.equal(lines[2], '> 2 | new line new line n');
assert.equal(lines[3], '> | ^^^^^^^^^^^^^^^^^^ This is the second line');
});
it('Truncation across various types and positions of highlights', () => {
let originalLine =
'hello world '.repeat(100) + '\n' + 'new line '.repeat(100);
let codeframeString = codeframe(
originalLine,
[
{
start: {
column: 2,
line: 1,
},
end: {
column: 5,
line: 1,
},
},
{
start: {
column: 6,
line: 1,
},
end: {
column: 10,
line: 1,
},
message: 'I have a message',
},
{
start: {
column: 15,
line: 1,
},
end: {
column: 25,
line: 1,
},
message: 'I also have a message',
},
{
start: {
column: 2,
line: 2,
},
end: {
column: 5,
line: 2,
},
message: 'This is the second line',
},
],
{useColor: false, terminalWidth: 25},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 4);
assert.equal(lines[0], '> 1 | hello world hello w');
assert.equal(lines[1], '> | ^^^^^^^^^ ^^^^^ I also have a message');
assert.equal(lines[2], '> 2 | new line new line n');
assert.equal(lines[3], '> | ^^^^ This is the second line');
});
it('Multi-line highlight w/ truncation', () => {
let originalLine =
'hello world '.repeat(100) + '\n' + 'new line '.repeat(100);
let codeframeString = codeframe(
originalLine,
[
{
start: {
column: 2,
line: 1,
},
end: {
column: 151,
line: 2,
},
message: 'I have a message',
},
],
{useColor: false, terminalWidth: 25},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 4);
assert.equal(lines[0], '> 1 | hello world hello w');
assert.equal(lines[1], '> | ^^^^^^^^^^^^^^^^^^');
assert.equal(lines[2], '> 2 | ew line new line ne');
assert.equal(lines[3], '> | ^^^^^^ I have a message');
});
it('Should pad properly, T-650', () => {
let fileContent = readFileSync(
joinPath(__dirname, './fixtures/a.js'),
'utf8',
);
let codeframeString = codeframe(
fileContent,
[
{
start: {
line: 8,
column: 10,
},
end: {
line: 8,
column: 48,
},
},
],
{
useColor: false,
syntaxHighlighting: false,
language: 'js',
terminalWidth: 100,
},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines.length, 5);
assert.equal(lines[0], ` 7 | import Tooltip from '../tooltip';`);
assert.equal(
lines[1],
`> 8 | import VisuallyHidden from '../visually-hidden';`,
);
assert.equal(
lines[2],
'> | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
);
assert.equal(lines[3], ' 9 | ');
assert.equal(lines[4], ' 10 | /**');
});
it('should still generate a codeframe when end is before start', () => {
let codeframeString = codeframe(
'hello world',
[
{
start: {
column: 5,
line: 1,
},
end: {
column: 1,
line: 1,
},
},
],
{useColor: false},
);
let lines = codeframeString.split(LINE_END);
assert.equal(lines[0], '> 1 | hello world');
assert.equal(lines[1], '> | ^');
});
});

View File

@@ -0,0 +1,13 @@
import test from 'test';
import component from './component';
/**
* This is a comment
*/
import Tooltip from '../tooltip';
import VisuallyHidden from '../visually-hidden';
/**
* This is another comment
*/
import {Label} from './label';

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,22 @@
// Compiled (babel) output of src/RawCompressor.js — a pass-through compressor
// that returns the incoming stream unchanged. Do not edit by hand.
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;
// Lazily require @parcel/plugin on first use and memoize the module object.
function _plugin() {
  const data = require("@parcel/plugin");
  _plugin = function () {
    return data;
  };
  return data;
}
var _default = exports.default = new (_plugin().Compressor)({
  compress({
    stream
  }) {
    return {
      stream
    };
  }
});

View File

@@ -0,0 +1,26 @@
{
"name": "@parcel/compressor-raw",
"version": "2.12.0",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"main": "lib/RawCompressor.js",
"source": "src/RawCompressor.js",
"engines": {
"node": ">= 12.0.0",
"parcel": "^2.12.0"
},
"dependencies": {
"@parcel/plugin": "2.12.0"
},
"gitHead": "2059029ee91e5f03a273b0954d3e629d7375f986"
}

View File

@@ -0,0 +1,8 @@
// @flow
import {Compressor} from '@parcel/plugin';
export default (new Compressor({
compress({stream}) {
return {stream};
},
}): Compressor);

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,80 @@
{
"bundler": "@parcel/bundler-default",
"transformers": {
"types:*.{ts,tsx}": ["@parcel/transformer-typescript-types"],
"bundle-text:*": ["...", "@parcel/transformer-inline-string"],
"data-url:*": ["...", "@parcel/transformer-inline-string"],
"worklet:*.{js,mjs,jsm,jsx,es6,cjs,ts,tsx}": [
"@parcel/transformer-worklet",
"..."
],
"*.{js,mjs,jsm,jsx,es6,cjs,ts,tsx}": [
"@parcel/transformer-babel",
"@parcel/transformer-js",
"@parcel/transformer-react-refresh-wrap"
],
"*.{json,json5}": ["@parcel/transformer-json"],
"*.jsonld": ["@parcel/transformer-jsonld"],
"*.toml": ["@parcel/transformer-toml"],
"*.webmanifest": ["@parcel/transformer-webmanifest"],
"webmanifest:*.{json,webmanifest}": ["@parcel/transformer-webmanifest"],
"*.{yaml,yml}": ["@parcel/transformer-yaml"],
"*.{glsl,vert,frag}": ["@parcel/transformer-glsl"],
"*.{gql,graphql}": ["@parcel/transformer-graphql"],
"*.{styl,stylus}": ["@parcel/transformer-stylus"],
"*.{sass,scss}": ["@parcel/transformer-sass"],
"*.less": ["@parcel/transformer-less"],
"*.{css,pcss}": ["@parcel/transformer-postcss", "@parcel/transformer-css"],
"*.sss": ["@parcel/transformer-sugarss"],
"*.{htm,html,xhtml}": [
"@parcel/transformer-posthtml",
"@parcel/transformer-html"
],
"*.pug": ["@parcel/transformer-pug"],
"*.coffee": ["@parcel/transformer-coffeescript"],
"*.elm": ["@parcel/transformer-elm"],
"*.mdx": ["@parcel/transformer-mdx"],
"*.vue": ["@parcel/transformer-vue"],
"template:*.vue": ["@parcel/transformer-vue"],
"script:*.vue": ["@parcel/transformer-vue"],
"style:*.vue": ["@parcel/transformer-vue"],
"custom:*.vue": ["@parcel/transformer-vue"],
"*.{png,jpg,jpeg,webp,gif,tiff,avif,heic,heif}": [
"@parcel/transformer-image"
],
"*.svg": ["@parcel/transformer-svg"],
"*.{xml,rss,atom}": ["@parcel/transformer-xml"],
"url:*": ["...", "@parcel/transformer-raw"]
},
"namers": ["@parcel/namer-default"],
"runtimes": [
"@parcel/runtime-js",
"@parcel/runtime-browser-hmr",
"@parcel/runtime-react-refresh",
"@parcel/runtime-service-worker"
],
"optimizers": {
"data-url:*": ["...", "@parcel/optimizer-data-url"],
"*.css": ["@parcel/optimizer-css"],
"*.{html,xhtml}": ["@parcel/optimizer-htmlnano"],
"*.{js,mjs,cjs}": ["@parcel/optimizer-swc"],
"*.svg": ["@parcel/optimizer-svgo"],
"*.{jpg,jpeg,png}": ["@parcel/optimizer-image"]
},
"packagers": {
"*.{html,xhtml}": "@parcel/packager-html",
"*.css": "@parcel/packager-css",
"*.{js,mjs,cjs}": "@parcel/packager-js",
"*.svg": "@parcel/packager-svg",
"*.{xml,rss,atom}": "@parcel/packager-xml",
"*.ts": "@parcel/packager-ts",
"*.wasm": "@parcel/packager-wasm",
"*.{jsonld,svg,webmanifest}": "@parcel/packager-raw-url",
"*": "@parcel/packager-raw"
},
"compressors": {
"*": ["@parcel/compressor-raw"]
},
"resolvers": ["@parcel/resolver-default"],
"reporters": ["@parcel/reporter-dev-server"]
}

View File

@@ -0,0 +1,82 @@
{
"name": "@parcel/config-default",
"version": "2.12.0",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"main": "index.json",
"scripts": {
"test-ci": "mocha"
},
"dependencies": {
"@parcel/bundler-default": "2.12.0",
"@parcel/compressor-raw": "2.12.0",
"@parcel/namer-default": "2.12.0",
"@parcel/optimizer-css": "2.12.0",
"@parcel/optimizer-htmlnano": "2.12.0",
"@parcel/optimizer-image": "2.12.0",
"@parcel/optimizer-svgo": "2.12.0",
"@parcel/optimizer-swc": "2.12.0",
"@parcel/packager-css": "2.12.0",
"@parcel/packager-html": "2.12.0",
"@parcel/packager-js": "2.12.0",
"@parcel/packager-raw": "2.12.0",
"@parcel/packager-svg": "2.12.0",
"@parcel/packager-wasm": "2.12.0",
"@parcel/reporter-dev-server": "2.12.0",
"@parcel/resolver-default": "2.12.0",
"@parcel/runtime-browser-hmr": "2.12.0",
"@parcel/runtime-js": "2.12.0",
"@parcel/runtime-react-refresh": "2.12.0",
"@parcel/runtime-service-worker": "2.12.0",
"@parcel/transformer-babel": "2.12.0",
"@parcel/transformer-css": "2.12.0",
"@parcel/transformer-html": "2.12.0",
"@parcel/transformer-image": "2.12.0",
"@parcel/transformer-js": "2.12.0",
"@parcel/transformer-json": "2.12.0",
"@parcel/transformer-postcss": "2.12.0",
"@parcel/transformer-posthtml": "2.12.0",
"@parcel/transformer-raw": "2.12.0",
"@parcel/transformer-react-refresh-wrap": "2.12.0",
"@parcel/transformer-svg": "2.12.0"
},
"parcelDependencies": {
"@parcel/optimizer-data-url": "2.12.0",
"@parcel/packager-raw-url": "2.12.0",
"@parcel/packager-ts": "2.12.0",
"@parcel/packager-xml": "2.12.0",
"@parcel/transformer-coffeescript": "2.12.0",
"@parcel/transformer-elm": "2.12.0",
"@parcel/transformer-glsl": "2.12.0",
"@parcel/transformer-graphql": "2.12.0",
"@parcel/transformer-inline-string": "2.12.0",
"@parcel/transformer-jsonld": "2.12.0",
"@parcel/transformer-less": "2.12.0",
"@parcel/transformer-mdx": "2.12.0",
"@parcel/transformer-pug": "2.12.0",
"@parcel/transformer-sass": "2.12.0",
"@parcel/transformer-stylus": "2.12.0",
"@parcel/transformer-sugarss": "2.12.0",
"@parcel/transformer-toml": "2.12.0",
"@parcel/transformer-typescript-types": "2.12.0",
"@parcel/transformer-vue": "2.12.0",
"@parcel/transformer-webmanifest": "2.12.0",
"@parcel/transformer-worklet": "2.12.0",
"@parcel/transformer-xml": "2.12.0",
"@parcel/transformer-yaml": "2.12.0"
},
"peerDependencies": {
"@parcel/core": "^2.12.0"
},
"gitHead": "2059029ee91e5f03a273b0954d3e629d7375f986"
}

View File

@@ -0,0 +1,3 @@
{
"presets": ["@parcel/babel-preset"]
}

View File

@@ -0,0 +1,72 @@
// @flow
import assert from 'assert';
import config from '../';
import packageJson from '../package.json';
// Verifies that the default config and its package.json stay in sync:
// every plugin the config references must be declared as a dependency,
// and no declared plugin dependency may be unused.
describe('@parcel/config-default', () => {
  // Names declared in package.json (regular deps + parcel-resolved deps).
  let packageJsonDependencyNames: Set<string>;
  // Every plugin package name referenced anywhere in the config object.
  let configPackageReferences: Set<string>;
  before(() => {
    packageJsonDependencyNames = new Set([
      ...Object.keys(packageJson.dependencies || {}),
      ...Object.keys(packageJson.parcelDependencies || {}),
    ]);
    configPackageReferences = collectConfigPackageReferences(config);
  });
  describe('package.json', () => {
    it('includes every package referenced in the config', () => {
      let missingReferences = [];
      for (let reference of configPackageReferences) {
        if (!packageJsonDependencyNames.has(reference)) {
          missingReferences.push(reference);
        }
      }
      // Assert with deepEqual rather than e.g. missingReferences.size as the
      // assertion message with deepEqual enumerates the differences nicely
      assert.deepEqual(missingReferences, []);
    });
    it('does not include packages not referenced in the config', () => {
      let unnecessaryDependencies = [];
      for (let dependency of packageJsonDependencyNames) {
        if (!configPackageReferences.has(dependency)) {
          unnecessaryDependencies.push(dependency);
        }
      }
      assert.deepEqual(unnecessaryDependencies, []);
    });
  });
});
function collectConfigPackageReferences(
configSection: mixed,
references: Set<string> = new Set(),
): Set<string> {
if (configSection == null || typeof configSection !== 'object') {
throw new TypeError('Expected config section to be an object or an array');
}
for (let value of Object.values(configSection)) {
if (typeof value === 'string') {
if (value === '...') {
continue;
}
references.add(value);
} else if (configSection != null && typeof configSection === 'object') {
collectConfigPackageReferences(value, references);
} else {
throw new Error(
'Parcel configs must contain only strings, arrays, or objects in value positions',
);
}
}
return references;
}

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,15 @@
import type {InitialParcelOptions, BuildEvent, BuildSuccessEvent, AsyncSubscription} from '@parcel/types';
import type {FarmOptions} from '@parcel/workers';
import type WorkerFarm from '@parcel/workers';
/** Programmatic API entry point for running Parcel builds. */
export class Parcel {
  constructor(options: InitialParcelOptions);
  /** Runs a single build and resolves with the success event. */
  run(): Promise<BuildSuccessEvent>;
  /**
   * Starts watch mode. The optional callback receives either an error or the
   * latest build event; the returned subscription can be unsubscribed to stop
   * watching.
   */
  watch(
    cb?: (err: Error | null | undefined, buildEvent?: BuildEvent) => unknown,
  ): Promise<AsyncSubscription>
}
/** Creates a worker farm that can be shared across multiple Parcel instances. */
export declare function createWorkerFarm(options?: Partial<FarmOptions>): WorkerFarm;
export default Parcel;

View File

@@ -0,0 +1,496 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
exports.nodeFromAsset = nodeFromAsset;
exports.nodeFromAssetGroup = nodeFromAssetGroup;
exports.nodeFromDep = nodeFromDep;
exports.nodeFromEntryFile = nodeFromEntryFile;
exports.nodeFromEntrySpecifier = nodeFromEntrySpecifier;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _graph() {
const data = require("@parcel/graph");
_graph = function () {
return data;
};
return data;
}
var _Dependency = require("./Dependency");
var _projectPath = require("./projectPath");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Builds a fresh asset-graph node wrapping a dependency.
 *
 * New dependency nodes start out neither deferred nor excluded, with empty
 * symbol bookkeeping and every symbol-propagation dirty flag raised so the
 * next symbol pass recomputes them from scratch.
 */
function nodeFromDep(dep) {
  const node = {
    id: dep.id,
    type: 'dependency',
    value: dep,
    deferred: false,
    excluded: false
  };
  node.usedSymbolsDown = new Set();
  node.usedSymbolsUp = new Map();
  node.usedSymbolsDownDirty = true;
  node.usedSymbolsUpDirtyDown = true;
  node.usedSymbolsUpDirtyUp = true;
  return node;
}
/**
 * Builds an asset-group node. The content key hashes everything that affects
 * how the group is transformed: file path, environment, source-ness, side
 * effects, inline code, pipeline, and query string.
 */
function nodeFromAssetGroup(assetGroup) {
  const identity =
    (0, _projectPath.fromProjectPathRelative)(assetGroup.filePath) +
    assetGroup.env.id +
    String(assetGroup.isSource) +
    String(assetGroup.sideEffects) +
    (assetGroup.code ?? '') +
    ':' +
    (assetGroup.pipeline ?? '') +
    ':' +
    (assetGroup.query ?? '');
  return {
    id: (0, _rust().hashString)(identity),
    type: 'asset_group',
    value: assetGroup,
    usedSymbolsDownDirty: true
  };
}
/**
 * Builds an asset node keyed by the asset's own id, with empty used-symbol
 * state and both symbol-propagation dirty flags raised.
 */
function nodeFromAsset(asset) {
  const node = {
    id: asset.id,
    type: 'asset',
    value: asset
  };
  node.usedSymbols = new Set();
  node.usedSymbolsDownDirty = true;
  node.usedSymbolsUpDirty = true;
  return node;
}
/**
 * Builds an entry-specifier node. The content key is derived from the
 * project-relative specifier so the same entry always maps to the same node.
 */
function nodeFromEntrySpecifier(entry) {
  const relative = (0, _projectPath.fromProjectPathRelative)(entry);
  return {
    id: `entry_specifier:${relative}`,
    type: 'entry_specifier',
    value: entry
  };
}
/**
 * Builds an entry-file node. Keyed by a hash of the whole entry object (not
 * just its file path), so distinct targets for the same file get distinct nodes.
 */
function nodeFromEntryFile(entry) {
  const digest = (0, _utils().hashObject)(entry);
  return {
    id: `entry_file:${digest}`,
    type: 'entry_file',
    value: entry
  };
}
/**
 * The asset graph: entry specifiers -> entry files -> dependencies ->
 * asset groups -> assets, rooted at a synthetic '@@root' node. Nodes are
 * addressed both by numeric node id and by stable string content keys.
 */
class AssetGraph extends _graph().ContentGraph {
  // When false, an update invalidated assumptions (e.g. deferral state) and
  // the bundler must rebundle from scratch rather than patch incrementally.
  safeToIncrementallyBundle = true;
  constructor(opts) {
    if (opts) {
      // Deserialization path: `hash` belongs to this class, everything else
      // to the underlying ContentGraph.
      let {
        hash,
        ...rest
      } = opts;
      super(rest);
      this.hash = hash;
    } else {
      super();
      this.setRootNodeId(this.addNode({
        id: '@@root',
        type: 'root',
        value: null
      }));
    }
    // Cache used by normalizeEnvironment() to dedupe Environment objects.
    this.envCache = new Map();
  }
  // $FlowFixMe[prop-missing]
  static deserialize(opts) {
    return new AssetGraph(opts);
  }
  // $FlowFixMe[prop-missing]
  serialize() {
    return {
      ...super.serialize(),
      hash: this.hash
    };
  }
  // Deduplicates Environments by making them referentially equal
  normalizeEnvironment(input) {
    let {
      id,
      context
    } = input.env;
    let idAndContext = `${id}-${context}`;
    let env = this.envCache.get(idAndContext);
    if (env) {
      input.env = env;
    } else {
      this.envCache.set(idAndContext, input.env);
    }
  }
  // Replaces the root's children with entry-specifier nodes (normal builds)
  // or pre-resolved asset-group nodes.
  setRootConnections({
    entries,
    assetGroups
  }) {
    let nodes = [];
    if (entries) {
      for (let entry of entries) {
        let node = nodeFromEntrySpecifier(entry);
        nodes.push(node);
      }
    } else if (assetGroups) {
      nodes.push(...assetGroups.map(assetGroup => nodeFromAssetGroup(assetGroup)));
    }
    this.replaceNodeIdsConnectedTo((0, _nullthrows().default)(this.rootNodeId), nodes.map(node => this.addNode(node)));
  }
  // Adds a node, or — if one with the same content key already exists —
  // merges the new value into it. Any structural change invalidates `hash`.
  addNode(node) {
    this.hash = null;
    let existing = this.getNodeByContentKey(node.id);
    if (existing != null) {
      (0, _assert().default)(existing.type === node.type);
      // $FlowFixMe[incompatible-type] Checked above
      // $FlowFixMe[prop-missing]
      existing.value = node.value;
      let existingId = this.getNodeIdByContentKey(node.id);
      this.updateNode(existingId, existing);
      return existingId;
    }
    return super.addNodeByContentKey(node.id, node);
  }
  removeNode(nodeId) {
    this.hash = null;
    // Optional hook; only invoked when a listener was registered.
    this.onNodeRemoved && this.onNodeRemoved(nodeId);
    return super.removeNode(nodeId);
  }
  // Records the files an entry specifier resolved to, replacing any previous
  // resolution.
  resolveEntry(entry, resolved, correspondingRequest) {
    let entrySpecifierNodeId = this.getNodeIdByContentKey(nodeFromEntrySpecifier(entry).id);
    let entrySpecifierNode = (0, _nullthrows().default)(this.getNode(entrySpecifierNodeId));
    (0, _assert().default)(entrySpecifierNode.type === 'entry_specifier');
    entrySpecifierNode.correspondingRequest = correspondingRequest;
    this.replaceNodeIdsConnectedTo(entrySpecifierNodeId, resolved.map(file => this.addNode(nodeFromEntryFile(file))));
  }
  // Connects an entry file to one synthetic dependency per build target.
  resolveTargets(entry, targets, correspondingRequest) {
    let depNodes = targets.map(target => {
      let node = nodeFromDep(
      // The passed project path is ignored in this case, because there is no `loc`
      (0, _Dependency.createDependency)('', {
        specifier: (0, _projectPath.fromProjectPathRelative)(entry.filePath),
        specifierType: 'esm',
        // ???
        pipeline: target.pipeline,
        target: target,
        env: target.env,
        isEntry: true,
        needsStableName: true,
        symbols: target.env.isLibrary ? new Map([['*', {
          local: '*',
          isWeak: true,
          loc: null
        }]]) : undefined
      }));
      if (node.value.env.isLibrary) {
        // in library mode, all of the entry's symbols are "used"
        node.usedSymbolsDown.add('*');
        node.usedSymbolsUp.set('*', undefined);
      }
      return node;
    });
    let entryNodeId = this.getNodeIdByContentKey(nodeFromEntryFile(entry).id);
    let entryNode = (0, _nullthrows().default)(this.getNode(entryNodeId));
    (0, _assert().default)(entryNode.type === 'entry_file');
    entryNode.correspondingRequest = correspondingRequest;
    this.replaceNodeIdsConnectedTo(entryNodeId, depNodes.map(node => this.addNode(node)));
  }
  // Records the asset group a dependency resolved to (or nothing, for
  // excluded/unresolved dependencies).
  resolveDependency(dependency, assetGroup, correspondingRequest) {
    let depNodeId = this.getNodeIdByContentKey(dependency.id);
    let depNode = (0, _nullthrows().default)(this.getNode(depNodeId));
    (0, _assert().default)(depNode.type === 'dependency');
    depNode.correspondingRequest = correspondingRequest;
    if (!assetGroup) {
      return;
    }
    let assetGroupNode = nodeFromAssetGroup(assetGroup);
    let existing = this.getNodeByContentKey(assetGroupNode.id);
    if (existing != null) {
      (0, _assert().default)(existing.type === 'asset_group');
      // canDefer only survives if every resolution of this group allows it.
      assetGroupNode.value.canDefer = assetGroupNode.value.canDefer && existing.value.canDefer;
    }
    let assetGroupNodeId = this.addNode(assetGroupNode);
    this.replaceNodeIdsConnectedTo(this.getNodeIdByContentKey(dependency.id), [assetGroupNodeId]);
    // NOTE(review): this repeats the call above — `depNodeId` was resolved
    // from `dependency.id` at the top of this method. Harmless (the second
    // replace is a no-op) but a candidate for removal.
    this.replaceNodeIdsConnectedTo(depNodeId, [assetGroupNodeId]);
  }
  // Traversal filter: returns false (skip) only for dependency -> asset_group
  // edges that should be deferred; also keeps hasDeferred bookkeeping current.
  shouldVisitChild(nodeId, childNodeId) {
    let node = (0, _nullthrows().default)(this.getNode(nodeId));
    let childNode = (0, _nullthrows().default)(this.getNode(childNodeId));
    if (node.type !== 'dependency' || childNode.type !== 'asset_group' || childNode.deferred === false) {
      return true;
    }
    // Node types are proved above
    let dependencyNode = node;
    let assetGroupNode = childNode;
    let {
      sideEffects,
      canDefer = true
    } = assetGroupNode.value;
    let dependency = dependencyNode.value;
    let dependencyPreviouslyDeferred = dependencyNode.hasDeferred;
    let assetGroupPreviouslyDeferred = assetGroupNode.deferred;
    let defer = this.shouldDeferDependency(dependency, sideEffects, canDefer);
    dependencyNode.hasDeferred = defer;
    assetGroupNode.deferred = defer;
    if (!dependencyPreviouslyDeferred && defer) {
      this.markParentsWithHasDeferred(nodeId);
    } else if (assetGroupPreviouslyDeferred && !defer) {
      this.unmarkParentsWithHasDeferred(childNodeId);
    }
    return !defer;
  }
  // Dependency: mark parent Asset <- AssetGroup with hasDeferred true
  markParentsWithHasDeferred(nodeId) {
    this.traverseAncestors(nodeId, (traversedNodeId, _, actions) => {
      let traversedNode = (0, _nullthrows().default)(this.getNode(traversedNodeId));
      if (traversedNode.type === 'asset') {
        traversedNode.hasDeferred = true;
      } else if (traversedNode.type === 'asset_group') {
        traversedNode.hasDeferred = true;
        actions.skipChildren();
      } else if (nodeId !== traversedNodeId) {
        actions.skipChildren();
      }
    });
  }
  // AssetGroup: update hasDeferred of all parent Dependency <- Asset <- AssetGroup
  unmarkParentsWithHasDeferred(nodeId) {
    this.traverseAncestors(nodeId, (traversedNodeId, ctx, actions) => {
      let traversedNode = (0, _nullthrows().default)(this.getNode(traversedNodeId));
      if (traversedNode.type === 'asset') {
        // An asset is still "hasDeferred" if any of its children remain so.
        let hasDeferred = this.getNodeIdsConnectedFrom(traversedNodeId).some(childNodeId => {
          let childNode = (0, _nullthrows().default)(this.getNode(childNodeId));
          return childNode.hasDeferred == null ? false : childNode.hasDeferred;
        });
        if (!hasDeferred) {
          delete traversedNode.hasDeferred;
        }
        return {
          hasDeferred
        };
      } else if (traversedNode.type === 'asset_group' && nodeId !== traversedNodeId) {
        if (!(ctx !== null && ctx !== void 0 && ctx.hasDeferred)) {
          this.safeToIncrementallyBundle = false;
          delete traversedNode.hasDeferred;
        }
        actions.skipChildren();
      } else if (traversedNode.type === 'dependency') {
        this.safeToIncrementallyBundle = false;
        traversedNode.hasDeferred = false;
      } else if (nodeId !== traversedNodeId) {
        actions.skipChildren();
      }
    });
  }
  // Defer transforming this dependency if it is marked as weak, there are no side effects,
  // no re-exported symbols are used by ancestor dependencies and the re-exporting asset isn't
  // using a wildcard and isn't an entry (in library mode).
  // This helps with performance building large libraries like `lodash-es`, which re-exports
  // a huge number of functions since we can avoid even transforming the files that aren't used.
  shouldDeferDependency(dependency, sideEffects, canDefer) {
    let defer = false;
    let dependencySymbols = dependency.symbols;
    if (dependencySymbols && [...dependencySymbols].every(([, {
      isWeak
    }]) => isWeak) && sideEffects === false && canDefer && !dependencySymbols.has('*')) {
      let depNodeId = this.getNodeIdByContentKey(dependency.id);
      let depNode = this.getNode(depNodeId);
      (0, _assert().default)(depNode);
      let assets = this.getNodeIdsConnectedTo(depNodeId);
      // Map of local name -> exported name for this dependency's symbols.
      let symbols = new Map([...dependencySymbols].map(([key, val]) => [val.local, key]));
      (0, _assert().default)(assets.length === 1);
      let firstAsset = (0, _nullthrows().default)(this.getNode(assets[0]));
      (0, _assert().default)(firstAsset.type === 'asset');
      let resolvedAsset = firstAsset.value;
      let deps = this.getIncomingDependencies(resolvedAsset);
      // Defer only if no incoming dependency actually uses any symbol that
      // this dependency re-exports (and none uses '*' or is a library entry).
      defer = deps.every(d => d.symbols && !(d.env.isLibrary && d.isEntry) && !d.symbols.has('*') && ![...d.symbols.keys()].some(symbol => {
        var _resolvedAsset$symbol;
        if (!resolvedAsset.symbols) return true;
        let assetSymbol = (_resolvedAsset$symbol = resolvedAsset.symbols) === null || _resolvedAsset$symbol === void 0 || (_resolvedAsset$symbol = _resolvedAsset$symbol.get(symbol)) === null || _resolvedAsset$symbol === void 0 ? void 0 : _resolvedAsset$symbol.local;
        return assetSymbol != null && symbols.has(assetSymbol);
      }));
    }
    return defer;
  }
  // Records the assets produced by transforming an asset group. Assets that
  // only exist as inline dependencies of sibling assets (via uniqueKey) are
  // connected under their dependent asset instead of directly under the group.
  resolveAssetGroup(assetGroup, assets, correspondingRequest) {
    this.normalizeEnvironment(assetGroup);
    let assetGroupNode = nodeFromAssetGroup(assetGroup);
    assetGroupNode = this.getNodeByContentKey(assetGroupNode.id);
    if (!assetGroupNode) {
      return;
    }
    (0, _assert().default)(assetGroupNode.type === 'asset_group');
    assetGroupNode.correspondingRequest = correspondingRequest;
    let assetsByKey = new Map();
    for (let asset of assets) {
      if (asset.uniqueKey != null) {
        assetsByKey.set(asset.uniqueKey, asset);
      }
    }
    // Keys of assets that some sibling depends on (inline assets).
    let dependentAssetKeys = new Set();
    for (let asset of assets) {
      for (let dep of asset.dependencies.values()) {
        if (assetsByKey.has(dep.specifier)) {
          dependentAssetKeys.add(dep.specifier);
        }
      }
    }
    let assetObjects = [];
    let assetNodeIds = [];
    for (let asset of assets) {
      this.normalizeEnvironment(asset);
      let isDirect = !dependentAssetKeys.has(asset.uniqueKey);
      let dependentAssets = [];
      for (let dep of asset.dependencies.values()) {
        let dependentAsset = assetsByKey.get(dep.specifier);
        if (dependentAsset) {
          dependentAssets.push(dependentAsset);
          if (dependentAsset.id === asset.id) {
            // Don't orphan circular dependencies.
            isDirect = true;
          }
        }
      }
      let id = this.addNode(nodeFromAsset(asset));
      assetObjects.push({
        assetNodeId: id,
        dependentAssets
      });
      if (isDirect) {
        assetNodeIds.push(id);
      }
    }
    this.replaceNodeIdsConnectedTo(this.getNodeIdByContentKey(assetGroupNode.id), assetNodeIds);
    for (let {
      assetNodeId,
      dependentAssets
    } of assetObjects) {
      // replaceNodesConnectedTo has merged the value into the existing node, retrieve
      // the actual current node.
      let assetNode = (0, _nullthrows().default)(this.getNode(assetNodeId));
      (0, _assert().default)(assetNode.type === 'asset');
      this.resolveAsset(assetNode, dependentAssets);
    }
  }
  // Connects an asset node to nodes for each of its dependencies, and inline
  // dependent assets directly to the dependency that references them.
  resolveAsset(assetNode, dependentAssets) {
    let depNodeIds = [];
    let depNodesWithAssets = [];
    for (let dep of assetNode.value.dependencies.values()) {
      this.normalizeEnvironment(dep);
      let depNode = nodeFromDep(dep);
      let existing = this.getNodeByContentKey(depNode.id);
      // Preserve resolver-provided metadata across re-resolution.
      if ((existing === null || existing === void 0 ? void 0 : existing.type) === 'dependency' && existing.value.resolverMeta != null) {
        depNode.value.meta = {
          ...depNode.value.meta,
          ...existing.value.resolverMeta
        };
      }
      let dependentAsset = dependentAssets.find(a => a.uniqueKey === dep.specifier);
      if (dependentAsset) {
        depNode.complete = true;
        depNodesWithAssets.push([depNode, nodeFromAsset(dependentAsset)]);
      }
      depNode.value.sourceAssetType = assetNode.value.type;
      depNodeIds.push(this.addNode(depNode));
    }
    assetNode.usedSymbolsUpDirty = true;
    assetNode.usedSymbolsDownDirty = true;
    this.replaceNodeIdsConnectedTo(this.getNodeIdByContentKey(assetNode.id), depNodeIds);
    for (let [depNode, dependentAssetNode] of depNodesWithAssets) {
      let depAssetNodeId = this.addNode(dependentAssetNode);
      this.replaceNodeIdsConnectedTo(this.getNodeIdByContentKey(depNode.id), [depAssetNodeId]);
    }
  }
  // Returns the dependency values that point at this asset, whether connected
  // through an asset group or directly (inline dependencies).
  getIncomingDependencies(asset) {
    let nodeId = this.getNodeIdByContentKey(asset.id);
    let assetGroupIds = this.getNodeIdsConnectedTo(nodeId);
    let dependencies = [];
    for (let i = 0; i < assetGroupIds.length; i++) {
      let assetGroupId = assetGroupIds[i];
      // Sometimes assets are connected directly to dependencies
      // rather than through an asset group. This happens due to
      // inline dependencies on assets via uniqueKey. See resolveAsset.
      let node = this.getNode(assetGroupId);
      if ((node === null || node === void 0 ? void 0 : node.type) === 'dependency') {
        dependencies.push(node.value);
        continue;
      }
      let assetIds = this.getNodeIdsConnectedTo(assetGroupId);
      for (let j = 0; j < assetIds.length; j++) {
        let node = this.getNode(assetIds[j]);
        if (!node || node.type !== 'dependency') {
          continue;
        }
        dependencies.push(node.value);
      }
    }
    return dependencies;
  }
  // Traverses only asset nodes, passing their values to `visit`.
  traverseAssets(visit, startNodeId) {
    return this.filteredTraverse(nodeId => {
      let node = (0, _nullthrows().default)(this.getNode(nodeId));
      return node.type === 'asset' ? node.value : null;
    }, visit, startNodeId);
  }
  // Returns the topmost asset_group nodes (children are skipped once one is found).
  getEntryAssetGroupNodes() {
    let entryNodes = [];
    this.traverse((nodeId, _, actions) => {
      let node = (0, _nullthrows().default)(this.getNode(nodeId));
      if (node.type === 'asset_group') {
        entryNodes.push(node);
        actions.skipChildren();
      }
    });
    return entryNodes;
  }
  // Returns the topmost assets reachable from the root.
  getEntryAssets() {
    let entries = [];
    this.traverseAssets((asset, ctx, traversal) => {
      entries.push(asset);
      traversal.skipChildren();
    });
    return entries;
  }
  // Lazily computes a content hash over asset output hashes and dependency
  // targets; cached until the graph structure changes (see addNode/removeNode).
  getHash() {
    if (this.hash != null) {
      return this.hash;
    }
    let hash = new (_rust().Hash)();
    // TODO: sort??
    this.traverse(nodeId => {
      let node = (0, _nullthrows().default)(this.getNode(nodeId));
      if (node.type === 'asset') {
        hash.writeString((0, _nullthrows().default)(node.value.outputHash));
      } else if (node.type === 'dependency' && node.value.target) {
        hash.writeString(JSON.stringify(node.value.target));
      }
    });
    this.hash = hash.finish();
    return this.hash;
  }
}
exports.default = AssetGraph;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,133 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
function _stream() {
const data = require("stream");
_stream = function () {
return data;
};
return data;
}
function _sourceMap() {
const data = _interopRequireDefault(require("@parcel/source-map"));
_sourceMap = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _assetUtils = require("./assetUtils");
var _serializer = require("./serializer");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Wraps an asset that has been committed to the cache, providing lazy,
 * memoized access to its content, source map, and AST. Accessors cache
 * their promises on instance fields (content, mapBuffer, map, ast).
 */
class CommittedAsset {
  constructor(value, options) {
    this.value = value;
    this.options = options;
  }
  // Returns the asset content as a blob (string/Buffer), stream, or a promise
  // of one. Streamed AST-generated content is cached only when it is not a
  // Readable (streams can't be replayed).
  getContent() {
    if (this.content == null) {
      if (this.value.contentKey != null) {
        if (this.value.isLargeBlob) {
          return this.options.cache.getStream(this.value.contentKey);
        } else {
          return this.options.cache.getBlob(this.value.contentKey);
        }
      } else if (this.value.astKey != null) {
        return (0, _utils().streamFromPromise)((0, _assetUtils.generateFromAST)(this).then(({
          content
        }) => {
          if (!(content instanceof _stream().Readable)) {
            this.content = Promise.resolve(content);
          }
          return content;
        }));
      } else {
        throw new Error('Asset has no content');
      }
    }
    return this.content;
  }
  // Returns the content as a string, buffering streams (and caching the
  // buffered result) as needed.
  async getCode() {
    let content;
    if (this.content == null && this.value.contentKey != null) {
      this.content = this.options.cache.getBlob(this.value.contentKey);
      content = await this.content;
    } else {
      content = await this.getContent();
    }
    if (typeof content === 'string' || content instanceof Buffer) {
      return content.toString();
    } else if (content != null) {
      // Content was a stream: buffer it once and cache the promise.
      this.content = (0, _utils().bufferStream)(content);
      return (await this.content).toString();
    }
    return '';
  }
  // Returns the content as a Buffer (empty Buffer when there is no content).
  async getBuffer() {
    let content = await this.getContent();
    if (content == null) {
      return Buffer.alloc(0);
    } else if (typeof content === 'string' || content instanceof Buffer) {
      return Buffer.from(content);
    }
    this.content = (0, _utils().bufferStream)(content);
    return this.content;
  }
  // Returns the content as a readable stream regardless of its stored form.
  getStream() {
    let content = this.getContent();
    return content instanceof Promise ? (0, _utils().streamFromPromise)(content) : (0, _utils().blobToStream)(content);
  }
  // Resolves the raw source-map buffer, regenerating it from the AST when the
  // cached blob is missing. Resolves undefined when the asset has no map.
  getMapBuffer() {
    var _this$mapBuffer;
    let mapKey = this.value.mapKey;
    if (mapKey != null && this.mapBuffer == null) {
      this.mapBuffer = (async () => {
        try {
          return await this.options.cache.getBlob(mapKey);
        } catch (err) {
          // Cache miss: fall back to regenerating the map from the AST.
          if (err.code === 'ENOENT' && this.value.astKey != null) {
            var _await$generateFromAS;
            return (_await$generateFromAS = (await (0, _assetUtils.generateFromAST)(this)).map) === null || _await$generateFromAS === void 0 ? void 0 : _await$generateFromAS.toBuffer();
          } else {
            throw err;
          }
        }
      })();
    }
    return (_this$mapBuffer = this.mapBuffer) !== null && _this$mapBuffer !== void 0 ? _this$mapBuffer : Promise.resolve();
  }
  // Resolves a SourceMap instance, or undefined when no map buffer exists.
  getMap() {
    if (this.map == null) {
      this.map = (async () => {
        let mapBuffer = await this.getMapBuffer();
        if (mapBuffer) {
          // Get sourcemap from flatbuffer
          return new (_sourceMap().default)(this.options.projectRoot, mapBuffer);
        }
      })();
    }
    return this.map;
  }
  // Resolves the deserialized AST, or null when the asset has none.
  getAST() {
    if (this.value.astKey == null) {
      return Promise.resolve(null);
    }
    if (this.ast == null) {
      this.ast = this.options.cache.getBlob(this.value.astKey).then(serializedAst => (0, _serializer.deserializeRaw)(serializedAst));
    }
    return this.ast;
  }
  getDependencies() {
    return Array.from(this.value.dependencies.values());
  }
}
exports.default = CommittedAsset;

View File

@@ -0,0 +1,89 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createDependency = createDependency;
exports.mergeDependencies = mergeDependencies;
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
var _types = require("./types");
var _utils = require("./utils");
var _projectPath = require("./projectPath");
/**
 * Builds an internal dependency record from public options.
 *
 * Unless the caller supplies an explicit id, one is derived by hashing every
 * identity-relevant field (source asset, specifier, environment, target,
 * pipeline, specifier type, bundle behavior, priority, package conditions).
 * Source locations and paths are converted to project-relative form, and
 * package conditions are packed via convertConditions().
 */
function createDependency(projectRoot, opts) {
  const id = opts.id || (0, _rust().hashString)(
    (opts.sourceAssetId ?? '') +
    opts.specifier +
    opts.env.id +
    (opts.target ? JSON.stringify(opts.target) : '') +
    (opts.pipeline ?? '') +
    opts.specifierType +
    (opts.bundleBehavior ?? '') +
    (opts.priority ?? 'sync') +
    (opts.packageConditions ? JSON.stringify(opts.packageConditions) : '')
  );
  const dep = {
    id,
    specifier: opts.specifier,
    specifierType: _types.SpecifierType[opts.specifierType],
    priority: _types.Priority[opts.priority ?? 'sync'],
    needsStableName: opts.needsStableName ?? false,
    bundleBehavior: opts.bundleBehavior ? _types.BundleBehavior[opts.bundleBehavior] : null,
    isEntry: opts.isEntry ?? false,
    isOptional: opts.isOptional ?? false,
    loc: (0, _utils.toInternalSourceLocation)(projectRoot, opts.loc),
    env: opts.env,
    meta: opts.meta || {},
    target: opts.target,
    sourceAssetId: opts.sourceAssetId,
    sourcePath: (0, _projectPath.toProjectPath)(projectRoot, opts.sourcePath),
    resolveFrom: (0, _projectPath.toProjectPath)(projectRoot, opts.resolveFrom),
    range: opts.range,
    // Symbols are deep-copied so the dependency owns its map, with each
    // symbol's loc converted to an internal source location.
    symbols: opts.symbols && new Map([...opts.symbols].map(([exported, sym]) => [exported, {
      local: sym.local,
      meta: sym.meta,
      isWeak: sym.isWeak,
      loc: (0, _utils.toInternalSourceLocation)(projectRoot, sym.loc)
    }])),
    pipeline: opts.pipeline
  };
  if (opts.packageConditions) {
    convertConditions(opts.packageConditions, dep);
  }
  return dep;
}
/**
 * Merges dependency `b` into dependency `a` in place.
 *
 * Scalar fields from `b` overwrite `a`; `meta` and `symbols` are merged
 * key-by-key. The boolean flags merge "stickily": needsStableName and isEntry
 * can only flip to true, while isOptional can only flip to false (a merged
 * dependency is optional only if every occurrence was optional).
 */
function mergeDependencies(a, b) {
  const {
    meta,
    symbols,
    needsStableName,
    isEntry,
    isOptional,
    ...scalarFields
  } = b;
  Object.assign(a, scalarFields);
  Object.assign(a.meta, meta);
  if (a.symbols && symbols) {
    for (const [exported, resolution] of symbols) {
      a.symbols.set(exported, resolution);
    }
  }
  if (needsStableName) a.needsStableName = true;
  if (isEntry) a.isEntry = true;
  if (!isOptional) a.isOptional = false;
}
/**
 * Stores package export conditions on a dependency. Well-known conditions
 * (those in ExportsCondition) are OR-ed into a compact bit mask; anything
 * else is kept as a string in customPackageConditions. Empty results set
 * neither field.
 */
function convertConditions(conditions, dep) {
  let mask = 0;
  const custom = [];
  for (const condition of conditions) {
    const flag = _types.ExportsCondition[condition];
    if (flag) {
      mask |= flag;
    } else {
      custom.push(condition);
    }
  }
  if (mask) {
    dep.packageConditions = mask;
  }
  if (custom.length) {
    dep.customPackageConditions = custom;
  }
}

View File

@@ -0,0 +1,128 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createEnvironment = createEnvironment;
exports.mergeEnvironments = mergeEnvironments;
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
var _utils = require("./utils");
var _Environment = _interopRequireWildcard(require("./public/Environment"));
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
const DEFAULT_ENGINES = {
  browsers: ['> 0.25%'],
  node: '>= 8.0.0'
};

/**
 * Builds a fully-populated internal Environment from partial options.
 *
 * Missing fields are inferred in order: `context` from the provided engines
 * (node engines imply 'node', otherwise 'browser'); then `engines`,
 * `includeNodeModules`, and `outputFormat` from the context. The resulting
 * environment's `id` is a content hash of all of its fields.
 */
function createEnvironment({
  context,
  engines,
  includeNodeModules,
  outputFormat,
  sourceType = 'module',
  shouldOptimize = false,
  isLibrary = false,
  shouldScopeHoist = false,
  sourceMap,
  loc
} = {
  /*::...null*/
}) {
  if (context == null) {
    // Node engines imply a node context; anything else defaults to browser.
    context = engines?.node ? 'node' : 'browser';
  }
  if (engines == null) {
    if (context === 'node' || context === 'electron-main') {
      engines = {
        node: DEFAULT_ENGINES.node
      };
    } else if (
      context === 'browser' ||
      context === 'web-worker' ||
      context === 'service-worker' ||
      context === 'electron-renderer'
    ) {
      engines = {
        browsers: DEFAULT_ENGINES.browsers
      };
    } else {
      engines = {};
    }
  }
  if (includeNodeModules == null) {
    // Node-like contexts resolve node_modules at runtime; everything else
    // bundles them.
    includeNodeModules = !(
      context === 'node' ||
      context === 'electron-main' ||
      context === 'electron-renderer'
    );
  }
  if (outputFormat == null) {
    outputFormat =
      context === 'node' ||
      context === 'electron-main' ||
      context === 'electron-renderer'
        ? 'commonjs'
        : 'global';
  }
  const res = {
    id: '',
    context,
    engines,
    includeNodeModules,
    outputFormat,
    sourceType,
    isLibrary,
    shouldOptimize,
    shouldScopeHoist,
    sourceMap,
    loc
  };
  res.id = getEnvironmentHash(res);
  return res;
}
/**
 * Merges environment options `b` over internal environment `a`, returning a
 * new environment with a freshly computed id. Merging with itself or with
 * nothing returns `a` unchanged; a public Environment wrapper is unwrapped
 * to its internal representation directly.
 */
function mergeEnvironments(projectRoot, a, b) {
  // Merging the same object (or nothing) is a no-op; avoid allocating.
  if (a === b || !b) {
    return a;
  }
  if (b instanceof _Environment.default) {
    return (0, _Environment.environmentToInternalEnvironment)(b);
  }
  const overrides = {
    ...a,
    ...b,
    loc: b.loc ? (0, _utils.toInternalSourceLocation)(projectRoot, b.loc) : a.loc
  };
  // $FlowFixMe - the stale `id` copied from `a` is recomputed by createEnvironment
  return createEnvironment(overrides);
}
/**
 * Content hash of an environment's distinguishing fields (everything except
 * `id` and `loc`). Structured fields are serialized with JSON.stringify, so
 * key order within them affects the hash.
 */
function getEnvironmentHash(env) {
  const fields = [
    env.context,
    env.engines,
    env.includeNodeModules,
    env.outputFormat,
    env.sourceType,
    env.isLibrary,
    env.shouldOptimize,
    env.shouldScopeHoist,
    env.sourceMap
  ];
  return (0, _rust().hashString)(JSON.stringify(fields));
}

View File

@@ -0,0 +1,46 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createConfig = createConfig;
var _projectPath = require("./projectPath");
var _Environment = require("./Environment");
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
/**
 * Builds an internal plugin-config record with defaults filled in.
 *
 * The id hashes the plugin name, the project-relative search path, the
 * environment id, and source-ness, so the same lookup always maps to the
 * same config entry. All invalidation collections default to empty.
 */
function createConfig({
  plugin,
  isSource,
  searchPath,
  env,
  result,
  invalidateOnFileChange,
  invalidateOnFileCreate,
  invalidateOnEnvChange,
  invalidateOnOptionChange,
  devDeps,
  invalidateOnStartup,
  invalidateOnBuild
}) {
  const environment = env ?? (0, _Environment.createEnvironment)();
  return {
    id: (0, _rust().hashString)(plugin + (0, _projectPath.fromProjectPathRelative)(searchPath) + environment.id + String(isSource)),
    isSource: isSource ?? false,
    searchPath,
    env: environment,
    result: result ?? null,
    cacheKey: null,
    invalidateOnFileChange: invalidateOnFileChange ?? new Set(),
    invalidateOnFileCreate: invalidateOnFileCreate ?? [],
    invalidateOnEnvChange: invalidateOnEnvChange ?? new Set(),
    invalidateOnOptionChange: invalidateOnOptionChange ?? new Set(),
    devDeps: devDeps ?? [],
    invalidateOnStartup: invalidateOnStartup ?? false,
    invalidateOnBuild: invalidateOnBuild ?? false
  };
}

View File

@@ -0,0 +1,525 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
// Lazy, memoized module loaders (generated interop pattern): each defers
// its require() until first use, then rebinds itself to return the cached
// module on every later call.
function _assert() {
  const mod = _interopRequireDefault(require("assert"));
  _assert = () => mod;
  return mod;
}
function _utils() {
  const mod = require("@parcel/utils");
  _utils = () => mod;
  return mod;
}
function _logger() {
  const mod = require("@parcel/logger");
  _logger = () => mod;
  return mod;
}
function _diagnostic() {
  const mod = _interopRequireWildcard(require("@parcel/diagnostic"));
  _diagnostic = () => mod;
  return mod;
}
function _stream() {
  const mod = require("stream");
  _stream = () => mod;
  return mod;
}
function _nullthrows() {
  const mod = _interopRequireDefault(require("nullthrows"));
  _nullthrows = () => mod;
  return mod;
}
function _path() {
  const mod = _interopRequireDefault(require("path"));
  _path = () => mod;
  return mod;
}
function _url() {
  const mod = _interopRequireDefault(require("url"));
  _url = () => mod;
  return mod;
}
function _rust() {
  const mod = require("@parcel/rust");
  _rust = () => mod;
  return mod;
}
var _Bundle = require("./public/Bundle");
var _BundleGraph = _interopRequireWildcard(require("./public/BundleGraph"));
var _PluginOptions = _interopRequireDefault(require("./public/PluginOptions"));
var _Config = _interopRequireDefault(require("./public/Config"));
var _constants = require("./constants");
var _projectPath = require("./projectPath");
var _InternalConfig = require("./InternalConfig");
var _ConfigRequest = require("./requests/ConfigRequest");
var _DevDepRequest = require("./requests/DevDepRequest");
var _buildCache = require("./buildCache");
var _assetUtils = require("./assetUtils");
var _utils2 = require("./utils");
// Lazily requires "@parcel/profiler" on first call and memoizes the result
// by replacing this function with one returning the cached module.
function _profiler() {
  const mod = require("@parcel/profiler");
  _profiler = () => mod;
  return mod;
}
// Babel-generated interop helpers.
// Returns one of two WeakMap caches for wildcard interop, selected by
// whether the caller asked for a getter-preserving namespace copy.
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
// Copies a CommonJS module's own properties (preserving getters/setters)
// onto a fresh null-prototype namespace object, exposes the module itself
// as `default`, and memoizes the result in the WeakMap cache above.
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
// Wraps a CommonJS export in `{ default: ... }` unless it is already a
// transpiled ES module (marked with __esModule).
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Size of the rolling overlap kept between streamed chunks in
// writeToCache so a hash reference (HASH_REF_PREFIX + 32 hex chars) that
// straddles a chunk boundary is still matched by HASH_REF_REGEX.
const BOUNDARY_LENGTH = _constants.HASH_REF_PREFIX.length + 32 - 1;
// Packager/optimizer configs are not bundle-specific, so we only need to
// load them once per build.
const pluginConfigs = (0, _buildCache.createBuildCache)();
// Runs packager and optimizer plugins over bundles, caching the packaged
// output and recording the config/dev-dependency requests and
// invalidations needed to decide when cached output is stale.
class PackagerRunner {
  // Wraps `options` in a proxy (optionsProxy) that records every option a
  // plugin reads as an 'option' invalidation, so option changes can
  // invalidate cached bundle output.
  constructor({
    config,
    options,
    report,
    previousDevDeps,
    previousInvalidations
  }) {
    this.config = config;
    this.options = options;
    this.report = report;
    this.previousDevDeps = previousDevDeps;
    this.devDepRequests = new Map();
    this.previousInvalidations = previousInvalidations;
    this.invalidations = new Map();
    this.pluginOptions = new _PluginOptions.default((0, _utils2.optionsProxy)(this.options, option => {
      let invalidation = {
        type: 'option',
        key: option
      };
      this.invalidations.set((0, _assetUtils.getInvalidationId)(invalidation), invalidation);
    }));
  }
  // Entry point: packages one bundle (preferring cached info when
  // available) and returns the bundle info together with the config
  // requests, dev-dep requests and invalidations collected along the way.
  async run(bundleGraph, bundle, invalidDevDeps) {
    var _await$this$getBundle;
    (0, _DevDepRequest.invalidateDevDeps)(invalidDevDeps, this.options, this.config);
    let {
      configs,
      bundleConfigs
    } = await this.loadConfigs(bundleGraph, bundle);
    let bundleInfo = (_await$this$getBundle = await this.getBundleInfoFromCache(bundleGraph, bundle, configs, bundleConfigs)) !== null && _await$this$getBundle !== void 0 ? _await$this$getBundle : await this.getBundleInfo(bundle, bundleGraph, configs, bundleConfigs);
    let configRequests = (0, _ConfigRequest.getConfigRequests)([...configs.values(), ...bundleConfigs.values()]);
    let devDepRequests = (0, _DevDepRequest.getWorkerDevDepRequests)([...this.devDepRequests.values()]);
    return {
      bundleInfo,
      configRequests,
      devDepRequests,
      invalidations: [...this.invalidations.values()]
    };
  }
  // Loads plugin configs for the bundle and all of its inline bundles
  // (inline bundles are packaged as part of this bundle's output).
  async loadConfigs(bundleGraph, bundle) {
    let configs = new Map();
    let bundleConfigs = new Map();
    await this.loadConfig(bundleGraph, bundle, configs, bundleConfigs);
    for (let inlineBundle of bundleGraph.getInlineBundles(bundle)) {
      await this.loadConfig(bundleGraph, inlineBundle, configs, bundleConfigs);
    }
    return {
      configs,
      bundleConfigs
    };
  }
  // Loads configs for the bundle's packager and every matching optimizer.
  async loadConfig(bundleGraph, bundle, configs, bundleConfigs) {
    let name = (0, _nullthrows().default)(bundle.name);
    let plugin = await this.config.getPackager(name);
    await this.loadPluginConfig(bundleGraph, bundle, plugin, configs, bundleConfigs);
    let optimizers = await this.config.getOptimizers(name, bundle.pipeline);
    for (let optimizer of optimizers) {
      await this.loadPluginConfig(bundleGraph, bundle, optimizer, configs, bundleConfigs);
    }
  }
  // Loads a single plugin's config (cached per build in pluginConfigs) and
  // its optional per-bundle config via the plugin's loadBundleConfig hook.
  // Dev dependencies declared by loadConfig are recorded for invalidation.
  async loadPluginConfig(bundleGraph, bundle, plugin, configs, bundleConfigs) {
    if (!configs.has(plugin.name)) {
      // Only load config for a plugin once per build.
      let existing = pluginConfigs.get(plugin.name);
      if (existing != null) {
        configs.set(plugin.name, existing);
      } else {
        if (plugin.plugin.loadConfig != null) {
          let config = (0, _InternalConfig.createConfig)({
            plugin: plugin.name,
            searchPath: (0, _projectPath.toProjectPathUnsafe)('index')
          });
          await (0, _ConfigRequest.loadPluginConfig)(plugin, config, this.options);
          for (let devDep of config.devDeps) {
            let devDepRequest = await (0, _DevDepRequest.createDevDependency)(devDep, this.previousDevDeps, this.options);
            let key = `${devDep.specifier}:${(0, _projectPath.fromProjectPath)(this.options.projectRoot, devDep.resolveFrom)}`;
            this.devDepRequests.set(key, devDepRequest);
          }
          pluginConfigs.set(plugin.name, config);
          configs.set(plugin.name, config);
        }
      }
    }
    let loadBundleConfig = plugin.plugin.loadBundleConfig;
    if (!bundleConfigs.has(plugin.name) && loadBundleConfig != null) {
      var _bundle$name;
      let config = (0, _InternalConfig.createConfig)({
        plugin: plugin.name,
        searchPath: (0, _projectPath.joinProjectPath)(bundle.target.distDir, (_bundle$name = bundle.name) !== null && _bundle$name !== void 0 ? _bundle$name : bundle.id)
      });
      config.result = await loadBundleConfig({
        bundle: _Bundle.NamedBundle.get(bundle, bundleGraph, this.options),
        bundleGraph: new _BundleGraph.default(bundleGraph, _Bundle.NamedBundle.get.bind(_Bundle.NamedBundle), this.options),
        config: new _Config.default(config, this.options),
        options: new _PluginOptions.default(this.options),
        logger: new (_logger().PluginLogger)({
          origin: plugin.name
        }),
        tracer: new (_profiler().PluginTracer)({
          origin: plugin.name,
          category: 'loadConfig'
        })
      });
      bundleConfigs.set(plugin.name, config);
    }
  }
  // Returns previously cached bundle info (or undefined) keyed by the
  // cache key built from the *previous* build's invalidations.
  async getBundleInfoFromCache(bundleGraph, bundle, configs, bundleConfigs) {
    if (this.options.shouldDisableCache) {
      return;
    }
    let cacheKey = await this.getCacheKey(bundle, bundleGraph, configs, bundleConfigs, this.previousInvalidations);
    let infoKey = PackagerRunner.getInfoKey(cacheKey);
    return this.options.cache.get(infoKey);
  }
  // Packages + optimizes the bundle, then writes contents/map/info to the
  // cache under keys derived from a freshly recomputed cache key.
  async getBundleInfo(bundle, bundleGraph, configs, bundleConfigs) {
    let {
      type,
      contents,
      map
    } = await this.getBundleResult(bundle, bundleGraph, configs, bundleConfigs);
    // Recompute cache keys as they may have changed due to dev dependencies.
    let cacheKey = await this.getCacheKey(bundle, bundleGraph, configs, bundleConfigs, [...this.invalidations.values()]);
    let cacheKeys = {
      content: PackagerRunner.getContentKey(cacheKey),
      map: PackagerRunner.getMapKey(cacheKey),
      info: PackagerRunner.getInfoKey(cacheKey)
    };
    return this.writeToCache(cacheKeys, type, contents, map);
  }
  // Runs the packager, then the optimizer chain, then stringifies the
  // source map (if any). The packager or an optimizer may change the
  // output type (e.g. via `type` on its result).
  async getBundleResult(bundle, bundleGraph, configs, bundleConfigs) {
    var _packaged$type, _res$type;
    let packaged = await this.package(bundle, bundleGraph, configs, bundleConfigs);
    let type = (_packaged$type = packaged.type) !== null && _packaged$type !== void 0 ? _packaged$type : bundle.type;
    let res = await this.optimize(bundle, bundleGraph, type, packaged.contents, packaged.map, configs, bundleConfigs);
    let map = res.map != null ? await this.generateSourceMap(bundle, res.map) : null;
    return {
      type: (_res$type = res.type) !== null && _res$type !== void 0 ? _res$type : type,
      contents: res.contents,
      map
    };
  }
  // For inline source maps this returns a Promise of the stringified map
  // (generateSourceMap is async); otherwise it returns the sibling
  // "<name>.map" filename, or null when no map should be referenced.
  getSourceMapReference(bundle, map) {
    if (map && bundle.env.sourceMap && bundle.bundleBehavior !== 'inline') {
      if (bundle.env.sourceMap && bundle.env.sourceMap.inline) {
        return this.generateSourceMap((0, _Bundle.bundleToInternalBundle)(bundle), map);
      } else {
        return _path().default.basename(bundle.name) + '.map';
      }
    } else {
      return null;
    }
  }
  // Runs the packager plugin for the bundle, reporting progress, tracing
  // the call, and converting thrown errors into diagnostics attributed to
  // the plugin. Inline bundle contents are produced recursively on demand.
  async package(internalBundle, bundleGraph, configs, bundleConfigs) {
    let bundle = _Bundle.NamedBundle.get(internalBundle, bundleGraph, this.options);
    this.report({
      type: 'buildProgress',
      phase: 'packaging',
      bundle
    });
    let packager = await this.config.getPackager(bundle.name);
    let {
      name,
      resolveFrom,
      plugin
    } = packager;
    let measurement;
    try {
      var _configs$get, _bundleConfigs$get;
      measurement = _profiler().tracer.createMeasurement(name, 'packaging', bundle.name, {
        type: bundle.type
      });
      return await plugin.package({
        config: (_configs$get = configs.get(name)) === null || _configs$get === void 0 ? void 0 : _configs$get.result,
        bundleConfig: (_bundleConfigs$get = bundleConfigs.get(name)) === null || _bundleConfigs$get === void 0 ? void 0 : _bundleConfigs$get.result,
        bundle,
        bundleGraph: new _BundleGraph.default(bundleGraph, _Bundle.NamedBundle.get.bind(_Bundle.NamedBundle), this.options),
        getSourceMapReference: map => {
          return this.getSourceMapReference(bundle, map);
        },
        options: this.pluginOptions,
        logger: new (_logger().PluginLogger)({
          origin: name
        }),
        tracer: new (_profiler().PluginTracer)({
          origin: name,
          category: 'package'
        }),
        getInlineBundleContents: async (bundle, bundleGraph) => {
          if (bundle.bundleBehavior !== 'inline') {
            throw new Error('Bundle is not inline and unable to retrieve contents');
          }
          let res = await this.getBundleResult((0, _Bundle.bundleToInternalBundle)(bundle),
          // $FlowFixMe
          (0, _BundleGraph.bundleGraphToInternalBundleGraph)(bundleGraph), configs, bundleConfigs);
          return {
            contents: res.contents
          };
        }
      });
    } catch (e) {
      throw new (_diagnostic().default)({
        diagnostic: (0, _diagnostic().errorToDiagnostic)(e, {
          origin: name,
          filePath: _path().default.join(bundle.target.distDir, bundle.name)
        })
      });
    } finally {
      measurement && measurement.end();
      // Add dev dependency for the packager. This must be done AFTER running it due to
      // the potential for lazy require() that aren't executed until the request runs.
      let devDepRequest = await (0, _DevDepRequest.createDevDependency)({
        specifier: name,
        resolveFrom
      }, this.previousDevDeps, this.options);
      this.devDepRequests.set(`${name}:${(0, _projectPath.fromProjectPathRelative)(resolveFrom)}`, devDepRequest);
    }
  }
  // Runs every matching optimizer in sequence, threading type/contents/map
  // through the chain. Skips reporting entirely when no optimizers match.
  async optimize(internalBundle, internalBundleGraph, type, contents, map, configs, bundleConfigs) {
    let bundle = _Bundle.NamedBundle.get(internalBundle, internalBundleGraph, this.options);
    let bundleGraph = new _BundleGraph.default(internalBundleGraph, _Bundle.NamedBundle.get.bind(_Bundle.NamedBundle), this.options);
    let optimizers = await this.config.getOptimizers(bundle.name, internalBundle.pipeline);
    if (!optimizers.length) {
      return {
        type: bundle.type,
        contents,
        map
      };
    }
    this.report({
      type: 'buildProgress',
      phase: 'optimizing',
      bundle
    });
    let optimized = {
      type,
      contents,
      map
    };
    for (let optimizer of optimizers) {
      let measurement;
      try {
        var _configs$get2, _bundleConfigs$get2, _next$type;
        measurement = _profiler().tracer.createMeasurement(optimizer.name, 'optimize', bundle.name);
        let next = await optimizer.plugin.optimize({
          config: (_configs$get2 = configs.get(optimizer.name)) === null || _configs$get2 === void 0 ? void 0 : _configs$get2.result,
          bundleConfig: (_bundleConfigs$get2 = bundleConfigs.get(optimizer.name)) === null || _bundleConfigs$get2 === void 0 ? void 0 : _bundleConfigs$get2.result,
          bundle,
          bundleGraph,
          contents: optimized.contents,
          map: optimized.map,
          getSourceMapReference: map => {
            return this.getSourceMapReference(bundle, map);
          },
          options: this.pluginOptions,
          logger: new (_logger().PluginLogger)({
            origin: optimizer.name
          }),
          tracer: new (_profiler().PluginTracer)({
            origin: optimizer.name,
            category: 'optimize'
          })
        });
        optimized.type = (_next$type = next.type) !== null && _next$type !== void 0 ? _next$type : optimized.type;
        optimized.contents = next.contents;
        optimized.map = next.map;
      } catch (e) {
        throw new (_diagnostic().default)({
          diagnostic: (0, _diagnostic().errorToDiagnostic)(e, {
            origin: optimizer.name,
            filePath: _path().default.join(bundle.target.distDir, bundle.name)
          })
        });
      } finally {
        measurement && measurement.end();
        // Add dev dependency for the optimizer. This must be done AFTER running it due to
        // the potential for lazy require() that aren't executed until the request runs.
        let devDepRequest = await (0, _DevDepRequest.createDevDependency)({
          specifier: optimizer.name,
          resolveFrom: optimizer.resolveFrom
        }, this.previousDevDeps, this.options);
        this.devDepRequests.set(`${optimizer.name}:${(0, _projectPath.fromProjectPathRelative)(optimizer.resolveFrom)}`, devDepRequest);
      }
    }
    return optimized;
  }
  // Stringifies a SourceMap, choosing sourceRoot and inlineSources from
  // the bundle's environment (with dev-server and production defaults),
  // and inlining the whole map when env.sourceMap.inline is set.
  async generateSourceMap(bundle, map) {
    // sourceRoot should be a relative path between outDir and rootDir for node.js targets
    let filePath = (0, _projectPath.joinProjectPath)(bundle.target.distDir, (0, _nullthrows().default)(bundle.name));
    let fullPath = (0, _projectPath.fromProjectPath)(this.options.projectRoot, filePath);
    let sourceRoot = _path().default.relative(_path().default.dirname(fullPath), this.options.projectRoot);
    let inlineSources = false;
    if (bundle.target) {
      if (bundle.env.sourceMap && bundle.env.sourceMap.sourceRoot !== undefined) {
        sourceRoot = bundle.env.sourceMap.sourceRoot;
      } else if (this.options.serveOptions && bundle.target.env.context === 'browser') {
        sourceRoot = '/__parcel_source_root';
      }
      if (bundle.env.sourceMap && bundle.env.sourceMap.inlineSources !== undefined) {
        inlineSources = bundle.env.sourceMap.inlineSources;
      } else if (bundle.target.env.context !== 'node') {
        // inlining should only happen in production for browser targets by default
        inlineSources = this.options.mode === 'production';
      }
    }
    let isInlineMap = bundle.env.sourceMap && bundle.env.sourceMap.inline;
    let stringified = await map.stringify({
      file: _path().default.basename(fullPath + '.map'),
      // $FlowFixMe
      fs: this.options.inputFS,
      rootDir: this.options.projectRoot,
      sourceRoot: !inlineSources ? _url().default.format(_url().default.parse(sourceRoot + '/')) : undefined,
      inlineSources,
      format: isInlineMap ? 'inline' : 'string'
    });
    (0, _assert().default)(typeof stringified === 'string');
    return stringified;
  }
  // Hashes everything that can affect packaged output: Parcel version,
  // dev-dep hashes (including inline bundles'), the given invalidations,
  // target public URL, the bundle's graph hash, all config hashes, the
  // build mode and the lazy/eager build strategy.
  async getCacheKey(bundle, bundleGraph, configs, bundleConfigs, invalidations) {
    let configResults = {};
    for (let [pluginName, config] of configs) {
      if (config) {
        configResults[pluginName] = await (0, _ConfigRequest.getConfigHash)(config, pluginName, this.options);
      }
    }
    let globalInfoResults = {};
    for (let [pluginName, config] of bundleConfigs) {
      if (config) {
        globalInfoResults[pluginName] = await (0, _ConfigRequest.getConfigHash)(config, pluginName, this.options);
      }
    }
    let devDepHashes = await this.getDevDepHashes(bundle);
    for (let inlineBundle of bundleGraph.getInlineBundles(bundle)) {
      devDepHashes += await this.getDevDepHashes(inlineBundle);
    }
    let invalidationHash = await (0, _assetUtils.getInvalidationHash)(invalidations, this.options);
    return (0, _rust().hashString)(_constants.PARCEL_VERSION + devDepHashes + invalidationHash + bundle.target.publicUrl + bundleGraph.getHash(bundle) + JSON.stringify(configResults) + JSON.stringify(globalInfoResults) + this.options.mode + (this.options.shouldBuildLazily ? 'lazy' : 'eager'));
  }
  // Concatenates the dev-dependency hashes for the bundle's packager and
  // optimizers, preferring this run's recorded requests and falling back
  // to the previous build's hashes, then ''.
  async getDevDepHashes(bundle) {
    var _ref, _this$devDepRequests$, _this$devDepRequests$2;
    let name = (0, _nullthrows().default)(bundle.name);
    let packager = await this.config.getPackager(name);
    let optimizers = await this.config.getOptimizers(name);
    let key = `${packager.name}:${(0, _projectPath.fromProjectPathRelative)(packager.resolveFrom)}`;
    let devDepHashes = (_ref = (_this$devDepRequests$ = (_this$devDepRequests$2 = this.devDepRequests.get(key)) === null || _this$devDepRequests$2 === void 0 ? void 0 : _this$devDepRequests$2.hash) !== null && _this$devDepRequests$ !== void 0 ? _this$devDepRequests$ : this.previousDevDeps.get(key)) !== null && _ref !== void 0 ? _ref : '';
    for (let {
      name,
      resolveFrom
    } of optimizers) {
      var _ref2, _this$devDepRequests$3, _this$devDepRequests$4;
      let key = `${name}:${(0, _projectPath.fromProjectPathRelative)(resolveFrom)}`;
      devDepHashes += (_ref2 = (_this$devDepRequests$3 = (_this$devDepRequests$4 = this.devDepRequests.get(key)) === null || _this$devDepRequests$4 === void 0 ? void 0 : _this$devDepRequests$4.hash) !== null && _this$devDepRequests$3 !== void 0 ? _this$devDepRequests$3 : this.previousDevDeps.get(key)) !== null && _ref2 !== void 0 ? _ref2 : '';
    }
    return devDepHashes;
  }
  // Reads packaged contents (and map, if present) from the cache as
  // streams; large blobs use the cache's streaming API directly. Returns
  // null when no content is cached under this key.
  async readFromCache(cacheKey) {
    let contentKey = PackagerRunner.getContentKey(cacheKey);
    let mapKey = PackagerRunner.getMapKey(cacheKey);
    let isLargeBlob = await this.options.cache.hasLargeBlob(contentKey);
    let contentExists = isLargeBlob || (await this.options.cache.has(contentKey));
    if (!contentExists) {
      return null;
    }
    let mapExists = await this.options.cache.has(mapKey);
    return {
      contents: isLargeBlob ? this.options.cache.getStream(contentKey) : (0, _utils().blobToStream)(await this.options.cache.getBlob(contentKey)),
      map: mapExists ? (0, _utils().blobToStream)(await this.options.cache.getBlob(mapKey)) : null
    };
  }
  // Writes contents (stream, string or buffer) plus optional map to the
  // cache, computing size, content hash and the set of hash references
  // found in the output. For streams, a BOUNDARY_LENGTH-sized tail of the
  // previous chunk is prepended so hash refs spanning chunks are matched.
  async writeToCache(cacheKeys, type, contents, map) {
    let size = 0;
    let hash;
    let hashReferences = [];
    let isLargeBlob = false;
    // TODO: don't replace hash references in binary files??
    if (contents instanceof _stream().Readable) {
      isLargeBlob = true;
      let boundaryStr = '';
      let h = new (_rust().Hash)();
      await this.options.cache.setStream(cacheKeys.content, (0, _utils().blobToStream)(contents).pipe(new (_utils().TapStream)(buf => {
        var _str$match;
        let str = boundaryStr + buf.toString();
        hashReferences = hashReferences.concat((_str$match = str.match(_constants.HASH_REF_REGEX)) !== null && _str$match !== void 0 ? _str$match : []);
        size += buf.length;
        h.writeBuffer(buf);
        boundaryStr = str.slice(str.length - BOUNDARY_LENGTH);
      })));
      hash = h.finish();
    } else if (typeof contents === 'string') {
      var _contents$match;
      let buffer = Buffer.from(contents);
      size = buffer.byteLength;
      hash = (0, _rust().hashBuffer)(buffer);
      hashReferences = (_contents$match = contents.match(_constants.HASH_REF_REGEX)) !== null && _contents$match !== void 0 ? _contents$match : [];
      await this.options.cache.setBlob(cacheKeys.content, buffer);
    } else {
      var _contents$toString$ma;
      size = contents.length;
      hash = (0, _rust().hashBuffer)(contents);
      hashReferences = (_contents$toString$ma = contents.toString().match(_constants.HASH_REF_REGEX)) !== null && _contents$toString$ma !== void 0 ? _contents$toString$ma : [];
      await this.options.cache.setBlob(cacheKeys.content, contents);
    }
    if (map != null) {
      await this.options.cache.setBlob(cacheKeys.map, map);
    }
    let info = {
      type,
      size,
      hash,
      hashReferences,
      cacheKeys,
      isLargeBlob
    };
    await this.options.cache.set(cacheKeys.info, info);
    return info;
  }
  // Derive distinct cache keys for content, map and info from one base key.
  static getContentKey(cacheKey) {
    return (0, _rust().hashString)(`${cacheKey}:content`);
  }
  static getMapKey(cacheKey) {
    return (0, _rust().hashString)(`${cacheKey}:map`);
  }
  static getInfoKey(cacheKey) {
    return (0, _rust().hashString)(`${cacheKey}:info`);
  }
}
exports.default = PackagerRunner;

View File

@@ -0,0 +1,541 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.INTERNAL_TRANSFORM = exports.INTERNAL_RESOLVE = exports.BuildError = void 0;
exports.createWorkerFarm = createWorkerFarm;
exports.default = void 0;
// Lazy, memoized module loaders: each defers its require() until first
// use, then rebinds itself to return the cached module.
function _path() {
  const mod = _interopRequireDefault(require("path"));
  _path = () => mod;
  return mod;
}
function _assert() {
  const mod = _interopRequireDefault(require("assert"));
  _assert = () => mod;
  return mod;
}
function _diagnostic() {
  const mod = _interopRequireWildcard(require("@parcel/diagnostic"));
  _diagnostic = () => mod;
  return mod;
}
var _Asset = require("./public/Asset");
var _Bundle = require("./public/Bundle");
var _BundleGraph = _interopRequireDefault(require("./public/BundleGraph"));
// Lazy, memoized loaders for the worker farm and nullthrows helpers.
function _workers() {
  const mod = _interopRequireDefault(require("@parcel/workers"));
  _workers = () => mod;
  return mod;
}
function _nullthrows() {
  const mod = _interopRequireDefault(require("nullthrows"));
  _nullthrows = () => mod;
  return mod;
}
var _utils = require("./utils");
var _ParcelConfigRequest = require("./requests/ParcelConfigRequest");
var _ReporterRunner = _interopRequireDefault(require("./ReporterRunner"));
var _dumpGraphToGraphViz = _interopRequireDefault(require("./dumpGraphToGraphViz"));
var _resolveOptions = _interopRequireDefault(require("./resolveOptions"));
// Lazily requires "@parcel/events" on first call and memoizes the result.
function _events() {
  const mod = require("@parcel/events");
  _events = () => mod;
  return mod;
}
var _registerCoreWithSerializer = require("./registerCoreWithSerializer");
// Lazy, memoized loaders for the AbortController ponyfill and shared utils.
function _cjsPonyfill() {
  const mod = require("abortcontroller-polyfill/dist/cjs-ponyfill");
  _cjsPonyfill = () => mod;
  return mod;
}
function _utils2() {
  const mod = require("@parcel/utils");
  _utils2 = () => mod;
  return mod;
}
var _ParcelConfig = _interopRequireDefault(require("./ParcelConfig"));
// Lazily requires "@parcel/logger" (default-interop) and memoizes it.
function _logger() {
  const mod = _interopRequireDefault(require("@parcel/logger"));
  _logger = () => mod;
  return mod;
}
var _RequestTracker = _interopRequireWildcard(require("./RequestTracker"));
var _ValidationRequest = _interopRequireDefault(require("./requests/ValidationRequest"));
var _ParcelBuildRequest = _interopRequireDefault(require("./requests/ParcelBuildRequest"));
var _AssetRequest = _interopRequireDefault(require("./requests/AssetRequest"));
var _PathRequest = _interopRequireDefault(require("./requests/PathRequest"));
var _Environment = require("./Environment");
var _Dependency = require("./Dependency");
// Lazy, memoized loaders for the source-map and rust native bindings.
function _sourceMap() {
  const mod = require("@parcel/source-map");
  _sourceMap = () => mod;
  return mod;
}
function _rust() {
  const mod = require("@parcel/rust");
  _rust = () => mod;
  return mod;
}
var _projectPath = require("./projectPath");
// Lazily requires "@parcel/profiler" on first call and memoizes the result.
function _profiler() {
  const mod = require("@parcel/profiler");
  _profiler = () => mod;
  return mod;
}
// Babel-generated interop helpers.
// Returns one of two WeakMap caches for wildcard interop, selected by
// whether the caller asked for a getter-preserving namespace copy.
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
// Copies a CommonJS module's own properties (preserving getters/setters)
// onto a fresh null-prototype namespace object, exposes the module itself
// as `default`, and memoizes the result in the WeakMap cache above.
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
// Wraps a CommonJS export in `{ default: ... }` unless it is already a
// transpiled ES module (marked with __esModule).
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Module-load side effect: presumably registers core types with the
// serializer so they survive worker round-trips — NOTE(review): confirm
// against registerCoreWithSerializer's implementation.
// eslint-disable-next-line no-unused-vars
(0, _registerCoreWithSerializer.registerCoreWithSerializer)();
// Sentinel symbols exported for internal transform/resolve entry points.
const INTERNAL_TRANSFORM = exports.INTERNAL_TRANSFORM = Symbol('internal_transform');
const INTERNAL_RESOLVE = exports.INTERNAL_RESOLVE = Symbol('internal_resolve');
class Parcel {
#requestTracker /*: RequestTracker*/;
#config /*: ParcelConfig*/;
#farm /*: WorkerFarm*/;
#initialized /*: boolean*/ = false;
#disposable /*: Disposable */;
#initialOptions /*: InitialParcelOptions*/;
#reporterRunner /*: ReporterRunner*/;
#resolvedOptions /*: ?ParcelOptions*/ = null;
#optionsRef /*: SharedReference */;
#watchAbortController /*: AbortController*/;
#watchQueue /*: PromiseQueue<?BuildEvent>*/ = new (_utils2().PromiseQueue)({
maxConcurrent: 1
});
#watchEvents /*: ValueEmitter<
| {|
+error: Error,
+buildEvent?: void,
|}
| {|
+buildEvent: BuildEvent,
+error?: void,
|},
> */;
#watcherSubscription /*: ?AsyncSubscription*/;
#watcherCount /*: number*/ = 0;
#requestedAssetIds /*: Set<string>*/ = new Set();
constructor(options) {
this.#initialOptions = options;
}
async _init() {
if (this.#initialized) {
return;
}
await _sourceMap().init;
await (_rust().init === null || _rust().init === void 0 ? void 0 : (0, _rust().init)());
let resolvedOptions = await (0, _resolveOptions.default)(this.#initialOptions);
this.#resolvedOptions = resolvedOptions;
let {
config
} = await (0, _ParcelConfigRequest.loadParcelConfig)(resolvedOptions);
this.#config = new _ParcelConfig.default(config, resolvedOptions);
if (this.#initialOptions.workerFarm) {
if (this.#initialOptions.workerFarm.ending) {
throw new Error('Supplied WorkerFarm is ending');
}
this.#farm = this.#initialOptions.workerFarm;
} else {
this.#farm = createWorkerFarm({
shouldPatchConsole: resolvedOptions.shouldPatchConsole,
shouldTrace: resolvedOptions.shouldTrace
});
}
await resolvedOptions.cache.ensure();
let {
dispose: disposeOptions,
ref: optionsRef
} = await this.#farm.createSharedReference(resolvedOptions, false);
this.#optionsRef = optionsRef;
this.#disposable = new (_events().Disposable)();
if (this.#initialOptions.workerFarm) {
// If we don't own the farm, dispose of only these references when
// Parcel ends.
this.#disposable.add(disposeOptions);
} else {
// Otherwise, when shutting down, end the entire farm we created.
this.#disposable.add(() => this.#farm.end());
}
this.#watchEvents = new (_events().ValueEmitter)();
this.#disposable.add(() => this.#watchEvents.dispose());
this.#requestTracker = await _RequestTracker.default.init({
farm: this.#farm,
options: resolvedOptions
});
this.#reporterRunner = new _ReporterRunner.default({
config: this.#config,
options: resolvedOptions,
workerFarm: this.#farm
});
this.#disposable.add(this.#reporterRunner);
this.#initialized = true;
}
async run() {
let startTime = Date.now();
if (!this.#initialized) {
await this._init();
}
let result = await this._build({
startTime
});
await this.#requestTracker.writeToCache();
await this._end();
if (result.type === 'buildFailure') {
throw new BuildError(result.diagnostics);
}
return result;
}
async _end() {
this.#initialized = false;
await this.#disposable.dispose();
}
async writeRequestTrackerToCache() {
if (this.#watchQueue.getNumWaiting() === 0) {
// If there's no queued events, we are safe to write the request graph to disk
const abortController = new (_cjsPonyfill().AbortController)();
const unsubscribe = this.#watchQueue.subscribeToAdd(() => {
abortController.abort();
});
try {
await this.#requestTracker.writeToCache(abortController.signal);
} catch (err) {
if (!abortController.signal.aborted) {
// We expect abort errors if we interrupt the cache write
throw err;
}
}
unsubscribe();
}
}
async _startNextBuild() {
this.#watchAbortController = new (_cjsPonyfill().AbortController)();
await this.#farm.callAllWorkers('clearConfigCache', []);
try {
let buildEvent = await this._build({
signal: this.#watchAbortController.signal
});
this.#watchEvents.emit({
buildEvent
});
return buildEvent;
} catch (err) {
// Ignore BuildAbortErrors and only emit critical errors.
if (!(err instanceof _utils.BuildAbortError)) {
throw err;
}
} finally {
// If the build passes or fails, we want to cache the request graph
await this.writeRequestTrackerToCache();
}
}
async watch(cb) {
if (!this.#initialized) {
await this._init();
}
let watchEventsDisposable;
if (cb) {
watchEventsDisposable = this.#watchEvents.addListener(({
error,
buildEvent
}) => cb(error, buildEvent));
}
if (this.#watcherCount === 0) {
this.#watcherSubscription = await this._getWatcherSubscription();
await this.#reporterRunner.report({
type: 'watchStart'
});
// Kick off a first build, but don't await its results. Its results will
// be provided to the callback.
this.#watchQueue.add(() => this._startNextBuild());
this.#watchQueue.run();
}
this.#watcherCount++;
let unsubscribePromise;
const unsubscribe = async () => {
if (watchEventsDisposable) {
watchEventsDisposable.dispose();
}
this.#watcherCount--;
if (this.#watcherCount === 0) {
await (0, _nullthrows().default)(this.#watcherSubscription).unsubscribe();
this.#watcherSubscription = null;
await this.#reporterRunner.report({
type: 'watchEnd'
});
this.#watchAbortController.abort();
await this.#watchQueue.run();
await this._end();
}
};
return {
unsubscribe() {
if (unsubscribePromise == null) {
unsubscribePromise = unsubscribe();
}
return unsubscribePromise;
}
};
}
async _build({
signal,
startTime = Date.now()
} = {
/*::...null*/
}) {
this.#requestTracker.setSignal(signal);
let options = (0, _nullthrows().default)(this.#resolvedOptions);
try {
if (options.shouldProfile) {
await this.startProfiling();
}
if (options.shouldTrace) {
_profiler().tracer.enable();
}
this.#reporterRunner.report({
type: 'buildStart'
});
this.#requestTracker.graph.invalidateOnBuildNodes();
let request = (0, _ParcelBuildRequest.default)({
optionsRef: this.#optionsRef,
requestedAssetIds: this.#requestedAssetIds,
signal
});
let {
bundleGraph,
bundleInfo,
changedAssets,
assetRequests
} = await this.#requestTracker.runRequest(request, {
force: true
});
this.#requestedAssetIds.clear();
await (0, _dumpGraphToGraphViz.default)(
// $FlowFixMe
this.#requestTracker.graph, 'RequestGraph', _RequestTracker.requestGraphEdgeTypes);
let event = {
type: 'buildSuccess',
changedAssets: new Map(Array.from(changedAssets).map(([id, asset]) => [id, (0, _Asset.assetFromValue)(asset, options)])),
bundleGraph: new _BundleGraph.default(bundleGraph, (bundle, bundleGraph, options) => _Bundle.PackagedBundle.getWithInfo(bundle, bundleGraph, options, bundleInfo.get(bundle.id)), options),
buildTime: Date.now() - startTime,
requestBundle: async bundle => {
let bundleNode = bundleGraph._graph.getNodeByContentKey(bundle.id);
(0, _assert().default)((bundleNode === null || bundleNode === void 0 ? void 0 : bundleNode.type) === 'bundle', 'Bundle does not exist');
if (!bundleNode.value.isPlaceholder) {
// Nothing to do.
return {
type: 'buildSuccess',
changedAssets: new Map(),
bundleGraph: event.bundleGraph,
buildTime: 0,
requestBundle: event.requestBundle
};
}
for (let assetId of bundleNode.value.entryAssetIds) {
this.#requestedAssetIds.add(assetId);
}
if (this.#watchQueue.getNumWaiting() === 0) {
if (this.#watchAbortController) {
this.#watchAbortController.abort();
}
this.#watchQueue.add(() => this._startNextBuild());
}
let results = await this.#watchQueue.run();
let result = results.filter(Boolean).pop();
if (result.type === 'buildFailure') {
throw new BuildError(result.diagnostics);
}
return result;
}
};
await this.#reporterRunner.report(event);
await this.#requestTracker.runRequest((0, _ValidationRequest.default)({
optionsRef: this.#optionsRef,
assetRequests
}), {
force: assetRequests.length > 0
});
return event;
} catch (e) {
if (e instanceof _utils.BuildAbortError) {
throw e;
}
let diagnostic = (0, _diagnostic().anyToDiagnostic)(e);
let event = {
type: 'buildFailure',
diagnostics: Array.isArray(diagnostic) ? diagnostic : [diagnostic]
};
await this.#reporterRunner.report(event);
return event;
} finally {
if (this.isProfiling) {
await this.stopProfiling();
}
await this.#farm.callAllWorkers('clearConfigCache', []);
}
}
// Subscribes to filesystem events on the resolved watch dir. Events are fed
// to the request tracker; when any cached request becomes invalid and no
// rebuild is already queued, the in-flight build (if any) is aborted and a
// new build is queued. Returns an object with an unsubscribe() method.
async _getWatcherSubscription() {
  // Only one watcher subscription may exist at a time.
  (0, _assert().default)(this.#watcherSubscription == null);
  let resolvedOptions = (0, _nullthrows().default)(this.#resolvedOptions);
  let opts = (0, _RequestTracker.getWatcherOptions)(resolvedOptions);
  let sub = await resolvedOptions.inputFS.watch(resolvedOptions.watchDir, (err, events) => {
    if (err) {
      this.#watchEvents.emit({
        error: err
      });
      return;
    }
    // Convert watcher paths to project-relative paths before handing them to
    // the request tracker; returns whether anything was invalidated.
    let isInvalid = this.#requestTracker.respondToFSEvents(events.map(e => ({
      type: e.type,
      path: (0, _projectPath.toProjectPath)(resolvedOptions.projectRoot, e.path)
    })));
    if (isInvalid && this.#watchQueue.getNumWaiting() === 0) {
      // Abort the build in progress; the queued build will pick up the changes.
      if (this.#watchAbortController) {
        this.#watchAbortController.abort();
      }
      this.#watchQueue.add(() => this._startNextBuild());
      this.#watchQueue.run();
    }
  }, opts);
  return {
    unsubscribe: () => sub.unsubscribe()
  };
}
// This is mainly for integration tests and is not public API!
// Returns the resolved options; throws if Parcel has not initialized yet.
_getResolvedParcelOptions() {
  return (0, _nullthrows().default)(this.#resolvedOptions, 'Resolved options is null, please let parcel initialize before accessing this.');
}
// Starts CPU profiling across the worker farm.
// Throws if a profiling session is already active.
async startProfiling() {
  if (this.isProfiling) {
    throw new Error('Parcel is already profiling');
  }
  _logger().default.info({
    origin: '@parcel/core',
    message: 'Starting profiling...'
  });
  this.isProfiling = true;
  await this.#farm.startProfile();
}
// Ends the profiling session started with startProfiling() and returns the
// farm's endProfile() promise. Throws if profiling is not active.
stopProfiling() {
  if (!this.isProfiling) {
    throw new Error('Parcel is not profiling');
  }
  _logger().default.info({
    origin: '@parcel/core',
    message: 'Stopping profiling...'
  });
  this.isProfiling = false;
  return this.#farm.endProfile();
}
// Requests a heap snapshot from the worker farm; returns the farm's promise.
takeHeapSnapshot() {
  _logger().default.info({
    origin: '@parcel/core',
    message: 'Taking heap snapshot...'
  });
  return this.#farm.takeHeapSnapshot();
}
// Unstable API: runs the asset transform pipeline on a single file and
// returns the resulting public Asset objects. Lazily initializes Parcel
// on first use.
async unstable_transform(options) {
  var _options$env;
  if (!this.#initialized) {
    await this._init();
  }
  let projectRoot = (0, _nullthrows().default)(this.#resolvedOptions).projectRoot;
  let request = (0, _AssetRequest.default)({
    ...options,
    // File path and any env source location are rebased onto the project root.
    filePath: (0, _projectPath.toProjectPath)(projectRoot, options.filePath),
    optionsRef: this.#optionsRef,
    env: (0, _Environment.createEnvironment)({
      ...options.env,
      loc: ((_options$env = options.env) === null || _options$env === void 0 ? void 0 : _options$env.loc) != null ? {
        ...options.env.loc,
        filePath: (0, _projectPath.toProjectPath)(projectRoot, options.env.loc.filePath)
      } : undefined
    })
  });
  // force: always rerun, bypassing the request cache.
  let res = await this.#requestTracker.runRequest(request, {
    force: true
  });
  return res.map(asset => (0, _Asset.assetFromValue)(asset, (0, _nullthrows().default)(this.#resolvedOptions)));
}
async unstable_resolve(request) {
var _request$env;
if (!this.#initialized) {
await this._init();
}
let projectRoot = (0, _nullthrows().default)(this.#resolvedOptions).projectRoot;
if (request.resolveFrom == null && _path().default.isAbsolute(request.specifier)) {
request.specifier = (0, _projectPath.fromProjectPathRelative)((0, _projectPath.toProjectPath)(projectRoot, request.specifier));
}
let dependency = (0, _Dependency.createDependency)(projectRoot, {
...request,
env: (0, _Environment.createEnvironment)({
...request.env,
loc: ((_request$env = request.env) === null || _request$env === void 0 ? void 0 : _request$env.loc) != null ? {
...request.env.loc,
filePath: (0, _projectPath.toProjectPath)(projectRoot, request.env.loc.filePath)
} : undefined
})
});
let req = (0, _PathRequest.default)({
dependency,
name: request.specifier
});
let res = await this.#requestTracker.runRequest(req, {
force: true
});
if (!res) {
return null;
}
return {
filePath: (0, _projectPath.fromProjectPath)(projectRoot, res.filePath),
code: res.code,
query: res.query,
sideEffects: res.sideEffects
};
}
}
exports.default = Parcel;
// Error thrown when a build fails; wraps the build's diagnostics in the
// default export of @parcel/diagnostic (a throwable diagnostic type).
class BuildError extends _diagnostic().default {
  constructor(diagnostic) {
    super({
      diagnostic
    });
    this.name = 'BuildError';
  }
}
exports.BuildError = BuildError;
// Creates a worker farm for Parcel. In browser bundles the worker module is
// referenced by specifier; in Node it is resolved relative to this file.
function createWorkerFarm(options = {}) {
  return new (_workers().default)({
    ...options,
    // $FlowFixMe
    workerPath: process.browser ? '@parcel/core/src/worker.js' : require.resolve('./worker')
  });
}

View File

@@ -0,0 +1,300 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _json() {
const data = _interopRequireDefault(require("json5"));
_json = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _path() {
const data = require("path");
_path = function () {
return data;
};
return data;
}
var _loadParcelPlugin = _interopRequireDefault(require("./loadParcelPlugin"));
var _projectPath = require("./projectPath");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
// Runtime representation of a fully-resolved .parcelrc: holds each plugin
// pipeline (resolvers, transformers, bundler, namers, packagers, optimizers,
// compressors, reporters, validators, runtimes) and lazily loads/caches the
// plugin packages they reference by name.
class ParcelConfig {
  constructor(config, options) {
    this.options = options;
    this.filePath = config.filePath;
    this.resolvers = config.resolvers || [];
    this.transformers = config.transformers || {};
    this.runtimes = config.runtimes || [];
    this.bundler = config.bundler;
    this.namers = config.namers || [];
    this.packagers = config.packagers || {};
    this.optimizers = config.optimizers || {};
    this.compressors = config.compressors || {};
    this.reporters = config.reporters || [];
    this.validators = config.validators || {};
    // packageName -> loaded plugin; see _loadPlugin.
    this.pluginCache = new Map();
    // glob pattern -> compiled RegExp; see isGlobMatch.
    this.regexCache = new Map();
  }
  // Rebuilds an instance from serialize()'s output (e.g. across workers).
  static deserialize(serialized) {
    return new ParcelConfig(serialized.config, serialized.options);
  }
  // Plain-object snapshot of the configured pipelines (no caches).
  getConfig() {
    return {
      filePath: this.filePath,
      resolvers: this.resolvers,
      transformers: this.transformers,
      validators: this.validators,
      runtimes: this.runtimes,
      bundler: this.bundler,
      namers: this.namers,
      packagers: this.packagers,
      optimizers: this.optimizers,
      compressors: this.compressors,
      reporters: this.reporters
    };
  }
  serialize() {
    return {
      $$raw: false,
      config: this.getConfig(),
      options: this.options
    };
  }
  // Loads the plugin package referenced by a config node, caching by package
  // name so each plugin is only loaded once per config instance.
  _loadPlugin(node) {
    let plugin = this.pluginCache.get(node.packageName);
    if (plugin) {
      return plugin;
    }
    plugin = (0, _loadParcelPlugin.default)(node.packageName, (0, _projectPath.fromProjectPath)(this.options.projectRoot, node.resolveFrom), node.keyPath, this.options);
    this.pluginCache.set(node.packageName, plugin);
    return plugin;
  }
  // Loads a plugin and tags the result with its name and config key path.
  async loadPlugin(node) {
    let plugin = await this._loadPlugin(node);
    return {
      ...plugin,
      name: node.packageName,
      keyPath: node.keyPath
    };
  }
  // Evicts a plugin from the cache so the next load re-resolves it.
  invalidatePlugin(packageName) {
    this.pluginCache.delete(packageName);
  }
  loadPlugins(plugins) {
    return Promise.all(plugins.map(p => this.loadPlugin(p)));
  }
  // Loaded resolver pipeline; rejects with a diagnostic if none configured.
  async getResolvers() {
    if (this.resolvers.length === 0) {
      throw await this.missingPluginError(this.resolvers, 'No resolver plugins specified in .parcelrc config', '/resolvers');
    }
    return this.loadPlugins(this.resolvers);
  }
  // Validator nodes whose glob matches filePath (may be empty).
  _getValidatorNodes(filePath) {
    let validators = this.matchGlobMapPipelines(filePath, this.validators) || [];
    return validators;
  }
  getValidatorNames(filePath) {
    let validators = this._getValidatorNodes(filePath);
    return validators.map(v => v.packageName);
  }
  getValidators(filePath) {
    let validators = this._getValidatorNodes(filePath);
    return this.loadPlugins(validators);
  }
  // Names of the named pipelines declared in transformers ("name:glob" keys).
  getNamedPipelines() {
    return Object.keys(this.transformers).filter(glob => glob.includes(':')).map(glob => glob.split(':')[0]);
  }
  // Transformer pipeline for a file (optionally within a named pipeline).
  // Rejects with a diagnostic when nothing matches, unless allowEmpty.
  async getTransformers(filePath, pipeline, allowEmpty) {
    let transformers = this.matchGlobMapPipelines(filePath, this.transformers, pipeline);
    if (!transformers || transformers.length === 0) {
      if (allowEmpty) {
        return [];
      }
      throw await this.missingPluginError(this.transformers, (0, _diagnostic().md)`No transformers found for __${(0, _projectPath.fromProjectPathRelative)(filePath)}__` + (pipeline != null ? ` with pipeline: '${pipeline}'` : '') + '.', '/transformers');
    }
    return this.loadPlugins(transformers);
  }
  // Loaded bundler plugin; rejects with a diagnostic if none configured.
  async getBundler() {
    if (!this.bundler) {
      throw await this.missingPluginError([], 'No bundler specified in .parcelrc config', '/bundler');
    }
    return this.loadPlugin(this.bundler);
  }
  // Loaded namer pipeline; rejects with a diagnostic if none configured.
  async getNamers() {
    if (this.namers.length === 0) {
      throw await this.missingPluginError(this.namers, 'No namer plugins specified in .parcelrc config', '/namers');
    }
    return this.loadPlugins(this.namers);
  }
  // Loaded runtime plugins; runtimes are optional, so this may be empty.
  getRuntimes() {
    if (!this.runtimes) {
      return Promise.resolve([]);
    }
    return this.loadPlugins(this.runtimes);
  }
  // Packager for a bundle path (first glob match wins); rejects if none.
  async getPackager(filePath) {
    let packager = this.matchGlobMap((0, _projectPath.toProjectPathUnsafe)(filePath), this.packagers);
    if (!packager) {
      throw await this.missingPluginError(this.packagers, (0, _diagnostic().md)`No packager found for __${filePath}__.`, '/packagers');
    }
    return this.loadPlugin(packager);
  }
  _getOptimizerNodes(filePath, pipeline) {
    var _this$matchGlobMapPip;
    // If a pipeline is specified, but it doesn't exist in the optimizers config, ignore it.
    // Pipelines for bundles come from their entry assets, so the pipeline likely exists in transformers.
    if (pipeline) {
      let prefix = pipeline + ':';
      if (!Object.keys(this.optimizers).some(glob => glob.startsWith(prefix))) {
        pipeline = null;
      }
    }
    return (_this$matchGlobMapPip = this.matchGlobMapPipelines((0, _projectPath.toProjectPathUnsafe)(filePath), this.optimizers, pipeline)) !== null && _this$matchGlobMapPip !== void 0 ? _this$matchGlobMapPip : [];
  }
  getOptimizerNames(filePath, pipeline) {
    let optimizers = this._getOptimizerNodes(filePath, pipeline);
    return optimizers.map(o => o.packageName);
  }
  // Optimizers are optional: an empty match resolves to an empty array.
  getOptimizers(filePath, pipeline) {
    let optimizers = this._getOptimizerNodes(filePath, pipeline);
    if (optimizers.length === 0) {
      return Promise.resolve([]);
    }
    return this.loadPlugins(optimizers);
  }
  // Compressors for a bundle path; rejects with a diagnostic if none match.
  async getCompressors(filePath) {
    var _this$matchGlobMapPip2;
    let compressors = (_this$matchGlobMapPip2 = this.matchGlobMapPipelines((0, _projectPath.toProjectPathUnsafe)(filePath), this.compressors)) !== null && _this$matchGlobMapPip2 !== void 0 ? _this$matchGlobMapPip2 : [];
    if (compressors.length === 0) {
      throw await this.missingPluginError(this.compressors, (0, _diagnostic().md)`No compressors found for __${filePath}__.`, '/compressors');
    }
    return this.loadPlugins(compressors);
  }
  getReporters() {
    return this.loadPlugins(this.reporters);
  }
  // Tests whether a project-relative path matches a "pipeline:glob" (or bare
  // glob) pattern. Compiled regexes are cached per glob.
  isGlobMatch(projectPath, pattern, pipeline) {
    // globs shouldn't be dependent on absolute paths anyway
    let filePath = (0, _projectPath.fromProjectPathRelative)(projectPath);
    let [patternPipeline, patternGlob] = pattern.split(':');
    if (!patternGlob) {
      // No ':' in the pattern: the whole pattern is the glob.
      patternGlob = patternPipeline;
      patternPipeline = null;
    }
    let re = this.regexCache.get(patternGlob);
    if (!re) {
      re = (0, _utils().globToRegex)(patternGlob, {
        dot: true,
        nocase: true
      });
      this.regexCache.set(patternGlob, re);
    }
    // The glob may match either the full relative path or just the basename.
    return (pipeline === patternPipeline || !pipeline && !patternPipeline) && (re.test(filePath) || re.test((0, _path().basename)(filePath)));
  }
  // Returns the value for the first glob in the map matching filePath.
  matchGlobMap(filePath, globMap) {
    for (let pattern in globMap) {
      if (this.isGlobMatch(filePath, pattern)) {
        return globMap[pattern];
      }
    }
    return null;
  }
  // Collects every pipeline whose glob matches filePath, then flattens them
  // by substituting each '...' spread with the next matching pipeline.
  matchGlobMapPipelines(filePath, globMap, pipeline) {
    let matches = [];
    if (pipeline) {
      // If a pipeline is requested, the glob needs to match exactly
      let exactMatch;
      for (let pattern in globMap) {
        if (this.isGlobMatch(filePath, pattern, pipeline)) {
          exactMatch = globMap[pattern];
          break;
        }
      }
      if (!exactMatch) {
        return [];
      } else {
        matches.push(exactMatch);
      }
    }
    for (let pattern in globMap) {
      if (this.isGlobMatch(filePath, pattern)) {
        matches.push(globMap[pattern]);
      }
    }
    // Recursively expands a single '...' per pipeline into the next match.
    let flatten = () => {
      let pipeline = matches.shift() || [];
      let spreadIndex = pipeline.indexOf('...');
      if (spreadIndex >= 0) {
        pipeline = [...pipeline.slice(0, spreadIndex), ...flatten(), ...pipeline.slice(spreadIndex + 1)];
      }
      if (pipeline.includes('...')) {
        throw new Error('Only one spread parameter can be included in a config pipeline');
      }
      return pipeline;
    };
    let res = flatten();
    // $FlowFixMe afaik this should work
    return res;
  }
  // Builds a throwable diagnostic pointing at the config file(s) where the
  // missing plugin pipeline is (or should be) declared, with JSON highlights.
  async missingPluginError(plugins, message, key) {
    let configsWithPlugin;
    if (Array.isArray(plugins)) {
      configsWithPlugin = new Set(getConfigPaths(this.options, plugins));
    } else {
      configsWithPlugin = new Set(Object.keys(plugins).flatMap(k => Array.isArray(plugins[k]) ? getConfigPaths(this.options, plugins[k]) : [getConfigPath(this.options, plugins[k])]));
    }
    if (configsWithPlugin.size === 0) {
      // Nothing declared anywhere: point at this config file itself.
      configsWithPlugin.add((0, _projectPath.fromProjectPath)(this.options.projectRoot, this.filePath));
    }
    let seenKey = false;
    let codeFrames = await Promise.all([...configsWithPlugin].map(async filePath => {
      let configContents = await this.options.inputFS.readFile(filePath, 'utf8');
      // When the file lacks the key, highlight the whole file instead.
      // NOTE(review): `key` is shared across these concurrent callbacks, so
      // clearing it here also affects the other config files — confirm this
      // is intended for the multi-config case.
      if (!_json().default.parse(configContents)[key.slice(1)]) {
        key = '';
      } else {
        seenKey = true;
      }
      return {
        filePath,
        code: configContents,
        codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(configContents, [{
          key
        }])
      };
    }));
    return new (_diagnostic().default)({
      diagnostic: {
        message,
        origin: '@parcel/core',
        codeFrames,
        hints: !seenKey ? ['Try extending __@parcel/config-default__'] : []
      }
    });
  }
}
exports.default = ParcelConfig;
// Resolves the config file path for each plugin node, skipping '...' spread
// placeholders and any falsy resolved paths.
function getConfigPaths(options, nodes) {
  const paths = [];
  for (const node of nodes) {
    if (node === '...') {
      continue;
    }
    const configPath = getConfigPath(options, node);
    if (configPath) {
      paths.push(configPath);
    }
  }
  return paths;
}
// Resolves a plugin node's resolveFrom project path to an absolute path
// rooted at the project root.
function getConfigPath(options, node) {
  return (0, _projectPath.fromProjectPath)(options.projectRoot, node.resolveFrom);
}

View File

@@ -0,0 +1,118 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
exports.validatePackageName = validatePackageName;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Reasoning behind this validation:
// https://github.com/parcel-bundler/parcel/issues/3397#issuecomment-521353931
// Validates that a plugin package name referenced from a Parcel config
// follows the naming convention for its plugin type. Throws an assertion
// error with a descriptive message on violation; empty/missing names and
// relative paths are accepted as-is.
//
// Fixes:
// - The official-package check used startsWith('@parcel'), which also
//   captured unrelated scopes such as "@parcelly/..." and misreported them
//   as misnamed official packages. It now requires the full "@parcel/"
//   scope prefix so other scopes fall through to the generic scoped rule.
// - A bare scope ("@scope", no package part) previously crashed with a
//   TypeError on name.startsWith; it now fails the scoped-name assertion
//   with the descriptive message instead.
function validatePackageName(pkg, pluginType, key) {
  // $FlowFixMe
  if (!pkg) {
    return;
  }
  (0, _assert().default)(typeof pkg === 'string', `"${key}" must be a string`);
  if (pkg.startsWith('@parcel/')) {
    // Official packages: @parcel/{pluginType}-{name}
    (0, _assert().default)(pkg.replace(/^@parcel\//, '').startsWith(`${pluginType}-`), `Official parcel ${pluginType} packages must be named according to "@parcel/${pluginType}-{name}"`);
  } else if (pkg.startsWith('@')) {
    // Third-party scoped packages: @scope/parcel-{pluginType}[-{name}]
    let [scope, name] = pkg.split('/');
    (0, _assert().default)(name != null && (name.startsWith(`parcel-${pluginType}-`) || name === `parcel-${pluginType}`), `Scoped parcel ${pluginType} packages must be named according to "${scope}/parcel-${pluginType}[-{name}]"`);
  } else if (!pkg.startsWith('.')) {
    // Unscoped packages: parcel-{pluginType}-{name}; relative paths are allowed.
    (0, _assert().default)(pkg.startsWith(`parcel-${pluginType}-`), `Parcel ${pluginType} packages must be named according to "parcel-${pluginType}-{name}"`);
  }
}
// Builds a schema __validate hook for a single plugin entry of the given
// type: returns undefined when valid, the failure message otherwise. The
// pipeline spread placeholder '...' is always accepted.
const validatePluginName = (pluginType, key) => (val) => {
  // allow plugin spread...
  if (val === '...') {
    return;
  }
  try {
    validatePackageName(val, pluginType, key);
  } catch (err) {
    return err.message;
  }
};
// __validate hook for an "extends" entry: relative paths are always allowed;
// package names must follow the parcel config-package naming convention.
// Returns undefined when valid, the failure message otherwise.
const validateExtends = (val) => {
  // allow relative paths...
  if (val.startsWith('.')) {
    return;
  }
  try {
    validatePackageName(val, 'config', 'extends');
  } catch (err) {
    return err.message;
  }
};
// JSON schema for an ordered plugin pipeline: an array of package-name
// strings, each validated against the plugin-type naming convention.
const pipelineSchema = (pluginType, key) => ({
  type: 'array',
  items: {
    type: 'string',
    __validate: validatePluginName(pluginType, key)
  }
});
// JSON schema for a glob -> pipeline map (e.g. transformers, optimizers):
// arbitrary glob keys, each mapping to a plugin pipeline.
const mapPipelineSchema = (pluginType, key) => ({
  type: 'object',
  properties: {},
  additionalProperties: pipelineSchema(pluginType, key)
});
// JSON schema for a glob -> single plugin-name map (e.g. packagers):
// arbitrary glob keys, each mapping to one validated package name.
const mapStringSchema = (pluginType, key) => ({
  type: 'object',
  properties: {},
  additionalProperties: {
    type: 'string',
    __validate: validatePluginName(pluginType, key)
  }
});
// JSON schema for .parcelrc files. The __validate hooks enforce the plugin
// package naming convention on every entry; additionalProperties: false
// rejects unknown top-level keys.
var _default = exports.default = {
  type: 'object',
  properties: {
    $schema: {
      type: 'string'
    },
    // "extends" may be a single config reference or a list of them.
    extends: {
      oneOf: [{
        type: 'string',
        __validate: validateExtends
      }, {
        type: 'array',
        items: {
          type: 'string',
          __validate: validateExtends
        }
      }]
    },
    bundler: {
      type: 'string',
      __validate: validatePluginName('bundler', 'bundler')
    },
    resolvers: pipelineSchema('resolver', 'resolvers'),
    transformers: mapPipelineSchema('transformer', 'transformers'),
    validators: mapPipelineSchema('validator', 'validators'),
    namers: pipelineSchema('namer', 'namers'),
    packagers: mapStringSchema('packager', 'packagers'),
    optimizers: mapPipelineSchema('optimizer', 'optimizers'),
    compressors: mapPipelineSchema('compressor', 'compressors'),
    reporters: pipelineSchema('reporter', 'reporters'),
    runtimes: pipelineSchema('runtime', 'runtimes'),
    filePath: {
      type: 'string'
    },
    resolveFrom: {
      type: 'string'
    }
  },
  additionalProperties: false
};

View File

@@ -0,0 +1,140 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
exports.report = report;
exports.reportWorker = reportWorker;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
var _Bundle = require("./public/Bundle");
function _workers() {
const data = _interopRequireWildcard(require("@parcel/workers"));
_workers = function () {
return data;
};
return data;
}
var _ParcelConfig = _interopRequireDefault(require("./ParcelConfig"));
function _logger() {
const data = _interopRequireWildcard(require("@parcel/logger"));
_logger = function () {
return data;
};
return data;
}
var _PluginOptions = _interopRequireDefault(require("./public/PluginOptions"));
var _BundleGraph = _interopRequireDefault(require("./BundleGraph"));
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// All live ReporterRunner instances; the module-level report() helper
// broadcasts events to each of them.
const instances = new Set();
// Dispatches build/log/trace events to all configured reporter plugins, both
// for events raised in this process and for events forwarded from workers
// over the @parcel/workers bus.
class ReporterRunner {
  constructor(opts) {
    this.config = opts.config;
    this.options = opts.options;
    this.workerFarm = opts.workerFarm;
    this.pluginOptions = new _PluginOptions.default(this.options);
    // Route logger and tracer output through the same reporter pipeline.
    _logger().default.onLog(event => this.report(event));
    _profiler().tracer.onTrace(event => this.report(event));
    // Events forwarded from workers (see reportWorker below).
    _workers().bus.on('reporterEvent', this.eventHandler);
    instances.add(this);
    if (this.options.shouldPatchConsole) {
      (0, _logger().patchConsole)();
    } else {
      (0, _logger().unpatchConsole)();
    }
  }
  // Bus handler for worker events. Optimizing/packaging progress events carry
  // an internal bundle plus a shared bundle-graph reference; rehydrate them
  // into public NamedBundle instances before reporting.
  eventHandler = event => {
    if (event.type === 'buildProgress' && (event.phase === 'optimizing' || event.phase === 'packaging') && !(event.bundle instanceof _Bundle.NamedBundle)) {
      // $FlowFixMe[prop-missing]
      let bundleGraphRef = event.bundleGraphRef;
      // $FlowFixMe[incompatible-exact]
      let bundle = event.bundle;
      // Convert any internal bundles back to their public equivalents as reporting
      // is public api
      let bundleGraph = this.workerFarm.workerApi.getSharedReference(
      // $FlowFixMe
      bundleGraphRef);
      (0, _assert().default)(bundleGraph instanceof _BundleGraph.default);
      // $FlowFixMe[incompatible-call]
      this.report({
        ...event,
        bundle: _Bundle.NamedBundle.get(bundle, bundleGraph, this.options)
      });
      return;
    }
    this.report(event);
  };
  // Sends one event to every reporter plugin, loading the reporters lazily
  // on first use.
  async report(event) {
    // We should catch all errors originating from reporter plugins to prevent infinite loops
    try {
      // NOTE(review): concurrent first calls may both trigger getReporters();
      // presumably harmless duplicated loading — confirm if plugin loading
      // has side effects.
      let reporters = this.reporters;
      if (!reporters) {
        this.reporters = await this.config.getReporters();
        reporters = this.reporters;
      }
      for (let reporter of this.reporters) {
        let measurement;
        try {
          // To avoid an infinite loop we don't measure trace events, as they'll
          // result in another trace!
          if (event.type !== 'trace') {
            measurement = _profiler().tracer.createMeasurement(reporter.name, 'reporter');
          }
          await reporter.plugin.report({
            event,
            options: this.pluginOptions,
            logger: new (_logger().PluginLogger)({
              origin: reporter.name
            }),
            tracer: new (_profiler().PluginTracer)({
              origin: reporter.name,
              category: 'reporter'
            })
          });
        } catch (reportError) {
          // A failing reporter is logged to the un-patched console and
          // otherwise ignored so one bad plugin can't break reporting.
          _logger().INTERNAL_ORIGINAL_CONSOLE.error(reportError);
        } finally {
          measurement && measurement.end();
        }
      }
    } catch (err) {
      _logger().INTERNAL_ORIGINAL_CONSOLE.error(err);
    }
  }
  // Detaches this runner from the worker bus and the instance registry.
  dispose() {
    _workers().bus.off('reporterEvent', this.eventHandler);
    instances.delete(this);
  }
}
exports.default = ReporterRunner;
// Worker-side entry point: forwards an event over the workers bus to the
// main process. Bundles in optimizing/packaging progress events are sent in
// their internal form plus a shared bundle-graph reference for cheap
// serialization; ReporterRunner.eventHandler reverses this on the other side.
function reportWorker(workerApi, event) {
  if (event.type === 'buildProgress' && (event.phase === 'optimizing' || event.phase === 'packaging')) {
    // Convert any public api bundles to their internal equivalents for
    // easy serialization
    _workers().bus.emit('reporterEvent', {
      ...event,
      bundle: (0, _Bundle.bundleToInternalBundle)(event.bundle),
      bundleGraphRef: workerApi.resolveSharedReference((0, _Bundle.bundleToInternalBundleGraph)(event.bundle))
    });
    return;
  }
  _workers().bus.emit('reporterEvent', event);
}
// Broadcasts an event to every live ReporterRunner instance in parallel and
// waits for all of them to finish.
async function report(event) {
  const pending = [];
  for (const instance of instances) {
    pending.push(instance.report(event));
  }
  await Promise.all(pending);
}

View File

@@ -0,0 +1,880 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = exports.RequestGraph = void 0;
exports.getWatcherOptions = getWatcherOptions;
exports.requestTypes = exports.requestGraphEdgeTypes = void 0;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
function _graph() {
const data = require("@parcel/graph");
_graph = function () {
return data;
};
return data;
}
var _serializer = require("./serializer");
var _utils2 = require("./utils");
var _projectPath = require("./projectPath");
var _constants = require("./constants");
var _ReporterRunner = require("./ReporterRunner");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Edge types used in the request graph; stored as numbers in the graph.
const requestGraphEdgeTypes = exports.requestGraphEdgeTypes = {
  subrequest: 2,
  invalidated_by_update: 3,
  invalidated_by_delete: 4,
  invalidated_by_create: 5,
  invalidated_by_create_above: 6,
  dirname: 7
};
// Node type tags for request-graph nodes.
const FILE = 0;
const REQUEST = 1;
const FILE_NAME = 2;
const ENV = 3;
const OPTION = 4;
const GLOB = 5;
// Numeric ids for each request kind tracked by the request tracker.
const requestTypes = exports.requestTypes = {
  parcel_build_request: 1,
  bundle_graph_request: 2,
  asset_graph_request: 3,
  entry_request: 4,
  target_request: 5,
  parcel_config_request: 6,
  path_request: 7,
  dev_dep_request: 8,
  asset_request: 9,
  config_request: 10,
  write_bundles_request: 11,
  package_request: 12,
  write_bundle_request: 13,
  validation_request: 14
};
// Node factories. The node id doubles as the content key in the graph.
const nodeFromFilePath = filePath => ({
  id: (0, _projectPath.fromProjectPathRelative)(filePath),
  type: FILE
});
const nodeFromGlob = glob => ({
  id: (0, _projectPath.fromProjectPathRelative)(glob),
  type: GLOB,
  value: glob
});
const nodeFromFileName = fileName => ({
  id: 'file_name:' + fileName,
  type: FILE_NAME
});
const nodeFromRequest = request => ({
  id: request.id,
  type: REQUEST,
  requestType: request.requestType,
  invalidateReason: _constants.INITIAL_BUILD
});
const nodeFromEnv = (env, value) => ({
  id: 'env:' + env,
  type: ENV,
  value
});
const nodeFromOption = (option, value) => ({
  id: 'option:' + option,
  type: OPTION,
  hash: (0, _utils2.hashFromOption)(value)
});
// Inverse of the 'env:' / 'option:' content-key prefixes above.
const keyFromEnvContentKey = contentKey => contentKey.slice('env:'.length);
const keyFromOptionContentKey = contentKey => contentKey.slice('option:'.length);
class RequestGraph extends _graph().ContentGraph {
// Requests whose cached result is stale and must be rerun.
invalidNodeIds = new Set();
// Requests currently running (no result yet).
incompleteNodeIds = new Set();
// nodeId -> promise resolving when the in-flight request completes.
incompleteNodePromises = new Map();
// Per-type node-id indexes, kept for fast invalidation scans.
globNodeIds = new Set();
envNodeIds = new Set();
optionNodeIds = new Set();
// Unpredictable nodes are requests that cannot be predicted whether they should rerun based on
// filesystem changes alone. They should rerun on each startup of Parcel.
unpredicatableNodeIds = new Set();
// Requests invalidated at the start of every build.
invalidateOnBuildNodeIds = new Set();
// Request chunks already written to the cache; entries are dropped when the
// corresponding request is invalidated.
cachedRequestChunks = new Set();
// $FlowFixMe[prop-missing]
// Restores a RequestGraph from serialize()'s output, reattaching the
// auxiliary node-id indexes the base ContentGraph doesn't know about.
static deserialize(opts) {
  // $FlowFixMe[prop-missing]
  let deserialized = new RequestGraph(opts);
  deserialized.invalidNodeIds = opts.invalidNodeIds;
  deserialized.incompleteNodeIds = opts.incompleteNodeIds;
  deserialized.globNodeIds = opts.globNodeIds;
  deserialized.envNodeIds = opts.envNodeIds;
  deserialized.optionNodeIds = opts.optionNodeIds;
  deserialized.unpredicatableNodeIds = opts.unpredicatableNodeIds;
  deserialized.invalidateOnBuildNodeIds = opts.invalidateOnBuildNodeIds;
  deserialized.cachedRequestChunks = opts.cachedRequestChunks;
  return deserialized;
}
// $FlowFixMe[prop-missing]
// Serializes the base graph plus the auxiliary indexes. Note that
// incompleteNodePromises is not included here (promises can't serialize).
serialize() {
  return {
    ...super.serialize(),
    invalidNodeIds: this.invalidNodeIds,
    incompleteNodeIds: this.incompleteNodeIds,
    globNodeIds: this.globNodeIds,
    envNodeIds: this.envNodeIds,
    optionNodeIds: this.optionNodeIds,
    unpredicatableNodeIds: this.unpredicatableNodeIds,
    invalidateOnBuildNodeIds: this.invalidateOnBuildNodeIds,
    cachedRequestChunks: this.cachedRequestChunks
  };
}
// addNode for RequestGraph should not override the value if added multiple times
// Returns the existing node id when the content key is already present;
// otherwise adds the node and records it in the per-type index.
addNode(node) {
  let nodeId = this._contentKeyToNodeId.get(node.id);
  if (nodeId != null) {
    return nodeId;
  }
  nodeId = super.addNodeByContentKey(node.id, node);
  if (node.type === GLOB) {
    this.globNodeIds.add(nodeId);
  } else if (node.type === ENV) {
    this.envNodeIds.add(nodeId);
  } else if (node.type === OPTION) {
    this.optionNodeIds.add(nodeId);
  }
  return nodeId;
}
// Removes a node from the graph and scrubs it from every auxiliary index.
removeNode(nodeId) {
  this.invalidNodeIds.delete(nodeId);
  this.incompleteNodeIds.delete(nodeId);
  this.incompleteNodePromises.delete(nodeId);
  this.unpredicatableNodeIds.delete(nodeId);
  this.invalidateOnBuildNodeIds.delete(nodeId);
  let node = (0, _nullthrows().default)(this.getNode(nodeId));
  if (node.type === GLOB) {
    this.globNodeIds.delete(nodeId);
  } else if (node.type === ENV) {
    this.envNodeIds.delete(nodeId);
  } else if (node.type === OPTION) {
    this.optionNodeIds.delete(nodeId);
  }
  return super.removeNode(nodeId);
}
// Returns the node for nodeId, asserting that it is a REQUEST node.
getRequestNode(nodeId) {
  let node = (0, _nullthrows().default)(this.getNode(nodeId));
  (0, _assert().default)(node.type === REQUEST);
  return node;
}
// Replaces the request's outgoing subrequest edges with edges to the given
// content keys; keys not present in the graph are silently skipped.
replaceSubrequests(requestNodeId, subrequestContentKeys) {
  let subrequestNodeIds = [];
  for (let key of subrequestContentKeys) {
    if (this.hasContentKey(key)) {
      subrequestNodeIds.push(this.getNodeIdByContentKey(key));
    }
  }
  this.replaceNodeIdsConnectedTo(requestNodeId, subrequestNodeIds, null, requestGraphEdgeTypes.subrequest);
}
// Marks a request node invalid and propagates the invalidation to every
// request that depends on it through a subrequest edge (recursively).
invalidateNode(nodeId, reason) {
  let node = (0, _nullthrows().default)(this.getNode(nodeId));
  (0, _assert().default)(node.type === REQUEST);
  // Invalidation reasons accumulate as a bitmask.
  node.invalidateReason |= reason;
  this.invalidNodeIds.add(nodeId);
  let parentNodes = this.getNodeIdsConnectedTo(nodeId, requestGraphEdgeTypes.subrequest);
  for (let parentNode of parentNodes) {
    this.invalidateNode(parentNode, reason);
  }
  // If the node is invalidated, the cached request chunk on disk needs to be re-written
  this.removeCachedRequestChunkForNode(nodeId);
}
// Invalidates all requests registered via invalidateOnStartup(); run once
// when Parcel starts up.
invalidateUnpredictableNodes() {
  for (let nodeId of this.unpredicatableNodeIds) {
    let node = (0, _nullthrows().default)(this.getNode(nodeId));
    (0, _assert().default)(node.type !== FILE && node.type !== GLOB);
    this.invalidateNode(nodeId, _constants.STARTUP);
  }
}
// Invalidates all requests registered via invalidateOnBuild(); run at the
// start of each build.
invalidateOnBuildNodes() {
  for (let nodeId of this.invalidateOnBuildNodeIds) {
    let node = (0, _nullthrows().default)(this.getNode(nodeId));
    (0, _assert().default)(node.type !== FILE && node.type !== GLOB);
    this.invalidateNode(nodeId, _constants.STARTUP);
  }
}
// Invalidates every request connected to an ENV node whose tracked value
// differs from the current environment.
invalidateEnvNodes(env) {
  for (let nodeId of this.envNodeIds) {
    let node = (0, _nullthrows().default)(this.getNode(nodeId));
    (0, _assert().default)(node.type === ENV);
    if (env[keyFromEnvContentKey(node.id)] !== node.value) {
      let parentNodes = this.getNodeIdsConnectedTo(nodeId, requestGraphEdgeTypes.invalidated_by_update);
      for (let parentNode of parentNodes) {
        this.invalidateNode(parentNode, _constants.ENV_CHANGE);
      }
    }
  }
}
// Invalidates every request connected to an OPTION node whose tracked hash
// differs from the current option value's hash.
invalidateOptionNodes(options) {
  for (let nodeId of this.optionNodeIds) {
    let node = (0, _nullthrows().default)(this.getNode(nodeId));
    (0, _assert().default)(node.type === OPTION);
    if ((0, _utils2.hashFromOption)(options[keyFromOptionContentKey(node.id)]) !== node.hash) {
      let parentNodes = this.getNodeIdsConnectedTo(nodeId, requestGraphEdgeTypes.invalidated_by_update);
      for (let parentNode of parentNodes) {
        this.invalidateNode(parentNode, _constants.OPTION_CHANGE);
      }
    }
  }
}
// Registers the request to be invalidated when filePath is updated.
invalidateOnFileUpdate(requestNodeId, filePath) {
  let fileNodeId = this.addNode(nodeFromFilePath(filePath));
  if (!this.hasEdge(requestNodeId, fileNodeId, requestGraphEdgeTypes.invalidated_by_update)) {
    this.addEdge(requestNodeId, fileNodeId, requestGraphEdgeTypes.invalidated_by_update);
  }
}
// Registers the request to be invalidated when filePath is deleted.
invalidateOnFileDelete(requestNodeId, filePath) {
  let fileNodeId = this.addNode(nodeFromFilePath(filePath));
  if (!this.hasEdge(requestNodeId, fileNodeId, requestGraphEdgeTypes.invalidated_by_delete)) {
    this.addEdge(requestNodeId, fileNodeId, requestGraphEdgeTypes.invalidated_by_delete);
  }
}
// Registers the request to be invalidated when a file matching `input` is
// created. Supports three forms: a glob, a {fileName, aboveFilePath} pair
// (invalidate if fileName appears in any directory above aboveFilePath), or
// a concrete filePath.
invalidateOnFileCreate(requestNodeId, input) {
  let node;
  if (input.glob != null) {
    node = nodeFromGlob(input.glob);
  } else if (input.fileName != null && input.aboveFilePath != null) {
    let aboveFilePath = input.aboveFilePath;
    // Create nodes and edges for each part of the filename pattern.
    // For example, 'node_modules/foo' would create two nodes and one edge.
    // This creates a sort of trie structure within the graph that can be
    // quickly matched by following the edges. This is also memory efficient
    // since common sub-paths (e.g. 'node_modules') are deduplicated.
    let parts = input.fileName.split('/').reverse();
    let lastNodeId;
    for (let part of parts) {
      let fileNameNode = nodeFromFileName(part);
      let fileNameNodeId = this.addNode(fileNameNode);
      if (lastNodeId != null && !this.hasEdge(lastNodeId, fileNameNodeId, requestGraphEdgeTypes.dirname)) {
        this.addEdge(lastNodeId, fileNameNodeId, requestGraphEdgeTypes.dirname);
      }
      lastNodeId = fileNameNodeId;
    }
    // The `aboveFilePath` condition asserts that requests are only invalidated
    // if the file being created is "above" it in the filesystem (e.g. the file
    // is created in a parent directory). There is likely to already be a node
    // for this file in the graph (e.g. the source file) that we can reuse for this.
    node = nodeFromFilePath(aboveFilePath);
    let nodeId = this.addNode(node);
    // Now create an edge from the `aboveFilePath` node to the first file_name node
    // in the chain created above, and an edge from the last node in the chain back to
    // the `aboveFilePath` node. When matching, we will start from the first node in
    // the chain, and continue following it to parent directories until there is an
    // edge pointing an `aboveFilePath` node that also points to the start of the chain.
    // This indicates a complete match, and any requests attached to the `aboveFilePath`
    // node will be invalidated.
    let firstId = 'file_name:' + parts[0];
    let firstNodeId = this.getNodeIdByContentKey(firstId);
    if (!this.hasEdge(nodeId, firstNodeId, requestGraphEdgeTypes.invalidated_by_create_above)) {
      this.addEdge(nodeId, firstNodeId, requestGraphEdgeTypes.invalidated_by_create_above);
    }
    (0, _assert().default)(lastNodeId != null);
    if (!this.hasEdge(lastNodeId, nodeId, requestGraphEdgeTypes.invalidated_by_create_above)) {
      this.addEdge(lastNodeId, nodeId, requestGraphEdgeTypes.invalidated_by_create_above);
    }
  } else if (input.filePath != null) {
    node = nodeFromFilePath(input.filePath);
  } else {
    throw new Error('Invalid invalidation');
  }
  // Finally connect the request to whichever node form was chosen above.
  let nodeId = this.addNode(node);
  if (!this.hasEdge(requestNodeId, nodeId, requestGraphEdgeTypes.invalidated_by_create)) {
    this.addEdge(requestNodeId, nodeId, requestGraphEdgeTypes.invalidated_by_create);
  }
}
invalidateOnStartup(requestNodeId) {
this.getRequestNode(requestNodeId);
this.unpredicatableNodeIds.add(requestNodeId);
}
invalidateOnBuild(requestNodeId) {
this.getRequestNode(requestNodeId);
this.invalidateOnBuildNodeIds.add(requestNodeId);
}
invalidateOnEnvChange(requestNodeId, env, value) {
let envNode = nodeFromEnv(env, value);
let envNodeId = this.addNode(envNode);
if (!this.hasEdge(requestNodeId, envNodeId, requestGraphEdgeTypes.invalidated_by_update)) {
this.addEdge(requestNodeId, envNodeId, requestGraphEdgeTypes.invalidated_by_update);
}
}
invalidateOnOptionChange(requestNodeId, option, value) {
let optionNode = nodeFromOption(option, value);
let optionNodeId = this.addNode(optionNode);
if (!this.hasEdge(requestNodeId, optionNodeId, requestGraphEdgeTypes.invalidated_by_update)) {
this.addEdge(requestNodeId, optionNodeId, requestGraphEdgeTypes.invalidated_by_update);
}
}
clearInvalidations(nodeId) {
this.unpredicatableNodeIds.delete(nodeId);
this.invalidateOnBuildNodeIds.delete(nodeId);
this.replaceNodeIdsConnectedTo(nodeId, [], null, requestGraphEdgeTypes.invalidated_by_update);
this.replaceNodeIdsConnectedTo(nodeId, [], null, requestGraphEdgeTypes.invalidated_by_delete);
this.replaceNodeIdsConnectedTo(nodeId, [], null, requestGraphEdgeTypes.invalidated_by_create);
}
getInvalidations(requestNodeId) {
if (!this.hasNode(requestNodeId)) {
return [];
}
// For now just handling updates. Could add creates/deletes later if needed.
let invalidations = this.getNodeIdsConnectedFrom(requestNodeId, requestGraphEdgeTypes.invalidated_by_update);
return invalidations.map(nodeId => {
let node = (0, _nullthrows().default)(this.getNode(nodeId));
switch (node.type) {
case FILE:
return {
type: 'file',
filePath: (0, _projectPath.toProjectPathUnsafe)(node.id)
};
case ENV:
return {
type: 'env',
key: keyFromEnvContentKey(node.id)
};
case OPTION:
return {
type: 'option',
key: keyFromOptionContentKey(node.id)
};
}
}).filter(Boolean);
}
getSubRequests(requestNodeId) {
if (!this.hasNode(requestNodeId)) {
return [];
}
let subRequests = this.getNodeIdsConnectedFrom(requestNodeId, requestGraphEdgeTypes.subrequest);
return subRequests.map(nodeId => {
let node = (0, _nullthrows().default)(this.getNode(nodeId));
(0, _assert().default)(node.type === REQUEST);
return node;
});
}
getInvalidSubRequests(requestNodeId) {
if (!this.hasNode(requestNodeId)) {
return [];
}
let subRequests = this.getNodeIdsConnectedFrom(requestNodeId, requestGraphEdgeTypes.subrequest);
return subRequests.filter(id => this.invalidNodeIds.has(id)).map(nodeId => {
let node = (0, _nullthrows().default)(this.getNode(nodeId));
(0, _assert().default)(node.type === REQUEST);
return node;
});
}
  // Walks up the file_name trie starting at `node` for a newly-created
  // `filePath`, invalidating requests attached to any `matchNodes` (file
  // nodes registered via invalidateOnFileCreate's aboveFilePath form) whose
  // directory contains the created file.
  invalidateFileNameNode(node, filePath, matchNodes) {
    // If there is an edge between this file_name node and one of the original file nodes pointed to
    // by the original file_name node, and the matched node is inside the current directory, invalidate
    // all connected requests pointed to by the file node.
    let dirname = _path().default.dirname((0, _projectPath.fromProjectPathRelative)(filePath));
    let nodeId = this.getNodeIdByContentKey(node.id);
    for (let matchNode of matchNodes) {
      let matchNodeId = this.getNodeIdByContentKey(matchNode.id);
      if (this.hasEdge(nodeId, matchNodeId, requestGraphEdgeTypes.invalidated_by_create_above) && (0, _utils().isDirectoryInside)((0, _projectPath.fromProjectPathRelative)((0, _projectPath.toProjectPathUnsafe)(matchNode.id)), dirname)) {
        let connectedNodes = this.getNodeIdsConnectedTo(matchNodeId, requestGraphEdgeTypes.invalidated_by_create);
        for (let connectedNode of connectedNodes) {
          this.invalidateNode(connectedNode, _constants.FILE_CREATE);
        }
      }
    }
    // Find the `file_name` node for the parent directory and
    // recursively invalidate connected requests as described above.
    let basename = _path().default.basename(dirname);
    let contentKey = 'file_name:' + basename;
    if (this.hasContentKey(contentKey)) {
      // Only recurse if the parent segment is actually linked to this node in
      // the trie (i.e. the created path still matches the registered pattern).
      if (this.hasEdge(nodeId, this.getNodeIdByContentKey(contentKey), requestGraphEdgeTypes.dirname)) {
        let parent = (0, _nullthrows().default)(this.getNodeByContentKey(contentKey));
        (0, _assert().default)(parent.type === FILE_NAME);
        this.invalidateFileNameNode(parent, (0, _projectPath.toProjectPathUnsafe)(dirname), matchNodes);
      }
    }
  }
  // Applies watcher events ('create' | 'update' | 'delete') to the graph,
  // invalidating any requests whose invalidation edges match. Returns true
  // only when at least one node was invalidated during this call AND there
  // are currently invalid nodes.
  respondToFSEvents(events) {
    let didInvalidate = false;
    for (let {
      path: _filePath,
      type
    } of events) {
      let filePath = (0, _projectPath.fromProjectPathRelative)(_filePath);
      let hasFileRequest = this.hasContentKey(filePath);
      // If we see a 'create' event for the project root itself,
      // this means the project root was moved and we need to
      // re-run all requests.
      if (type === 'create' && filePath === '') {
        for (let [id, node] of this.nodes.entries()) {
          if ((node === null || node === void 0 ? void 0 : node.type) === REQUEST) {
            this.invalidNodeIds.add(id);
          }
        }
        // Early-exit: everything is invalid, no need to process more events.
        return true;
      }
      // sometimes mac os reports update events as create events.
      // if it was a create event, but the file already exists in the graph,
      // then also invalidate nodes connected by invalidated_by_update edges.
      if (hasFileRequest && (type === 'create' || type === 'update')) {
        let nodeId = this.getNodeIdByContentKey(filePath);
        let nodes = this.getNodeIdsConnectedTo(nodeId, requestGraphEdgeTypes.invalidated_by_update);
        for (let connectedNode of nodes) {
          didInvalidate = true;
          this.invalidateNode(connectedNode, _constants.FILE_UPDATE);
        }
        if (type === 'create') {
          let nodes = this.getNodeIdsConnectedTo(nodeId, requestGraphEdgeTypes.invalidated_by_create);
          for (let connectedNode of nodes) {
            didInvalidate = true;
            this.invalidateNode(connectedNode, _constants.FILE_CREATE);
          }
        }
      } else if (type === 'create') {
        // Created file was not previously known: match it against registered
        // file_name patterns and globs.
        let basename = _path().default.basename(filePath);
        let fileNameNode = this.getNodeByContentKey('file_name:' + basename);
        if (fileNameNode != null && fileNameNode.type === FILE_NAME) {
          let fileNameNodeId = this.getNodeIdByContentKey('file_name:' + basename);
          // Find potential file nodes to be invalidated if this file name pattern matches
          let above = [];
          for (const nodeId of this.getNodeIdsConnectedTo(fileNameNodeId, requestGraphEdgeTypes.invalidated_by_create_above)) {
            let node = (0, _nullthrows().default)(this.getNode(nodeId));
            // these might also be `glob` nodes which get handled below, we only care about files here.
            if (node.type === FILE) {
              above.push(node);
            }
          }
          if (above.length > 0) {
            didInvalidate = true;
            this.invalidateFileNameNode(fileNameNode, _filePath, above);
          }
        }
        // NOTE(review): `globeNodeId` looks like a typo for `globNodeId`; it is
        // a local variable only, so behavior is unaffected.
        for (let globeNodeId of this.globNodeIds) {
          let globNode = this.getNode(globeNodeId);
          (0, _assert().default)(globNode && globNode.type === GLOB);
          if ((0, _utils().isGlobMatch)(filePath, (0, _projectPath.fromProjectPathRelative)(globNode.value))) {
            let connectedNodes = this.getNodeIdsConnectedTo(globeNodeId, requestGraphEdgeTypes.invalidated_by_create);
            for (let connectedNode of connectedNodes) {
              didInvalidate = true;
              this.invalidateNode(connectedNode, _constants.FILE_CREATE);
            }
          }
        }
      } else if (hasFileRequest && type === 'delete') {
        let nodeId = this.getNodeIdByContentKey(filePath);
        for (let connectedNode of this.getNodeIdsConnectedTo(nodeId, requestGraphEdgeTypes.invalidated_by_delete)) {
          didInvalidate = true;
          this.invalidateNode(connectedNode, _constants.FILE_DELETE);
        }
        // Delete the file node since it doesn't exist anymore.
        // This ensures that files that don't exist aren't sent
        // to requests as invalidations for future requests.
        this.removeNode(nodeId);
      }
    }
    return didInvalidate && this.invalidNodeIds.size > 0;
  }
hasCachedRequestChunk(index) {
return this.cachedRequestChunks.has(index);
}
setCachedRequestChunk(index) {
this.cachedRequestChunks.add(index);
}
removeCachedRequestChunkForNode(nodeId) {
this.cachedRequestChunks.delete(Math.floor(nodeId / NODES_PER_BLOB));
}
}
// This constant is chosen by local profiling the time to serialise n nodes and tuning until an average time of ~50 ms per blob.
// The goal is to free up the event loop periodically to allow interruption by the user.
exports.RequestGraph = RequestGraph;
// Number of graph nodes serialized per cache blob (2 ** 14 = 16384).
const NODES_PER_BLOB = 2 ** 14;
// Coordinates running requests against a RequestGraph: deduplicates in-flight
// requests, records invalidations via the per-request API object, and
// serializes/restores the graph to and from the cache between builds.
class RequestTracker {
  constructor({
    graph,
    farm,
    options
  }) {
    // Falls back to a fresh RequestGraph when no cached graph was provided.
    this.graph = graph || new RequestGraph();
    this.farm = farm;
    this.options = options;
  }
  // TODO: refactor (abortcontroller should be created by RequestTracker)
  setSignal(signal) {
    this.signal = signal;
  }
  // Creates (or reuses) the graph node for `request`, marks it incomplete, and
  // returns its node id plus a deferred that runRequest resolves on completion.
  startRequest(request) {
    let didPreviouslyExist = this.graph.hasContentKey(request.id);
    let requestNodeId;
    if (didPreviouslyExist) {
      requestNodeId = this.graph.getNodeIdByContentKey(request.id);
      // Clear existing invalidations for the request so that the new
      // invalidations created during the request replace the existing ones.
      this.graph.clearInvalidations(requestNodeId);
    } else {
      requestNodeId = this.graph.addNode(nodeFromRequest(request));
    }
    this.graph.incompleteNodeIds.add(requestNodeId);
    this.graph.invalidNodeIds.delete(requestNodeId);
    // The promise lets concurrent callers of the same request await this run.
    let {
      promise,
      deferred
    } = (0, _utils().makeDeferredWithPromise)();
    this.graph.incompleteNodePromises.set(requestNodeId, promise);
    return {
      requestNodeId,
      deferred
    };
  }
  // If a cache key is provided, the result will be removed from the node and stored in a separate cache entry
  storeResult(nodeId, result, cacheKey) {
    let node = this.graph.getNode(nodeId);
    if (node && node.type === REQUEST) {
      node.result = result;
      node.resultCacheKey = cacheKey;
    }
  }
  // A result is valid when the node exists and is neither invalidated nor
  // still running.
  hasValidResult(nodeId) {
    return this.graph.hasNode(nodeId) && !this.graph.invalidNodeIds.has(nodeId) && !this.graph.incompleteNodeIds.has(nodeId);
  }
  // Returns the stored result for a request, loading (and memoizing) it from
  // the cache when only a resultCacheKey is present. With `ifMatch`, returns
  // null unless the stored cache key matches.
  async getRequestResult(contentKey, ifMatch) {
    let node = (0, _nullthrows().default)(this.graph.getNodeByContentKey(contentKey));
    (0, _assert().default)(node.type === REQUEST);
    if (ifMatch != null && node.resultCacheKey !== ifMatch) {
      return null;
    }
    if (node.result != undefined) {
      // $FlowFixMe
      let result = node.result;
      return result;
    } else if (node.resultCacheKey != null && ifMatch == null) {
      let key = node.resultCacheKey;
      (0, _assert().default)(this.options.cache.hasLargeBlob(key));
      let cachedResult = (0, _serializer.deserialize)(await this.options.cache.getLargeBlob(key));
      node.result = cachedResult;
      return cachedResult;
    }
  }
  // Marks a request as successfully finished and schedules its chunk for
  // re-serialization.
  completeRequest(nodeId) {
    this.graph.invalidNodeIds.delete(nodeId);
    this.graph.incompleteNodeIds.delete(nodeId);
    this.graph.incompleteNodePromises.delete(nodeId);
    let node = this.graph.getNode(nodeId);
    if (node && node.type === REQUEST) {
      node.invalidateReason = _constants.VALID;
    }
    this.graph.removeCachedRequestChunkForNode(nodeId);
  }
  // Marks a request as failed; it stays invalid so it re-runs next time.
  rejectRequest(nodeId) {
    this.graph.incompleteNodeIds.delete(nodeId);
    this.graph.incompleteNodePromises.delete(nodeId);
    let node = this.graph.getNode(nodeId);
    if ((node === null || node === void 0 ? void 0 : node.type) === REQUEST) {
      this.graph.invalidateNode(nodeId, _constants.ERROR);
    }
  }
  // Forwards watcher events to the graph; see RequestGraph.respondToFSEvents.
  respondToFSEvents(events) {
    return this.graph.respondToFSEvents(events);
  }
  hasInvalidRequests() {
    return this.graph.invalidNodeIds.size > 0;
  }
  // Returns all request nodes currently marked invalid.
  getInvalidRequests() {
    let invalidRequests = [];
    for (let id of this.graph.invalidNodeIds) {
      let node = (0, _nullthrows().default)(this.graph.getNode(id));
      (0, _assert().default)(node.type === REQUEST);
      invalidRequests.push(node);
    }
    return invalidRequests;
  }
  replaceSubrequests(requestNodeId, subrequestContextKeys) {
    this.graph.replaceSubrequests(requestNodeId, subrequestContextKeys);
  }
  // Runs `request`, reusing a valid cached result (unless opts.force) or an
  // already-in-flight run of the same request. On success resolves the
  // request's deferred with true so concurrent waiters reuse the result.
  async runRequest(request, opts) {
    let requestId = this.graph.hasContentKey(request.id) ? this.graph.getNodeIdByContentKey(request.id) : undefined;
    let hasValidResult = requestId != null && this.hasValidResult(requestId);
    if (!(opts !== null && opts !== void 0 && opts.force) && hasValidResult) {
      // $FlowFixMe[incompatible-type]
      return this.getRequestResult(request.id);
    }
    if (requestId != null) {
      let incompletePromise = this.graph.incompleteNodePromises.get(requestId);
      if (incompletePromise != null) {
        // There is a another instance of this request already running, wait for its completion and reuse its result
        try {
          if (await incompletePromise) {
            // $FlowFixMe[incompatible-type]
            return this.getRequestResult(request.id);
          }
        } catch (e) {
          // Rerun this request
        }
      }
    }
    // Captured before startRequest clears them, so the request can inspect
    // what invalidated it via api.getInvalidations().
    let previousInvalidations = requestId != null ? this.graph.getInvalidations(requestId) : [];
    let {
      requestNodeId,
      deferred
    } = this.startRequest({
      id: request.id,
      type: REQUEST,
      requestType: request.type,
      invalidateReason: _constants.INITIAL_BUILD
    });
    let {
      api,
      subRequestContentKeys
    } = this.createAPI(requestNodeId, previousInvalidations);
    try {
      let node = this.graph.getRequestNode(requestNodeId);
      let result = await request.run({
        input: request.input,
        api,
        farm: this.farm,
        options: this.options,
        invalidateReason: node.invalidateReason
      });
      (0, _utils2.assertSignalNotAborted)(this.signal);
      this.completeRequest(requestNodeId);
      deferred.resolve(true);
      return result;
    } catch (err) {
      this.rejectRequest(requestNodeId);
      deferred.resolve(false);
      throw err;
    } finally {
      this.graph.replaceSubrequests(requestNodeId, [...subRequestContentKeys]);
    }
  }
  // Builds the API object handed to a running request. All invalidation calls
  // are forwarded to the graph against `requestId`; sub-request content keys
  // are collected so runRequest can record subrequest edges afterwards.
  createAPI(requestId, previousInvalidations) {
    let subRequestContentKeys = new Set();
    return {
      api: {
        invalidateOnFileCreate: input => this.graph.invalidateOnFileCreate(requestId, input),
        invalidateOnFileDelete: filePath => this.graph.invalidateOnFileDelete(requestId, filePath),
        invalidateOnFileUpdate: filePath => this.graph.invalidateOnFileUpdate(requestId, filePath),
        invalidateOnStartup: () => this.graph.invalidateOnStartup(requestId),
        invalidateOnBuild: () => this.graph.invalidateOnBuild(requestId),
        invalidateOnEnvChange: env => this.graph.invalidateOnEnvChange(requestId, env, this.options.env[env]),
        invalidateOnOptionChange: option => this.graph.invalidateOnOptionChange(requestId, option, this.options[option]),
        getInvalidations: () => previousInvalidations,
        storeResult: (result, cacheKey) => {
          this.storeResult(requestId, result, cacheKey);
        },
        getSubRequests: () => this.graph.getSubRequests(requestId),
        getInvalidSubRequests: () => this.graph.getInvalidSubRequests(requestId),
        getPreviousResult: ifMatch => {
          var _this$graph$getNode;
          let contentKey = (0, _nullthrows().default)((_this$graph$getNode = this.graph.getNode(requestId)) === null || _this$graph$getNode === void 0 ? void 0 : _this$graph$getNode.id);
          return this.getRequestResult(contentKey, ifMatch);
        },
        getRequestResult: id => this.getRequestResult(id),
        // Skips a subrequest when a valid cached result exists, while still
        // recording the dependency edge.
        canSkipSubrequest: contentKey => {
          if (this.graph.hasContentKey(contentKey) && this.hasValidResult(this.graph.getNodeIdByContentKey(contentKey))) {
            subRequestContentKeys.add(contentKey);
            return true;
          }
          return false;
        },
        runRequest: (subRequest, opts) => {
          subRequestContentKeys.add(subRequest.id);
          return this.runRequest(subRequest, opts);
        }
      },
      subRequestContentKeys
    };
  }
  // Serializes the request graph to the cache in NODES_PER_BLOB-sized chunks
  // (skipping chunks already written), stores large request results as
  // separate blobs, and writes a watcher snapshot. Aborting via `signal`
  // cancels outstanding writes without throwing.
  async writeToCache(signal) {
    let cacheKey = getCacheKey(this.options);
    let hashedCacheKey = (0, _rust().hashString)(cacheKey);
    if (this.options.shouldDisableCache) {
      return;
    }
    let serialisedGraph = this.graph.serialize();
    let total = 0;
    const serialiseAndSet = async (key, contents) => {
      if (signal !== null && signal !== void 0 && signal.aborted) {
        throw new Error('Serialization was aborted');
      }
      await this.options.cache.setLargeBlob(key, (0, _serializer.serialize)(contents), signal ? {
        signal: signal
      } : undefined);
      total += 1;
      (0, _ReporterRunner.report)({
        type: 'cache',
        phase: 'write',
        total,
        size: this.graph.nodes.length
      });
    };
    let queue = new (_utils().PromiseQueue)({
      maxConcurrent: 32
    });
    (0, _ReporterRunner.report)({
      type: 'cache',
      phase: 'start',
      total,
      size: this.graph.nodes.length
    });
    // Preallocating a sparse array is faster than pushing when N is high enough
    let cacheableNodes = new Array(serialisedGraph.nodes.length);
    for (let i = 0; i < serialisedGraph.nodes.length; i += 1) {
      let node = serialisedGraph.nodes[i];
      let resultCacheKey = node === null || node === void 0 ? void 0 : node.resultCacheKey;
      if ((node === null || node === void 0 ? void 0 : node.type) === REQUEST && resultCacheKey != null && (node === null || node === void 0 ? void 0 : node.result) != null) {
        // Large results are written as their own blobs and stripped from the
        // node before the node chunk is serialized.
        queue.add(() => serialiseAndSet(resultCacheKey, node.result)).catch(() => {
          // Handle promise rejection
        });
        // eslint-disable-next-line no-unused-vars
        let {
          result: _,
          ...newNode
        } = node;
        cacheableNodes[i] = newNode;
      } else {
        cacheableNodes[i] = node;
      }
    }
    for (let i = 0; i * NODES_PER_BLOB < cacheableNodes.length; i += 1) {
      if (!this.graph.hasCachedRequestChunk(i)) {
        // We assume the request graph nodes are immutable and won't change
        queue.add(() => serialiseAndSet(getRequestGraphNodeKey(i, hashedCacheKey), cacheableNodes.slice(i * NODES_PER_BLOB, (i + 1) * NODES_PER_BLOB)).then(() => {
          // Succeeded in writing to disk, save that we have completed this chunk
          this.graph.setCachedRequestChunk(i);
        })).catch(() => {
          // Handle promise rejection
        });
      }
    }
    // The graph skeleton (everything except nodes) is stored separately.
    queue.add(() => serialiseAndSet(`requestGraph-${hashedCacheKey}`, {
      ...serialisedGraph,
      nodes: undefined
    })).catch(() => {
      // Handle promise rejection
    });
    let opts = getWatcherOptions(this.options);
    let snapshotPath = _path().default.join(this.options.cacheDir, `snapshot-${hashedCacheKey}` + '.txt');
    queue.add(() => this.options.inputFS.writeSnapshot(this.options.projectRoot, snapshotPath, opts)).catch(() => {
      // Handle promise rejection
    });
    try {
      await queue.run();
    } catch (err) {
      // If we have aborted, ignore the error and continue
      if (!(signal !== null && signal !== void 0 && signal.aborted)) throw err;
    }
    (0, _ReporterRunner.report)({
      type: 'cache',
      phase: 'end',
      total,
      size: this.graph.nodes.length
    });
  }
  // Async factory: restores the request graph from the cache (or creates a
  // fresh one) before constructing the tracker.
  static async init({
    farm,
    options
  }) {
    let graph = await loadRequestGraph(options);
    return new RequestTracker({
      farm,
      options,
      graph
    });
  }
}
// RequestTracker is this module's default export.
exports.default = RequestTracker;
// Builds the options passed to the file watcher: ignore the cache directory
// and VCS metadata directories (.git, .hg) under the project root.
function getWatcherOptions(options) {
  const vcsDirs = ['.git', '.hg'].map(dir => _path().default.join(options.projectRoot, dir));
  return {
    ignore: [options.cacheDir, ...vcsDirs]
  };
}
// Cache key for the request graph. Any change to Parcel's version, the
// entries, the mode, or lazy/eager building must yield a different key so a
// stale graph is never reused.
function getCacheKey(options) {
  const lazyMode = options.shouldBuildLazily ? 'lazy' : 'eager';
  return [_constants.PARCEL_VERSION, JSON.stringify(options.entries), options.mode, lazyMode].join(':');
}
// Cache key for one chunk (blob) of serialized request-graph nodes.
function getRequestGraphNodeKey(index, hashedCacheKey) {
  return ['requestGraph-nodes', index, hashedCacheKey].join('-');
}
// Restores the RequestGraph from the cache if a serialized copy exists for
// the current cache key; otherwise returns a fresh graph. After restoring,
// replays file-system events recorded since the last snapshot and
// invalidates startup/build/env/option-dependent nodes.
async function loadRequestGraph(options) {
  if (options.shouldDisableCache) {
    return new RequestGraph();
  }
  let cacheKey = getCacheKey(options);
  let hashedCacheKey = (0, _rust().hashString)(cacheKey);
  let requestGraphKey = `requestGraph-${hashedCacheKey}`;
  if (await options.cache.hasLargeBlob(requestGraphKey)) {
    const getAndDeserialize = async key => {
      return (0, _serializer.deserialize)(await options.cache.getLargeBlob(key));
    };
    // Node chunks are numbered sequentially; read until the first gap.
    let i = 0;
    let nodePromises = [];
    while (await options.cache.hasLargeBlob(getRequestGraphNodeKey(i, hashedCacheKey))) {
      nodePromises.push(getAndDeserialize(getRequestGraphNodeKey(i, hashedCacheKey)));
      i += 1;
    }
    let serializedRequestGraph = await getAndDeserialize(requestGraphKey);
    let requestGraph = RequestGraph.deserialize({
      ...serializedRequestGraph,
      nodes: (await Promise.all(nodePromises)).flatMap(nodeChunk => nodeChunk)
    });
    let opts = getWatcherOptions(options);
    let snapshotPath = _path().default.join(options.cacheDir, `snapshot-${hashedCacheKey}` + '.txt');
    let events = await options.inputFS.getEventsSince(options.watchDir, snapshotPath, opts);
    requestGraph.invalidateUnpredictableNodes();
    requestGraph.invalidateOnBuildNodes();
    requestGraph.invalidateEnvNodes(options.env);
    requestGraph.invalidateOptionNodes(options);
    // unstableFileInvalidations, when provided, overrides the watcher events.
    requestGraph.respondToFSEvents((options.unstableFileInvalidations || events).map(e => ({
      type: e.type,
      path: (0, _projectPath.toProjectPath)(options.projectRoot, e.path)
    })));
    return requestGraph;
  }
  return new RequestGraph();
}

View File

@@ -0,0 +1,626 @@
// Compiled CommonJS module prologue (symbol propagation): marks the module as
// an ES-module interop target and exports the propagateSymbols entry point
// defined below.
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.propagateSymbols = propagateSymbols;
// Lazy, memoized accessor for the "assert" module: the first call performs
// the require and rebinds `_assert` to return the cached module thereafter.
function _assert() {
  const data = _interopRequireDefault(require("assert"));
  _assert = () => data;
  return data;
}
// Lazy, memoized accessor for the "nullthrows" module (see _assert).
function _nullthrows() {
  const data = _interopRequireDefault(require("nullthrows"));
  _nullthrows = () => data;
  return data;
}
// Lazy, memoized accessor for "@parcel/utils" (no default-interop needed).
function _utils() {
  const data = require("@parcel/utils");
  _utils = () => data;
  return data;
}
// Lazy, memoized accessor for "@parcel/logger" with default-export interop.
function _logger() {
  const data = _interopRequireDefault(require("@parcel/logger"));
  _logger = () => data;
  return data;
}
// Lazy, memoized accessor for "@parcel/diagnostic" (no default-interop needed).
function _diagnostic() {
  const data = require("@parcel/diagnostic");
  _diagnostic = () => data;
  return data;
}
var _types = require("./types");
var _projectPath = require("./projectPath");
// Wraps a CommonJS export so it can be consumed like an ES module: objects
// already flagged __esModule pass through, anything else becomes { default }.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
function propagateSymbols({
options,
assetGraph,
changedAssetsPropagation,
assetGroupsWithRemovedParents,
previousErrors
}) {
let changedAssets = new Set([...changedAssetsPropagation].map(id => assetGraph.getNodeIdByContentKey(id)));
// To reorder once at the end
let changedDeps = new Set();
// For the down traversal, the nodes with `usedSymbolsDownDirty = true` are exactly
// `changedAssetsPropagation` (= asset and therefore potentially dependencies changed) or the
// asset children of `assetGroupsWithRemovedParents` (= fewer incoming dependencies causing less
// used symbols).
//
// The up traversal has to consider all nodes that changed in the down traversal
// (`useSymbolsUpDirtyDown = true`) which are listed in `changedDepsUsedSymbolsUpDirtyDown`
// (more or less requested symbols) and in `changedAssetsPropagation` (changing an asset might
// change exports).
// The dependencies that changed in the down traversal causing an update in the up traversal.
let changedDepsUsedSymbolsUpDirtyDown = new Set();
// Propagate the requested symbols down from the root to the leaves
propagateSymbolsDown(assetGraph, changedAssets, assetGroupsWithRemovedParents, (assetNode, incomingDeps, outgoingDeps) => {
// exportSymbol -> identifier
let assetSymbols = assetNode.value.symbols;
// identifier -> exportSymbol
let assetSymbolsInverse;
if (assetSymbols) {
assetSymbolsInverse = new Map();
for (let [s, {
local
}] of assetSymbols) {
let set = assetSymbolsInverse.get(local);
if (!set) {
set = new Set();
assetSymbolsInverse.set(local, set);
}
set.add(s);
}
}
let hasNamespaceOutgoingDeps = outgoingDeps.some(d => {
var _d$value$symbols;
return ((_d$value$symbols = d.value.symbols) === null || _d$value$symbols === void 0 || (_d$value$symbols = _d$value$symbols.get('*')) === null || _d$value$symbols === void 0 ? void 0 : _d$value$symbols.local) === '*';
});
// 1) Determine what the incomingDeps requests from the asset
// ----------------------------------------------------------
let isEntry = false;
let addAll = false;
// Used symbols that are exported or reexported (symbol will be removed again later) by asset.
assetNode.usedSymbols = new Set();
// Symbols that have to be namespace reexported by outgoingDeps.
let namespaceReexportedSymbols = new Set();
if (incomingDeps.length === 0) {
// Root in the runtimes Graph
assetNode.usedSymbols.add('*');
namespaceReexportedSymbols.add('*');
} else {
for (let incomingDep of incomingDeps) {
if (incomingDep.value.symbols == null) {
if (incomingDep.value.sourceAssetId == null) {
// The root dependency on non-library builds
isEntry = true;
} else {
// A regular dependency with cleared symbols
addAll = true;
}
continue;
}
for (let exportSymbol of incomingDep.usedSymbolsDown) {
if (exportSymbol === '*') {
assetNode.usedSymbols.add('*');
namespaceReexportedSymbols.add('*');
}
if (!assetSymbols || assetSymbols.has(exportSymbol) || assetSymbols.has('*')) {
// An own symbol or a non-namespace reexport
assetNode.usedSymbols.add(exportSymbol);
}
// A namespace reexport
// (but only if we actually have namespace-exporting outgoing dependencies,
// This usually happens with a reexporting asset with many namespace exports which means that
// we cannot match up the correct asset with the used symbol at this level.)
else if (hasNamespaceOutgoingDeps && exportSymbol !== 'default') {
namespaceReexportedSymbols.add(exportSymbol);
}
}
}
}
// Incomding dependency with cleared symbols, add everything
if (addAll) {
assetSymbols === null || assetSymbols === void 0 || assetSymbols.forEach((_, exportSymbol) => assetNode.usedSymbols.add(exportSymbol));
}
// 2) Distribute the symbols to the outgoing dependencies
// ----------------------------------------------------------
for (let dep of outgoingDeps) {
let depUsedSymbolsDownOld = dep.usedSymbolsDown;
let depUsedSymbolsDown = new Set();
dep.usedSymbolsDown = depUsedSymbolsDown;
if (assetNode.value.sideEffects ||
// Incoming dependency with cleared symbols
addAll ||
// For entries, we still need to add dep.value.symbols of the entry (which are "used" but not according to the symbols data)
isEntry ||
// If not a single symbol is used, we can say the entire subgraph is not used.
// This is e.g. needed when some symbol is imported and then used for a export which isn't used (= "semi-weak" reexport)
// index.js: `import {bar} from "./lib"; ...`
// lib/index.js: `export * from "./foo.js"; export * from "./bar.js";`
// lib/foo.js: `import { data } from "./bar.js"; export const foo = data + " esm2";`
assetNode.usedSymbols.size > 0 || namespaceReexportedSymbols.size > 0) {
var _depSymbols$get;
let depSymbols = dep.value.symbols;
if (!depSymbols) continue;
if (((_depSymbols$get = depSymbols.get('*')) === null || _depSymbols$get === void 0 ? void 0 : _depSymbols$get.local) === '*') {
if (addAll) {
depUsedSymbolsDown.add('*');
} else {
for (let s of namespaceReexportedSymbols) {
// We need to propagate the namespaceReexportedSymbols to all namespace dependencies (= even wrong ones because we don't know yet)
depUsedSymbolsDown.add(s);
}
}
}
for (let [symbol, {
local
}] of depSymbols) {
var _depSymbols$get2;
// Was already handled above
if (local === '*') continue;
if (!assetSymbolsInverse || !((_depSymbols$get2 = depSymbols.get(symbol)) !== null && _depSymbols$get2 !== void 0 && _depSymbols$get2.isWeak)) {
// Bailout or non-weak symbol (= used in the asset itself = not a reexport)
depUsedSymbolsDown.add(symbol);
} else {
let reexportedExportSymbols = assetSymbolsInverse.get(local);
if (reexportedExportSymbols == null) {
// not reexported = used in asset itself
depUsedSymbolsDown.add(symbol);
} else if (assetNode.usedSymbols.has('*')) {
// we need everything
depUsedSymbolsDown.add(symbol);
[...reexportedExportSymbols].forEach(s => assetNode.usedSymbols.delete(s));
} else {
let usedReexportedExportSymbols = [...reexportedExportSymbols].filter(s => assetNode.usedSymbols.has(s));
if (usedReexportedExportSymbols.length > 0) {
// The symbol is indeed a reexport, so it's not used from the asset itself
depUsedSymbolsDown.add(symbol);
usedReexportedExportSymbols.forEach(s => assetNode.usedSymbols.delete(s));
}
}
}
}
} else {
depUsedSymbolsDown.clear();
}
if (!(0, _utils().setEqual)(depUsedSymbolsDownOld, depUsedSymbolsDown)) {
dep.usedSymbolsDownDirty = true;
dep.usedSymbolsUpDirtyDown = true;
changedDepsUsedSymbolsUpDirtyDown.add(dep.id);
}
if (dep.usedSymbolsUpDirtyDown) {
// Set on node creation
changedDepsUsedSymbolsUpDirtyDown.add(dep.id);
}
}
});
const logFallbackNamespaceInsertion = (assetNode, symbol, depNode1, depNode2) => {
if (options.logLevel === 'verbose') {
_logger().default.warn({
message: `${(0, _projectPath.fromProjectPathRelative)(assetNode.value.filePath)} reexports "${symbol}", which could be resolved either to the dependency "${depNode1.value.specifier}" or "${depNode2.value.specifier}" at runtime. Adding a namespace object to fall back on.`,
origin: '@parcel/core'
});
}
};
// Because namespace reexports introduce ambiguity, go up the graph from the leaves to the
// root and remove requested symbols that aren't actually exported
let errors = propagateSymbolsUp(assetGraph, changedAssets, changedDepsUsedSymbolsUpDirtyDown, previousErrors, (assetNode, incomingDeps, outgoingDeps) => {
let assetSymbols = assetNode.value.symbols;
let assetSymbolsInverse = null;
if (assetSymbols) {
assetSymbolsInverse = new Map();
for (let [s, {
local
}] of assetSymbols) {
let set = assetSymbolsInverse.get(local);
if (!set) {
set = new Set();
assetSymbolsInverse.set(local, set);
}
set.add(s);
}
}
// the symbols that are reexported (not used in `asset`) -> asset they resolved to
let reexportedSymbols = new Map();
// the symbols that are reexported (not used in `asset`) -> the corresponding outgoingDep(s)
// To generate the diagnostic when there are multiple dependencies with non-statically
// analyzable exports
let reexportedSymbolsSource = new Map();
for (let outgoingDep of outgoingDeps) {
var _outgoingDepSymbols$g;
let outgoingDepSymbols = outgoingDep.value.symbols;
if (!outgoingDepSymbols) continue;
let isExcluded = assetGraph.getNodeIdsConnectedFrom(assetGraph.getNodeIdByContentKey(outgoingDep.id)).length === 0;
// excluded, assume everything that is requested exists
if (isExcluded) {
outgoingDep.usedSymbolsDown.forEach((_, s) => outgoingDep.usedSymbolsUp.set(s, null));
}
if (((_outgoingDepSymbols$g = outgoingDepSymbols.get('*')) === null || _outgoingDepSymbols$g === void 0 ? void 0 : _outgoingDepSymbols$g.local) === '*') {
outgoingDep.usedSymbolsUp.forEach((sResolved, s) => {
if (s === 'default') {
return;
}
// If the symbol could come from multiple assets at runtime, assetNode's
// namespace will be needed at runtime to perform the lookup on.
if (reexportedSymbols.has(s)) {
if (!assetNode.usedSymbols.has('*')) {
logFallbackNamespaceInsertion(assetNode, s, (0, _nullthrows().default)(reexportedSymbolsSource.get(s)), outgoingDep);
}
assetNode.usedSymbols.add('*');
reexportedSymbols.set(s, {
asset: assetNode.id,
symbol: s
});
} else {
reexportedSymbols.set(s, sResolved);
reexportedSymbolsSource.set(s, outgoingDep);
}
});
}
for (let [s, sResolved] of outgoingDep.usedSymbolsUp) {
var _outgoingDepSymbols$g2, _assetSymbolsInverse;
if (!outgoingDep.usedSymbolsDown.has(s)) {
// usedSymbolsDown is a superset of usedSymbolsUp
continue;
}
let local = (_outgoingDepSymbols$g2 = outgoingDepSymbols.get(s)) === null || _outgoingDepSymbols$g2 === void 0 ? void 0 : _outgoingDepSymbols$g2.local;
if (local == null) {
// Caused by '*' => '*', already handled
continue;
}
let reexported = (_assetSymbolsInverse = assetSymbolsInverse) === null || _assetSymbolsInverse === void 0 ? void 0 : _assetSymbolsInverse.get(local);
if (reexported != null) {
reexported.forEach(s => {
// see same code above
if (reexportedSymbols.has(s)) {
if (!assetNode.usedSymbols.has('*')) {
logFallbackNamespaceInsertion(assetNode, s, (0, _nullthrows().default)(reexportedSymbolsSource.get(s)), outgoingDep);
}
assetNode.usedSymbols.add('*');
reexportedSymbols.set(s, {
asset: assetNode.id,
symbol: s
});
} else {
reexportedSymbols.set(s, sResolved);
reexportedSymbolsSource.set(s, outgoingDep);
}
});
}
}
}
let errors = [];
function usedSymbolsUpAmbiguous(old, current, s, value) {
if (old.has(s)) {
let valueOld = old.get(s);
if (valueOld !== value && !((valueOld === null || valueOld === void 0 ? void 0 : valueOld.asset) === value.asset && (valueOld === null || valueOld === void 0 ? void 0 : valueOld.symbol) === value.symbol)) {
// The dependency points to multiple assets (via an asset group).
current.set(s, undefined);
return;
}
}
current.set(s, value);
}
for (let incomingDep of incomingDeps) {
var _incomingDepSymbols$g;
let incomingDepUsedSymbolsUpOld = incomingDep.usedSymbolsUp;
incomingDep.usedSymbolsUp = new Map();
let incomingDepSymbols = incomingDep.value.symbols;
if (!incomingDepSymbols) continue;
let hasNamespaceReexport = ((_incomingDepSymbols$g = incomingDepSymbols.get('*')) === null || _incomingDepSymbols$g === void 0 ? void 0 : _incomingDepSymbols$g.local) === '*';
for (let s of incomingDep.usedSymbolsDown) {
if (assetSymbols == null ||
// Assume everything could be provided if symbols are cleared
assetNode.value.bundleBehavior === _types.BundleBehavior.isolated || assetNode.value.bundleBehavior === _types.BundleBehavior.inline || s === '*' || assetNode.usedSymbols.has(s)) {
usedSymbolsUpAmbiguous(incomingDepUsedSymbolsUpOld, incomingDep.usedSymbolsUp, s, {
asset: assetNode.id,
symbol: s
});
} else if (reexportedSymbols.has(s)) {
let reexport = reexportedSymbols.get(s);
let v =
// Forward a reexport only if the current asset is side-effect free and not external
!assetNode.value.sideEffects && reexport != null ? reexport : {
asset: assetNode.id,
symbol: s
};
usedSymbolsUpAmbiguous(incomingDepUsedSymbolsUpOld, incomingDep.usedSymbolsUp, s, v);
} else if (!hasNamespaceReexport) {
var _incomingDep$value$sy, _fromProjectPath, _incomingDep$value$so;
let loc = (_incomingDep$value$sy = incomingDep.value.symbols) === null || _incomingDep$value$sy === void 0 || (_incomingDep$value$sy = _incomingDep$value$sy.get(s)) === null || _incomingDep$value$sy === void 0 ? void 0 : _incomingDep$value$sy.loc;
let [resolutionNodeId] = assetGraph.getNodeIdsConnectedFrom(assetGraph.getNodeIdByContentKey(incomingDep.id));
let resolution = (0, _nullthrows().default)(assetGraph.getNode(resolutionNodeId));
(0, _assert().default)(resolution && (resolution.type === 'asset_group' || resolution.type === 'asset'));
errors.push({
message: (0, _diagnostic().md)`${(0, _projectPath.fromProjectPathRelative)(resolution.value.filePath)} does not export '${s}'`,
origin: '@parcel/core',
codeFrames: loc ? [{
filePath: (_fromProjectPath = (0, _projectPath.fromProjectPath)(options.projectRoot, loc === null || loc === void 0 ? void 0 : loc.filePath)) !== null && _fromProjectPath !== void 0 ? _fromProjectPath : undefined,
language: (_incomingDep$value$so = incomingDep.value.sourceAssetType) !== null && _incomingDep$value$so !== void 0 ? _incomingDep$value$so : undefined,
codeHighlights: [(0, _diagnostic().convertSourceLocationToHighlight)(loc)]
}] : undefined
});
}
}
if (!equalMap(incomingDepUsedSymbolsUpOld, incomingDep.usedSymbolsUp)) {
changedDeps.add(incomingDep);
incomingDep.usedSymbolsUpDirtyUp = true;
}
incomingDep.excluded = false;
if (incomingDep.value.symbols != null && incomingDep.usedSymbolsUp.size === 0) {
let assetGroups = assetGraph.getNodeIdsConnectedFrom(assetGraph.getNodeIdByContentKey(incomingDep.id));
if (assetGroups.length === 1) {
let [assetGroupId] = assetGroups;
let assetGroup = (0, _nullthrows().default)(assetGraph.getNode(assetGroupId));
if (assetGroup.type === 'asset_group' && assetGroup.value.sideEffects === false) {
incomingDep.excluded = true;
}
} else {
(0, _assert().default)(assetGroups.length === 0);
}
}
}
return errors;
});
// Sort usedSymbolsUp so they are a consistent order across builds.
// This ensures a consistent ordering of these symbols when packaging.
// See https://github.com/parcel-bundler/parcel/pull/8212
for (let dep of changedDeps) {
dep.usedSymbolsUp = new Map([...dep.usedSymbolsUp].sort(([a], [b]) => a.localeCompare(b)));
}
return errors;
}
// Pushes used-symbol information down the asset graph (from importers toward
// importees). Starting from the changed assets / asset groups, nodes are
// revisited while their `usedSymbolsDownDirty` flag keeps spreading; `visit`
// performs the per-asset recomputation. Nodes that are neither changed nor
// reached through a dirty flag are never touched, so the work is proportional
// to the change set rather than the whole project. In a diamond-shaped graph
// the join node can be visited once per parent, but only for as long as its
// used symbols actually keep changing.
function propagateSymbolsDown(assetGraph, changedAssets, assetGroupsWithRemovedParents, visit) {
  if (changedAssets.size === 0 && assetGroupsWithRemovedParents.size === 0) {
    return;
  }
  // All not-yet-reached starting points; the queue is seeded with one of them
  // and refilled from here whenever a connected region is exhausted.
  const pending = new Set([...changedAssets, ...assetGroupsWithRemovedParents]);
  const queue = new Set([setPop(pending)]);
  while (queue.size > 0) {
    const currentId = setPop(queue);
    pending.delete(currentId);
    const childIds = assetGraph.getNodeIdsConnectedFrom(currentId);
    const current = (0, _nullthrows().default)(assetGraph.getNode(currentId));
    let currentWasDirty = false;
    if (current.type === 'dependency' || current.type === 'asset_group') {
      // Dependencies and asset groups just forward dirtiness; clear the flag.
      currentWasDirty = current.usedSymbolsDownDirty;
      current.usedSymbolsDownDirty = false;
    } else if (current.type === 'asset' && current.usedSymbolsDownDirty) {
      // Recompute this asset's downward symbols from its incoming dependencies.
      const incomingDeps = assetGraph.getIncomingDependencies(current.value).map(d => {
        const depNode = assetGraph.getNodeByContentKey(d.id);
        (0, _assert().default)(depNode && depNode.type === 'dependency');
        return depNode;
      });
      const outgoingDeps = childIds.map(id => {
        const depNode = (0, _nullthrows().default)(assetGraph.getNode(id));
        (0, _assert().default)(depNode.type === 'dependency');
        return depNode;
      });
      visit(current, incomingDeps, outgoingDeps);
      current.usedSymbolsDownDirty = false;
    }
    // Mark/queue children: assets and asset groups become dirty when their
    // parent was dirty; dependency children are queued if already dirty.
    for (const childId of childIds) {
      const childNode = (0, _nullthrows().default)(assetGraph.getNode(childId));
      let shouldQueueChild = false;
      if ((childNode.type === 'asset' || childNode.type === 'asset_group') && currentWasDirty) {
        childNode.usedSymbolsDownDirty = true;
        shouldQueueChild = true;
      } else if (childNode.type === 'dependency') {
        shouldQueueChild = childNode.usedSymbolsDownDirty;
      }
      if (shouldQueueChild) {
        queue.add(childId);
      }
    }
    // Region drained: continue with the next unvisited changed node.
    if (queue.size === 0 && pending.size > 0) {
      queue.add(setPop(pending));
    }
  }
}
// Propagates used-symbol information up the asset graph (from importees toward
// importers), recomputing dependencies' `usedSymbolsUp` via `visit`. Returns a
// Map of nodeId -> diagnostics for assets whose requested symbols could not be
// resolved; entries from `previousErrors` are kept for untouched nodes and
// dropped for nodes that were removed from the graph.
function propagateSymbolsUp(assetGraph, changedAssets, changedDepsUsedSymbolsUpDirtyDown, previousErrors, visit) {
  var _dirtyDeps;
  // For graphs in general (so with cyclic dependencies), some nodes will have to be revisited. So
  // run a regular queue-based BFS for anything that's still dirty.
  //
  // (Previously, there was first a recursive post-order DFS, with the idea that all children of a
  // node should be processed first. With a tree, this would result in a minimal amount of work by
  // processing every asset exactly once and then the remaining cycles would have been handled
  // with the loop. This was slightly faster for initial builds but had O(project) instead of
  // O(changes).)
  let errors = previousErrors ?
  // Some nodes might have been removed since the last build
  new Map([...previousErrors].filter(([n]) => assetGraph.hasNode(n))) : new Map();
  // Seed set for the incremental pass: the assets resolved from the dirty
  // dependencies plus all directly changed assets.
  let changedDepsUsedSymbolsUpDirtyDownAssets = new Set([...[...changedDepsUsedSymbolsUpDirtyDown].reverse().flatMap(id => getDependencyResolution(assetGraph, id)), ...changedAssets]);
  // Do a more efficient full traversal (less recomputations) if more than half of the assets
  // changed.
  let runFullPass =
  // If there are n nodes in the graph, then the asset count is approximately
  // n/6 (for every asset, there are ~4 dependencies and ~1 asset_group).
  assetGraph.nodes.length * (1 / 6) * 0.5 < changedDepsUsedSymbolsUpDirtyDownAssets.size;
  let dirtyDeps;
  if (runFullPass) {
    // Full pass: one post-order DFS over the whole graph; anything still dirty
    // afterwards (cycles) is collected into `dirtyDeps` for the BFS below.
    dirtyDeps = new Set();
    let rootNodeId = (0, _nullthrows().default)(assetGraph.rootNodeId, 'A root node is required to traverse');
    const nodeVisitor = nodeId => {
      let node = (0, _nullthrows().default)(assetGraph.getNode(nodeId));
      let outgoing = assetGraph.getNodeIdsConnectedFrom(nodeId);
      // Pull up dirtiness from this asset's outgoing dependencies.
      for (let childId of outgoing) {
        let child = (0, _nullthrows().default)(assetGraph.getNode(childId));
        if (node.type === 'asset') {
          (0, _assert().default)(child.type === 'dependency');
          if (child.usedSymbolsUpDirtyUp) {
            node.usedSymbolsUpDirty = true;
            child.usedSymbolsUpDirtyUp = false;
          }
        }
      }
      if (node.type === 'asset') {
        let incoming = assetGraph.getIncomingDependencies(node.value).map(d => {
          let n = assetGraph.getNodeByContentKey(d.id);
          (0, _assert().default)(n && n.type === 'dependency');
          return n;
        });
        // Consume downward dirtiness coming from importers.
        for (let dep of incoming) {
          if (dep.usedSymbolsUpDirtyDown) {
            dep.usedSymbolsUpDirtyDown = false;
            node.usedSymbolsUpDirty = true;
          }
        }
        if (node.usedSymbolsUpDirty) {
          let e = visit(node, incoming, outgoing.map(depNodeId => {
            let depNode = (0, _nullthrows().default)(assetGraph.getNode(depNodeId));
            (0, _assert().default)(depNode.type === 'dependency');
            return depNode;
          }));
          if (e.length > 0) {
            // Keep the node dirty so a later change can clear the error.
            node.usedSymbolsUpDirty = true;
            errors.set(nodeId, e);
          } else {
            node.usedSymbolsUpDirty = false;
            errors.delete(nodeId);
          }
        }
      } else {
        if (node.type === 'dependency') {
          // Remember dependencies that are still dirty after the full pass
          // (these sit on cycles and need the BFS below).
          if (node.usedSymbolsUpDirtyUp) {
            dirtyDeps.add(nodeId);
          } else {
            dirtyDeps.delete(nodeId);
          }
        }
      }
    };
    assetGraph.postOrderDfsFast(nodeVisitor, rootNodeId);
  }
  // BFS over whatever is still dirty: leftovers of the full pass, or the
  // incremental seed set when no full pass ran.
  let queue = (_dirtyDeps = dirtyDeps) !== null && _dirtyDeps !== void 0 ? _dirtyDeps : changedDepsUsedSymbolsUpDirtyDownAssets;
  while (queue.size > 0) {
    let queuedNodeId = setPop(queue);
    let node = (0, _nullthrows().default)(assetGraph.getNode(queuedNodeId));
    if (node.type === 'asset') {
      let incoming = assetGraph.getIncomingDependencies(node.value).map(dep => {
        let depNode = assetGraph.getNodeByContentKey(dep.id);
        (0, _assert().default)(depNode && depNode.type === 'dependency');
        return depNode;
      });
      for (let dep of incoming) {
        if (dep.usedSymbolsUpDirtyDown) {
          dep.usedSymbolsUpDirtyDown = false;
          node.usedSymbolsUpDirty = true;
        }
      }
      let outgoing = assetGraph.getNodeIdsConnectedFrom(queuedNodeId).map(depNodeId => {
        let depNode = (0, _nullthrows().default)(assetGraph.getNode(depNodeId));
        (0, _assert().default)(depNode.type === 'dependency');
        return depNode;
      });
      for (let dep of outgoing) {
        if (dep.usedSymbolsUpDirtyUp) {
          node.usedSymbolsUpDirty = true;
          dep.usedSymbolsUpDirtyUp = false;
        }
      }
      if (node.usedSymbolsUpDirty) {
        let e = visit(node, incoming, outgoing);
        if (e.length > 0) {
          node.usedSymbolsUpDirty = true;
          errors.set(queuedNodeId, e);
        } else {
          node.usedSymbolsUpDirty = false;
          errors.delete(queuedNodeId);
        }
      }
      // Continue upwards through importers whose dependency became dirty.
      for (let i of incoming) {
        if (i.usedSymbolsUpDirtyUp) {
          queue.add(assetGraph.getNodeIdByContentKey(i.id));
        }
      }
    } else {
      // Non-asset node (dependency/asset_group): walk up to its parent.
      // NOTE(review): Set.prototype.add takes a single value, so
      // `queue.add(...connectedNodes)` only enqueues connectedNodes[0]. This is
      // harmless if such nodes always have exactly one parent here — confirm.
      let connectedNodes = assetGraph.getNodeIdsConnectedTo(queuedNodeId);
      if (connectedNodes.length > 0) {
        queue.add(...connectedNodes);
      }
    }
  }
  return errors;
}
// Resolves a dependency (by content key) to the asset node id(s) it points to.
// A dependency has at most one child; if that child is an asset group, the
// group's children are returned instead. Returns [] for excluded/unresolved
// dependencies.
function getDependencyResolution(graph, depId) {
  const depNodeId = graph.getNodeIdByContentKey(depId);
  const children = graph.getNodeIdsConnectedFrom(depNodeId);
  (0, _assert().default)(children.length <= 1);
  const [resolvedId] = children;
  if (!resolvedId) {
    return [];
  }
  const resolvedNode = (0, _nullthrows().default)(graph.getNode(resolvedId));
  return resolvedNode.type === 'asset_group' ? graph.getNodeIdsConnectedFrom(resolvedId) : [resolvedId];
}
// Shallowly compares two Maps whose values are optional {asset, symbol}
// records. Returns true iff both maps have the same keys and each pair of
// values agrees on `asset` and `symbol` (missing/undefined values compare
// field-by-field as undefined).
function equalMap(a, b) {
  if (a.size !== b.size) {
    return false;
  }
  for (const [key, valA] of a) {
    if (!b.has(key)) {
      return false;
    }
    const valB = b.get(key);
    if (valB?.asset !== valA?.asset || valB?.symbol !== valA?.symbol) {
      return false;
    }
  }
  return true;
}
// Removes and returns the first (insertion-order) element of a Set.
// Throws (via nullthrows) if the set is empty or its first value is nullish.
function setPop(set) {
  const {
    value
  } = set.values().next();
  const popped = (0, _nullthrows().default)(value);
  set.delete(popped);
  return popped;
}

View File

@@ -0,0 +1,115 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.PACKAGE_DESCRIPTOR_SCHEMA = exports.ENGINES_SCHEMA = exports.DESCRIPTOR_SCHEMA = exports.COMMON_TARGET_DESCRIPTOR_SCHEMA = void 0;
// Declarative validation schemas (JSON-Schema-like) for Parcel target
// descriptors, e.g. the `targets` field in package.json.
//
// Schema for the `engines` field: `browsers` is a browserslist query (string
// or array of strings); any additional engine (node, electron, ...) is a plain
// version-range string.
// NOTE(review): `__forbiddenProperties` appears to be a Parcel-specific schema
// extension used to reject `browser` (a likely typo for `browsers`) with a
// dedicated error — confirm against the schema validator in @parcel/utils.
const ENGINES_SCHEMA = exports.ENGINES_SCHEMA = {
  type: 'object',
  properties: {
    browsers: {
      oneOf: [{
        type: 'array',
        items: {
          type: 'string'
        }
      }, {
        type: 'string'
      }]
    }
  },
  __forbiddenProperties: ['browser'],
  additionalProperties: {
    type: 'string'
  }
};
// Schema for a single target descriptor as authored in package.json.
// Unknown properties are rejected (`additionalProperties: false`).
const PACKAGE_DESCRIPTOR_SCHEMA = exports.PACKAGE_DESCRIPTOR_SCHEMA = {
  type: 'object',
  properties: {
    context: {
      type: 'string',
      enum: ['node', 'browser', 'web-worker', 'electron-main', 'electron-renderer', 'service-worker']
    },
    // true/false, a list of wildcard/filepath strings, or a name -> boolean map.
    includeNodeModules: {
      oneOf: [{
        type: 'boolean'
      }, {
        type: 'array',
        items: {
          type: 'string',
          // NOTE(review): `__type` looks like a custom extension providing a
          // human-readable type name for error messages — confirm.
          __type: 'a wildcard or filepath'
        }
      }, {
        type: 'object',
        properties: {},
        additionalProperties: {
          type: 'boolean'
        }
      }]
    },
    outputFormat: {
      type: 'string',
      enum: ['global', 'esmodule', 'commonjs']
    },
    distDir: {
      type: 'string'
    },
    publicUrl: {
      type: 'string'
    },
    isLibrary: {
      type: 'boolean'
    },
    // Entry point(s) overriding package.json#source.
    source: {
      oneOf: [{
        type: 'string'
      }, {
        type: 'array',
        items: {
          type: 'string'
        }
      }]
    },
    // Either a simple on/off flag or fine-grained source map options.
    sourceMap: {
      oneOf: [{
        type: 'boolean'
      }, {
        type: 'object',
        properties: {
          inlineSources: {
            type: 'boolean'
          },
          sourceRoot: {
            type: 'string'
          },
          inline: {
            type: 'boolean'
          }
        },
        additionalProperties: false
      }]
    },
    engines: ENGINES_SCHEMA,
    optimize: {
      type: 'boolean'
    },
    scopeHoist: {
      type: 'boolean'
    }
  },
  additionalProperties: false
};
// Package schema extended with `distEntry` (the output filename).
const DESCRIPTOR_SCHEMA = exports.DESCRIPTOR_SCHEMA = {
  ...PACKAGE_DESCRIPTOR_SCHEMA,
  properties: {
    ...PACKAGE_DESCRIPTOR_SCHEMA.properties,
    distEntry: {
      type: 'string'
    }
  }
};
// A common target is either a full descriptor or literally `false` (disabled).
const COMMON_TARGET_DESCRIPTOR_SCHEMA = exports.COMMON_TARGET_DESCRIPTOR_SCHEMA = {
  oneOf: [PACKAGE_DESCRIPTOR_SCHEMA, {
    enum: [false]
  }]
};

View File

@@ -0,0 +1,538 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _stream() {
const data = require("stream");
_stream = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _logger() {
const data = _interopRequireWildcard(require("@parcel/logger"));
_logger = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
var _Dependency = require("./Dependency");
var _ParcelConfig = _interopRequireDefault(require("./ParcelConfig"));
var _PathRequest = require("./requests/PathRequest");
var _Asset = require("./public/Asset");
var _UncommittedAsset = _interopRequireDefault(require("./UncommittedAsset"));
var _assetUtils = require("./assetUtils");
var _summarizeRequest = _interopRequireDefault(require("./summarizeRequest"));
var _PluginOptions = _interopRequireDefault(require("./public/PluginOptions"));
var _utils2 = require("./utils");
var _InternalConfig = require("./InternalConfig");
var _ConfigRequest = require("./requests/ConfigRequest");
var _DevDepRequest = require("./requests/DevDepRequest");
var _projectPath = require("./projectPath");
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// TODO: eventually call path request as sub requests
// Executes the transformer pipeline for a single AssetRequest inside a worker.
// A Transformation loads the asset from disk (or from inline `code`), resolves
// the matching pipeline from the Parcel config, runs each transformer in order
// (switching to a new pipeline whenever the asset's type changes), and collects
// the resulting assets together with config requests, dev-dependency requests
// and cache invalidations accumulated along the way.
class Transformation {
  // request: the AssetRequest to process; options: resolved Parcel options;
  // config: the loaded ParcelConfig; workerApi: the worker farm handle.
  constructor({
    request,
    options,
    config,
    workerApi
  }) {
    this.configs = new Map();
    this.parcelConfig = config;
    this.options = options;
    this.request = request;
    this.workerApi = workerApi;
    this.invalidations = (0, _utils2.createInvalidations)();
    this.devDepRequests = new Map();
    this.pluginDevDeps = [];
    this.resolverRunner = new _PathRequest.ResolverRunner({
      config,
      options,
      previousDevDeps: request.devDeps
    });
    // Proxy options so that any option a plugin reads (and any dev dependency
    // it requires) is recorded as an invalidation for this transformation.
    this.pluginOptions = new _PluginOptions.default((0, _utils2.optionsProxy)(this.options, option => {
      this.invalidations.invalidateOnOptionChange.add(option);
    }, devDep => {
      this.pluginDevDeps.push(devDep);
    }));
  }
  // Main entry point: loads the asset, runs the pipelines, commits the results
  // to the cache, and returns a serializable summary (assets, config/dev-dep
  // requests, invalidations, and any error as a diagnostic).
  async run() {
    let asset = await this.loadAsset();
    let existing;
    if (!asset.mapBuffer && _utils().SOURCEMAP_EXTENSIONS.has(asset.value.type)) {
      // Load existing sourcemaps, this automatically runs the source contents extraction
      try {
        existing = await asset.loadExistingSourcemap();
      } catch (err) {
        // A broken pre-existing source map is not fatal; log and continue.
        _logger().default.verbose([{
          origin: '@parcel/core',
          message: (0, _diagnostic().md)`Could not load existing source map for ${(0, _projectPath.fromProjectPathRelative)(asset.value.filePath)}`
        }, {
          origin: '@parcel/core',
          message: (0, _diagnostic().escapeMarkdown)(err.message)
        }]);
      }
    }
    if (existing == null &&
    // Don't buffer an entire stream into memory since it may not need sourceContent,
    // e.g. large binary files
    !(asset.content instanceof _stream().Readable)) {
      // If no existing sourcemap was found, initialize asset.sourceContent
      // with the original contents. This will be used when the transformer
      // calls setMap to ensure the source content is in the sourcemap.
      asset.sourceContent = await asset.getCode();
    }
    (0, _DevDepRequest.invalidateDevDeps)(this.request.invalidDevDeps, this.options, this.parcelConfig);
    let pipeline = await this.loadPipeline(this.request.filePath, asset.value.isSource, asset.value.pipeline);
    let assets, error;
    try {
      let results = await this.runPipelines(pipeline, asset);
      await Promise.all(results.map(asset => asset.commit()));
      assets = results.map(a => a.value);
    } catch (e) {
      // The error is returned as data (not rethrown) so the worker can report
      // it alongside the collected config/dev-dep requests.
      error = e;
    }
    let configRequests = (0, _ConfigRequest.getConfigRequests)([...this.configs.values(), ...this.resolverRunner.configs.values()]);
    let devDepRequests = (0, _DevDepRequest.getWorkerDevDepRequests)([...this.devDepRequests.values(), ...this.resolverRunner.devDepRequests.values()]);
    // $FlowFixMe because of $$raw
    return {
      $$raw: true,
      assets,
      configRequests,
      // When throwing an error, this (de)serialization is done automatically by the WorkerFarm
      error: error ? (0, _diagnostic().anyToDiagnostic)(error) : undefined,
      invalidations: this.invalidations,
      devDepRequests
    };
  }
  // Reads the request's file (or inline `code`) and wraps it in an
  // UncommittedAsset with initial stats and a unique id base.
  async loadAsset() {
    let {
      filePath,
      env,
      code,
      pipeline,
      isSource: isSourceOverride,
      sideEffects,
      query
    } = this.request;
    let {
      content,
      size,
      isSource: summarizedIsSource
    } = await (0, _summarizeRequest.default)(this.options.inputFS, {
      filePath: (0, _projectPath.fromProjectPath)(this.options.projectRoot, filePath),
      code
    });
    // Prefer `isSource` originating from the AssetRequest.
    let isSource = isSourceOverride !== null && isSourceOverride !== void 0 ? isSourceOverride : summarizedIsSource;
    // If the transformer request passed code, use a hash in addition
    // to the filename as the base for the id to ensure it is unique.
    let idBase = (0, _projectPath.fromProjectPathRelative)(filePath);
    if (code != null) {
      idBase += (0, _rust().hashString)(code);
    }
    return new _UncommittedAsset.default({
      idBase,
      value: (0, _assetUtils.createAsset)(this.options.projectRoot, {
        idBase,
        filePath,
        isSource,
        // Initial asset type is the file extension without the dot.
        type: _path().default.extname((0, _projectPath.fromProjectPathRelative)(filePath)).slice(1),
        pipeline,
        env,
        query,
        stats: {
          time: 0,
          size
        },
        sideEffects
      }),
      options: this.options,
      content,
      invalidations: this.invalidations
    });
  }
  // Runs one pipeline and then, for every output whose type changed, recurses
  // into the next matching pipeline. Applies the pipeline's optional
  // postProcess hook to the final asset list.
  async runPipelines(pipeline, initialAsset) {
    var _await$pipeline$postP;
    let initialType = initialAsset.value.type;
    let assets;
    try {
      assets = await this.runPipeline(pipeline, initialAsset);
    } finally {
      // Dev-dep requests are recorded even when the pipeline throws, so
      // invalidation tracking stays correct across failed builds.
      // Add dev dep requests for each transformer
      for (let transformer of pipeline.transformers) {
        await this.addDevDependency({
          specifier: transformer.name,
          resolveFrom: transformer.resolveFrom,
          range: transformer.range
        });
      }
      // Add dev dep requests for dependencies of transformer plugins
      // (via proxied packageManager.require calls).
      for (let devDep of this.pluginDevDeps) {
        await this.addDevDependency(devDep);
      }
    }
    let finalAssets = [];
    for (let asset of assets) {
      let nextPipeline;
      if (asset.value.type !== initialType) {
        nextPipeline = await this.loadNextPipeline({
          filePath: initialAsset.value.filePath,
          isSource: asset.value.isSource,
          newType: asset.value.type,
          newPipeline: asset.value.pipeline,
          currentPipeline: pipeline
        });
      }
      if (nextPipeline) {
        let nextPipelineAssets = await this.runPipelines(nextPipeline, asset);
        finalAssets = finalAssets.concat(nextPipelineAssets);
      } else {
        finalAssets.push(asset);
      }
    }
    if (!pipeline.postProcess) {
      return finalAssets;
    }
    (0, _assert().default)(pipeline.postProcess != null);
    let processedFinalAssets = (_await$pipeline$postP = await pipeline.postProcess(finalAssets)) !== null && _await$pipeline$postP !== void 0 ? _await$pipeline$postP : [];
    return processedFinalAssets;
  }
  // Records a dev-dependency request (deduplicated by specifier + resolveFrom)
  // so the build graph can be invalidated when the dependency changes.
  async addDevDependency(opts) {
    let {
      specifier,
      resolveFrom,
      range
    } = opts;
    let key = `${specifier}:${(0, _projectPath.fromProjectPathRelative)(resolveFrom)}`;
    if (this.devDepRequests.has(key)) {
      return;
    }
    // Ensure that the package manager has an entry for this resolution.
    try {
      await this.options.packageManager.resolve(specifier, (0, _projectPath.fromProjectPath)(this.options.projectRoot, opts.resolveFrom), {
        range
      });
    } catch (err) {
      // ignore
    }
    let devDepRequest = await (0, _DevDepRequest.createDevDependency)(opts, this.request.devDeps, this.options);
    this.devDepRequests.set(key, devDepRequest);
  }
  // Feeds the asset(s) through each transformer of the pipeline in order.
  // Assets whose type changes mid-pipeline (and for which another pipeline
  // exists) drop out early and are returned for the caller to re-dispatch.
  async runPipeline(pipeline, initialAsset) {
    if (pipeline.transformers.length === 0) {
      return [initialAsset];
    }
    let initialType = initialAsset.value.type;
    let inputAssets = [initialAsset];
    let resultingAssets = [];
    let finalAssets = [];
    for (let transformer of pipeline.transformers) {
      resultingAssets = [];
      for (let asset of inputAssets) {
        // Type changed and a different pipeline handles the new type:
        // stop transforming this asset here.
        if (asset.value.type !== initialType && (await this.loadNextPipeline({
          filePath: initialAsset.value.filePath,
          isSource: asset.value.isSource,
          newType: asset.value.type,
          newPipeline: asset.value.pipeline,
          currentPipeline: pipeline
        }))) {
          finalAssets.push(asset);
          continue;
        }
        try {
          const measurement = _profiler().tracer.createMeasurement(transformer.name, 'transform', (0, _projectPath.fromProjectPathRelative)(initialAsset.value.filePath));
          let transformerResults = await this.runTransformer(pipeline, asset, transformer.plugin, transformer.name, transformer.config, transformer.configKeyPath, this.parcelConfig);
          measurement && measurement.end();
          for (let result of transformerResults) {
            if (result instanceof _UncommittedAsset.default) {
              resultingAssets.push(result);
              continue;
            }
            resultingAssets.push(asset.createChildAsset(result, transformer.name, this.parcelConfig.filePath, transformer.configKeyPath));
          }
        } catch (e) {
          let diagnostic = (0, _diagnostic().errorToDiagnostic)(e, {
            origin: transformer.name,
            filePath: (0, _projectPath.fromProjectPath)(this.options.projectRoot, asset.value.filePath)
          });
          // If this request is a virtual asset that might not exist on the filesystem,
          // add the `code` property to each code frame in the diagnostics that match the
          // request's filepath. This can't be done by the transformer because it might not
          // have access to the original code (e.g. an inline script tag in HTML).
          if (this.request.code != null) {
            for (let d of diagnostic) {
              if (d.codeFrames) {
                for (let codeFrame of d.codeFrames) {
                  if (codeFrame.code == null && codeFrame.filePath === this.request.filePath) {
                    codeFrame.code = this.request.code;
                  }
                }
              }
            }
          }
          throw new (_diagnostic().default)({
            diagnostic
          });
        }
      }
      // Outputs of this transformer become inputs of the next one.
      inputAssets = resultingAssets;
    }
    // Make assets with ASTs generate unless they are CSS modules. This parallelizes generation
    // and distributes work more evenly across workers than if one worker needed to
    // generate all assets in a large bundle during packaging.
    await Promise.all(resultingAssets.filter(asset => asset.ast != null && !(this.options.mode === 'production' && asset.value.type === 'css' && asset.value.symbols)).map(async asset => {
      if (asset.isASTDirty && asset.generate) {
        var _output$map;
        let output = await asset.generate();
        asset.content = output.content;
        asset.mapBuffer = (_output$map = output.map) === null || _output$map === void 0 ? void 0 : _output$map.toBuffer();
      }
      asset.clearAST();
    }));
    return finalAssets.concat(resultingAssets);
  }
  // Resolves the transformer list for a file path/pipeline name from the
  // Parcel config, loading each transformer's config, and returns a pipeline
  // object whose id is the colon-joined list of transformer names.
  async loadPipeline(filePath, isSource, pipeline) {
    let transformers = await this.parcelConfig.getTransformers(filePath, pipeline, this.request.isURL);
    for (let transformer of transformers) {
      let config = await this.loadTransformerConfig(transformer, isSource);
      if (config) {
        this.configs.set(transformer.name, config);
      }
    }
    return {
      id: transformers.map(t => t.name).join(':'),
      transformers: transformers.map(transformer => {
        var _this$configs$get;
        return {
          name: transformer.name,
          resolveFrom: transformer.resolveFrom,
          config: (_this$configs$get = this.configs.get(transformer.name)) === null || _this$configs$get === void 0 ? void 0 : _this$configs$get.result,
          configKeyPath: transformer.keyPath,
          plugin: transformer.plugin
        };
      }),
      options: this.options,
      pluginOptions: this.pluginOptions,
      workerApi: this.workerApi
    };
  }
  // Loads the pipeline matching the asset's NEW type (by swapping the file
  // extension); returns null if it would be the same pipeline (to avoid
  // re-running the current pipeline forever).
  async loadNextPipeline({
    filePath,
    isSource,
    newType,
    newPipeline,
    currentPipeline
  }) {
    let filePathRelative = (0, _projectPath.fromProjectPathRelative)(filePath);
    let nextFilePath = (0, _projectPath.toProjectPathUnsafe)(filePathRelative.slice(0, -_path().default.extname(filePathRelative).length) + '.' + newType);
    let nextPipeline = await this.loadPipeline(nextFilePath, isSource, newPipeline);
    if (nextPipeline.id === currentPipeline.id) {
      return null;
    }
    return nextPipeline;
  }
  // Invokes the transformer's optional loadConfig hook and registers its
  // declared dev dependencies. Returns undefined when the plugin has no
  // loadConfig.
  async loadTransformerConfig(transformer, isSource) {
    let loadConfig = transformer.plugin.loadConfig;
    if (!loadConfig) {
      return;
    }
    let config = (0, _InternalConfig.createConfig)({
      plugin: transformer.name,
      isSource,
      searchPath: this.request.filePath,
      env: this.request.env
    });
    await (0, _ConfigRequest.loadPluginConfig)(transformer, config, this.options);
    for (let devDep of config.devDeps) {
      await this.addDevDependency(devDep);
    }
    return config;
  }
  // Runs a single transformer plugin over one asset: optionally regenerates
  // stale code from the AST, parses, transforms, and wires up generate /
  // postProcess callbacks for later use. Returns the transformer's results.
  async runTransformer(pipeline, asset, transformer, transformerName, preloadedConfig, configKeyPath, parcelConfig) {
    var _transformer$parse;
    const logger = new (_logger().PluginLogger)({
      origin: transformerName
    });
    const tracer = new (_profiler().PluginTracer)({
      origin: transformerName,
      category: 'transform'
    });
    // Resolver exposed to the plugin; records file-create/change invalidations
    // from the resolution and surfaces resolver diagnostics as errors.
    const resolve = async (from, to, options) => {
      let result = await this.resolverRunner.resolve((0, _Dependency.createDependency)(this.options.projectRoot, {
        env: asset.value.env,
        specifier: to,
        specifierType: (options === null || options === void 0 ? void 0 : options.specifierType) || 'esm',
        packageConditions: options === null || options === void 0 ? void 0 : options.packageConditions,
        sourcePath: from
      }));
      if (result.invalidateOnFileCreate) {
        this.invalidations.invalidateOnFileCreate.push(...result.invalidateOnFileCreate.map(i => (0, _utils2.invalidateOnFileCreateToInternal)(this.options.projectRoot, i)));
      }
      if (result.invalidateOnFileChange) {
        for (let filePath of result.invalidateOnFileChange) {
          this.invalidations.invalidateOnFileChange.add((0, _projectPath.toProjectPath)(this.options.projectRoot, filePath));
        }
      }
      if (result.diagnostics && result.diagnostics.length > 0) {
        throw new (_diagnostic().default)({
          diagnostic: result.diagnostics
        });
      }
      return (0, _projectPath.fromProjectPath)(this.options.projectRoot, (0, _nullthrows().default)(result.assetGroup).filePath);
    };
    // If an ast exists on the asset, but we cannot reuse it,
    // use the previous transform to generate code that we can re-parse.
    if (asset.ast && asset.isASTDirty && (!transformer.canReuseAST || !transformer.canReuseAST({
      ast: asset.ast,
      options: pipeline.pluginOptions,
      logger,
      tracer
    })) && asset.generate) {
      var _output$map2;
      let output = await asset.generate();
      asset.content = output.content;
      asset.mapBuffer = (_output$map2 = output.map) === null || _output$map2 === void 0 ? void 0 : _output$map2.toBuffer();
    }
    // Load config for the transformer.
    let config = preloadedConfig;
    // Parse if there is no AST available from a previous transform.
    let parse = (_transformer$parse = transformer.parse) === null || _transformer$parse === void 0 ? void 0 : _transformer$parse.bind(transformer);
    if (!asset.ast && parse) {
      let ast = await parse({
        asset: new _Asset.Asset(asset),
        config,
        options: pipeline.pluginOptions,
        resolve,
        logger,
        tracer
      });
      if (ast) {
        asset.setAST(ast);
        asset.isASTDirty = false;
      }
    }
    // Transform. (Note: "transfomerResult" is an inherited typo; renaming would
    // be a code change, so it is kept as-is.)
    let transfomerResult =
    // $FlowFixMe the returned IMutableAsset really is a MutableAsset
    await transformer.transform({
      asset: new _Asset.MutableAsset(asset),
      config,
      options: pipeline.pluginOptions,
      resolve,
      logger,
      tracer
    });
    // normalizeAssets is synchronous; the await is a harmless no-op here.
    let results = await normalizeAssets(this.options, transfomerResult);
    // Create generate and postProcess function that can be called later
    asset.generate = () => {
      let publicAsset = new _Asset.Asset(asset);
      if (transformer.generate && asset.ast) {
        let generated = transformer.generate({
          asset: publicAsset,
          ast: asset.ast,
          options: pipeline.pluginOptions,
          logger,
          tracer
        });
        asset.clearAST();
        return Promise.resolve(generated);
      }
      throw new Error('Asset has an AST but no generate method is available on the transform');
    };
    let postProcess = transformer.postProcess;
    if (postProcess) {
      // Bind the plugin's postProcess into the pipeline so runPipelines can
      // apply it to the final asset list.
      pipeline.postProcess = async assets => {
        let results = await postProcess.call(transformer, {
          assets: assets.map(asset => new _Asset.MutableAsset(asset)),
          config,
          options: pipeline.pluginOptions,
          resolve,
          logger,
          tracer
        });
        return Promise.all(results.map(result => asset.createChildAsset(result, transformerName, parcelConfig.filePath
        // configKeyPath,
        )));
      };
    }
    return results;
  }
}
exports.default = Transformation;
// Converts transformer results into a uniform shape: MutableAsset wrappers are
// unwrapped back to their underlying UncommittedAsset, plain result objects
// pass through untouched. (`options` is accepted for signature compatibility
// but unused.)
function normalizeAssets(options, results) {
  return results.map(item => item instanceof _Asset.MutableAsset ? (0, _Asset.mutableAssetToUncommittedAsset)(item) : item);
}

View File

@@ -0,0 +1,345 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _stream() {
const data = require("stream");
_stream = function () {
return data;
};
return data;
}
function _sourceMap() {
const data = _interopRequireDefault(require("@parcel/source-map"));
_sourceMap = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
var _serializer = require("./serializer");
var _Dependency = require("./Dependency");
var _Environment = require("./Environment");
var _constants = require("./constants");
var _assetUtils = require("./assetUtils");
var _types = require("./types");
var _utils2 = require("./utils");
var _projectPath = require("./projectPath");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * An asset that has been produced by a transformer but not yet committed to
 * the cache. Holds the internal asset record (`value`) together with its
 * in-flight content (string | Buffer | stream | promise), source-map buffer,
 * AST, and the invalidations accumulated while producing it.
 */
class UncommittedAsset {
  constructor({
    value,
    options,
    content,
    mapBuffer,
    ast,
    isASTDirty,
    idBase,
    invalidations
  }) {
    this.value = value;
    this.options = options;
    this.content = content;
    this.mapBuffer = mapBuffer;
    this.ast = ast;
    this.isASTDirty = isASTDirty || false;
    this.idBase = idBase;
    this.invalidations = invalidations || (0, _utils2.createInvalidations)();
  }
  /*
   * Prepares the asset for being serialized to the cache by committing its
   * content and map of the asset to the cache.
   */
  async commit() {
    // If there is a dirty AST, clear out any old content and map as these
    // must be regenerated later and shouldn't be committed.
    if (this.ast != null && this.isASTDirty) {
      this.content = null;
      this.mapBuffer = null;
    }
    let size = 0;
    let outputHash = '';
    // Each of content/map/ast gets its own stable cache slot keyed off the
    // asset id; null means "nothing to store" for that slot.
    let contentKey = this.content == null ? null : this.getCacheKey('content');
    let mapKey = this.mapBuffer == null ? null : this.getCacheKey('map');
    let astKey = this.ast == null ? null : this.getCacheKey('ast');
    // Since we can only read from the stream once, compute the content length
    // and hash while it's being written to the cache.
    await Promise.all([contentKey != null && this.commitContent(contentKey).then(s => (size = s.size, outputHash = s.hash)), this.mapBuffer != null && mapKey != null && this.options.cache.setBlob(mapKey, this.mapBuffer), astKey != null && this.options.cache.setBlob(astKey, (0, _serializer.serializeRaw)(this.ast))]);
    this.value.contentKey = contentKey;
    this.value.mapKey = mapKey;
    this.value.astKey = astKey;
    this.value.outputHash = outputHash;
    if (this.content != null) {
      this.value.stats.size = size;
    }
    // Streams are treated as large blobs so downstream code reads them back
    // as streams instead of buffering.
    this.value.isLargeBlob = this.content instanceof _stream().Readable;
    this.value.committed = true;
  }
  // Write the asset's content to the cache under `contentKey` and return its
  // byte size and hash. Streams are hashed and measured while being piped
  // into the cache, since they can only be consumed once.
  async commitContent(contentKey) {
    let content = await this.content;
    if (content == null) {
      return {
        size: 0,
        hash: ''
      };
    }
    let size = 0;
    if (content instanceof _stream().Readable) {
      let hash = new (_rust().Hash)();
      await this.options.cache.setStream(contentKey, content.pipe(new (_utils().TapStream)(buf => {
        hash.writeBuffer(buf);
        size += buf.length;
      })));
      return {
        size,
        hash: hash.finish()
      };
    }
    let hash;
    if (typeof content === 'string') {
      hash = (0, _rust().hashString)(content);
      // byteLength, not .length: string length counts UTF-16 units, not bytes.
      size = Buffer.byteLength(content);
    } else {
      hash = (0, _rust().hashBuffer)(content);
      size = content.length;
    }
    await this.options.cache.setBlob(contentKey, content);
    return {
      size,
      hash
    };
  }
  // Return the content as a string, buffering a stream if necessary.
  // Throws if the AST is dirty, because the code would be stale.
  async getCode() {
    if (this.ast != null && this.isASTDirty) {
      throw new Error('Cannot call getCode() on an asset with a dirty AST. For transformers, implement canReuseAST() and check asset.isASTDirty.');
    }
    let content = await this.content;
    if (typeof content === 'string' || content instanceof Buffer) {
      return content.toString();
    } else if (content != null) {
      // Buffer the stream and keep the promise so later reads reuse it.
      this.content = (0, _utils().bufferStream)(content);
      return (await this.content).toString();
    }
    (0, _assert().default)(false, 'Internal error: missing content');
  }
  // Return the content as a Buffer, buffering a stream if necessary.
  async getBuffer() {
    let content = await this.content;
    if (content == null) {
      return Buffer.alloc(0);
    } else if (content instanceof Buffer) {
      return content;
    } else if (typeof content === 'string') {
      return Buffer.from(content);
    }
    this.content = (0, _utils().bufferStream)(content);
    return this.content;
  }
  // Return the content as a readable stream.
  getStream() {
    var _this$content;
    if (this.content instanceof _stream().Readable) {
      // Remove content if it's a stream, as it should not be reused.
      let content = this.content;
      this.content = null;
      return content;
    }
    if (this.content instanceof Promise) {
      return (0, _utils().streamFromPromise)(this.content);
    }
    return (0, _utils().blobToStream)((_this$content = this.content) !== null && _this$content !== void 0 ? _this$content : Buffer.alloc(0));
  }
  // The three setters below replace the content and drop any AST, which
  // would otherwise be out of sync with the new content.
  setCode(code) {
    this.content = code;
    this.clearAST();
  }
  setBuffer(buffer) {
    this.content = buffer;
    this.clearAST();
  }
  setStream(stream) {
    this.content = stream;
    this.clearAST();
  }
  // Load a pre-existing source map referenced by the code (e.g. via a
  // sourceMappingURL comment), strip the reference from the code, and cache
  // the parsed map on this asset. Returns the map, or undefined if none.
  async loadExistingSourcemap() {
    if (this.map) {
      return this.map;
    }
    let code = await this.getCode();
    let map = await (0, _utils().loadSourceMap)((0, _projectPath.fromProjectPath)(this.options.projectRoot, this.value.filePath), code, {
      fs: this.options.inputFS,
      projectRoot: this.options.projectRoot
    });
    if (map) {
      this.map = map;
      this.mapBuffer = map.toBuffer();
      this.setCode(code.replace(_utils().SOURCEMAP_RE, ''));
    }
    return this.map;
  }
  getMapBuffer() {
    return Promise.resolve(this.mapBuffer);
  }
  // Lazily deserialize the source map from its buffer representation.
  async getMap() {
    if (this.map == null) {
      var _this$mapBuffer;
      let mapBuffer = (_this$mapBuffer = this.mapBuffer) !== null && _this$mapBuffer !== void 0 ? _this$mapBuffer : await this.getMapBuffer();
      if (mapBuffer) {
        // Get sourcemap from flatbuffer
        this.map = new (_sourceMap().default)(this.options.projectRoot, mapBuffer);
      }
    }
    return this.map;
  }
  setMap(map) {
    var _this$map;
    // If we have sourceContent available, it means this asset is source code without
    // a previous source map. Ensure that the map set by the transformer has the original
    // source content available.
    if (map != null && this.sourceContent != null) {
      map.setSourceContent((0, _projectPath.fromProjectPath)(this.options.projectRoot, this.value.filePath),
      // $FlowFixMe
      this.sourceContent);
      this.sourceContent = null;
    }
    this.map = map;
    this.mapBuffer = (_this$map = this.map) === null || _this$map === void 0 ? void 0 : _this$map.toBuffer();
  }
  getAST() {
    return Promise.resolve(this.ast);
  }
  // Replace the AST and mark it dirty so stale code/map are regenerated.
  setAST(ast) {
    this.ast = ast;
    this.isASTDirty = true;
    this.value.astGenerator = {
      type: ast.type,
      version: ast.version
    };
  }
  clearAST() {
    this.ast = null;
    this.isASTDirty = false;
    this.value.astGenerator = null;
  }
  // Cache keys incorporate the Parcel version so caches from other versions
  // are never reused.
  getCacheKey(key) {
    return (0, _rust().hashString)(_constants.PARCEL_VERSION + key + this.value.id);
  }
  // Create (or merge into an existing) dependency owned by this asset and
  // return its id.
  addDependency(opts) {
    // eslint-disable-next-line no-unused-vars
    let {
      env,
      symbols,
      ...rest
    } = opts;
    let dep = (0, _Dependency.createDependency)(this.options.projectRoot, {
      ...rest,
      // $FlowFixMe "convert" the $ReadOnlyMaps to the interal mutable one
      symbols,
      env: (0, _Environment.mergeEnvironments)(this.options.projectRoot, this.value.env, env),
      sourceAssetId: this.value.id,
      sourcePath: (0, _projectPath.fromProjectPath)(this.options.projectRoot, this.value.filePath)
    });
    let existing = this.value.dependencies.get(dep.id);
    if (existing) {
      (0, _Dependency.mergeDependencies)(existing, dep);
    } else {
      this.value.dependencies.set(dep.id, dep);
    }
    return dep.id;
  }
  // The invalidateOn* methods record conditions under which this asset's
  // cached transformation must be thrown away and recomputed.
  invalidateOnFileChange(filePath) {
    this.invalidations.invalidateOnFileChange.add(filePath);
  }
  invalidateOnFileCreate(invalidation) {
    this.invalidations.invalidateOnFileCreate.push((0, _utils2.invalidateOnFileCreateToInternal)(this.options.projectRoot, invalidation));
  }
  invalidateOnEnvChange(key) {
    this.invalidations.invalidateOnEnvChange.add(key);
  }
  invalidateOnBuild() {
    this.invalidations.invalidateOnBuild = true;
  }
  invalidateOnStartup() {
    this.invalidations.invalidateOnStartup = true;
  }
  getDependencies() {
    return Array.from(this.value.dependencies.values());
  }
  // Build a new UncommittedAsset from a transformer result, inheriting
  // defaults (env, pipeline, dependencies, meta, ...) from this asset.
  createChildAsset(result, plugin, configPath, configKeyPath) {
    var _result$content, _result$bundleBehavio, _result$isBundleSplit, _result$pipeline, _result$sideEffects;
    let content = (_result$content = result.content) !== null && _result$content !== void 0 ? _result$content : null;
    let asset = new UncommittedAsset({
      value: (0, _assetUtils.createAsset)(this.options.projectRoot, {
        idBase: this.idBase,
        filePath: this.value.filePath,
        type: result.type,
        bundleBehavior: (_result$bundleBehavio = result.bundleBehavior) !== null && _result$bundleBehavio !== void 0 ? _result$bundleBehavio : this.value.bundleBehavior == null ? null : _types.BundleBehaviorNames[this.value.bundleBehavior],
        isBundleSplittable: (_result$isBundleSplit = result.isBundleSplittable) !== null && _result$isBundleSplit !== void 0 ? _result$isBundleSplit : this.value.isBundleSplittable,
        isSource: this.value.isSource,
        env: (0, _Environment.mergeEnvironments)(this.options.projectRoot, this.value.env, result.env),
        // Dependencies are only carried over when the type is unchanged.
        dependencies: this.value.type === result.type ? new Map(this.value.dependencies) : new Map(),
        meta: {
          ...this.value.meta,
          ...result.meta
        },
        pipeline: (_result$pipeline = result.pipeline) !== null && _result$pipeline !== void 0 ? _result$pipeline : this.value.type === result.type ? this.value.pipeline : null,
        stats: {
          time: 0,
          size: this.value.stats.size
        },
        // $FlowFixMe
        symbols: result.symbols,
        sideEffects: (_result$sideEffects = result.sideEffects) !== null && _result$sideEffects !== void 0 ? _result$sideEffects : this.value.sideEffects,
        uniqueKey: result.uniqueKey,
        astGenerator: result.ast ? {
          type: result.ast.type,
          version: result.ast.version
        } : null,
        plugin,
        configPath,
        configKeyPath
      }),
      options: this.options,
      content,
      ast: result.ast,
      // A reused AST keeps this asset's dirtiness; a new AST is always dirty.
      isASTDirty: result.ast === this.ast ? this.isASTDirty : true,
      mapBuffer: result.map ? result.map.toBuffer() : null,
      idBase: this.idBase,
      invalidations: this.invalidations
    });
    let dependencies = result.dependencies;
    if (dependencies) {
      for (let dep of dependencies) {
        asset.addDependency(dep);
      }
    }
    return asset;
  }
  // Recompute the asset id from its current option-derived fields.
  updateId() {
    // $FlowFixMe - this is fine
    this.value.id = (0, _assetUtils.createAssetIdFromOptions)(this.value);
  }
}
exports.default = UncommittedAsset;

View File

@@ -0,0 +1,215 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _logger() {
const data = _interopRequireWildcard(require("@parcel/logger"));
_logger = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
var _ParcelConfig = _interopRequireDefault(require("./ParcelConfig"));
var _UncommittedAsset = _interopRequireDefault(require("./UncommittedAsset"));
var _assetUtils = require("./assetUtils");
var _Asset = require("./public/Asset");
var _PluginOptions = _interopRequireDefault(require("./public/PluginOptions"));
var _summarizeRequest = _interopRequireDefault(require("./summarizeRequest"));
var _projectPath = require("./projectPath");
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Runs validator plugins over a set of asset requests. Supports two modes:
 * a dedicated thread that passes all assets to `validateAll` at once, and
 * per-asset validation via `validate` on worker threads.
 */
class Validation {
  // Maps validator plugin name -> assets it should validate.
  allAssets = {};
  // Maps validator plugin name -> the loaded plugin instance.
  allValidators = {};
  constructor({
    config,
    dedicatedThread,
    options,
    requests,
    report,
    workerApi
  }) {
    this.dedicatedThread = dedicatedThread !== null && dedicatedThread !== void 0 ? dedicatedThread : false;
    this.options = options;
    this.parcelConfig = config;
    this.report = report;
    this.requests = requests;
    this.workerApi = workerApi;
  }
  // Load assets and validators, run each validator over its assets, and
  // surface warnings/errors. Plugin failures are wrapped in diagnostics
  // attributed to the validator that threw.
  async run() {
    let pluginOptions = new _PluginOptions.default(this.options);
    await this.buildAssetsAndValidators();
    await Promise.all(Object.keys(this.allValidators).map(async validatorName => {
      let assets = this.allAssets[validatorName];
      if (assets) {
        let plugin = this.allValidators[validatorName];
        let validatorLogger = new (_logger().PluginLogger)({
          origin: validatorName
        });
        let validatorTracer = new (_profiler().PluginTracer)({
          origin: validatorName,
          category: 'validator'
        });
        let validatorResults = [];
        try {
          // If the plugin supports the single-threading validateAll method, pass all assets to it.
          if (plugin.validateAll && this.dedicatedThread) {
            validatorResults = await plugin.validateAll({
              assets: assets.map(asset => new _Asset.Asset(asset)),
              options: pluginOptions,
              logger: validatorLogger,
              tracer: validatorTracer,
              resolveConfigWithPath: (configNames, assetFilePath) => (0, _utils().resolveConfig)(this.options.inputFS, assetFilePath, configNames, this.options.projectRoot)
            });
          }
          // Otherwise, pass the assets one-at-a-time
          else if (plugin.validate && !this.dedicatedThread) {
            await Promise.all(assets.map(async input => {
              let config = null;
              let publicAsset = new _Asset.Asset(input);
              // Let the plugin load per-asset configuration first, if it
              // implements getConfig.
              if (plugin.getConfig) {
                config = await plugin.getConfig({
                  asset: publicAsset,
                  options: pluginOptions,
                  logger: validatorLogger,
                  tracer: validatorTracer,
                  resolveConfig: configNames => (0, _utils().resolveConfig)(this.options.inputFS, publicAsset.filePath, configNames, this.options.projectRoot)
                });
              }
              let validatorResult = await plugin.validate({
                asset: publicAsset,
                options: pluginOptions,
                config,
                logger: validatorLogger,
                tracer: validatorTracer
              });
              validatorResults.push(validatorResult);
            }));
          }
          this.handleResults(validatorResults);
        } catch (e) {
          throw new (_diagnostic().default)({
            diagnostic: (0, _diagnostic().errorToDiagnostic)(e, {
              origin: validatorName
            })
          });
        }
      }
    }));
  }
  async buildAssetsAndValidators() {
    // Figure out what validators need to be run, and group the assets by the relevant validators.
    await Promise.all(this.requests.map(async request => {
      this.report({
        type: 'validation',
        filePath: (0, _projectPath.fromProjectPath)(this.options.projectRoot, request.filePath)
      });
      let asset = await this.loadAsset(request);
      let validators = await this.parcelConfig.getValidators(request.filePath);
      for (let validator of validators) {
        this.allValidators[validator.name] = validator.plugin;
        if (this.allAssets[validator.name]) {
          this.allAssets[validator.name].push(asset);
        } else {
          this.allAssets[validator.name] = [asset];
        }
      }
    }));
  }
  // Aggregate validator results: all errors are thrown together as one
  // diagnostic; warnings are only logged.
  handleResults(validatorResults) {
    let warnings = [];
    let errors = [];
    validatorResults.forEach(result => {
      if (result) {
        warnings.push(...result.warnings);
        errors.push(...result.errors);
      }
    });
    if (errors.length > 0) {
      throw new (_diagnostic().default)({
        diagnostic: errors
      });
    }
    if (warnings.length > 0) {
      _logger().default.warn(warnings);
    }
  }
  // Build an UncommittedAsset for a transformer request so validators can
  // inspect its content.
  async loadAsset(request) {
    let {
      filePath,
      env,
      code,
      sideEffects,
      query
    } = request;
    let {
      content,
      size,
      isSource
    } = await (0, _summarizeRequest.default)(this.options.inputFS, {
      filePath: (0, _projectPath.fromProjectPath)(this.options.projectRoot, request.filePath)
    });
    // If the transformer request passed code rather than a filename,
    // use a hash as the base for the id to ensure it is unique.
    let idBase = code != null ? (0, _rust().hashString)(code) : (0, _projectPath.fromProjectPathRelative)(filePath);
    return new _UncommittedAsset.default({
      idBase,
      value: (0, _assetUtils.createAsset)(this.options.projectRoot, {
        idBase,
        filePath: filePath,
        isSource,
        type: _path().default.extname((0, _projectPath.fromProjectPathRelative)(filePath)).slice(1),
        query,
        env: env,
        stats: {
          time: 0,
          size
        },
        sideEffects: sideEffects
      }),
      options: this.options,
      content
    });
  }
}
exports.default = Validation;

View File

@@ -0,0 +1,279 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = applyRuntimes;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _AssetGraph = require("./AssetGraph");
var _BundleGraph = _interopRequireDefault(require("./public/BundleGraph"));
var _BundleGraph2 = _interopRequireWildcard(require("./BundleGraph"));
var _Bundle = require("./public/Bundle");
function _logger() {
const data = require("@parcel/logger");
_logger = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
var _Dependency = require("./public/Dependency");
var _Environment = require("./Environment");
var _AssetGraphRequest = _interopRequireDefault(require("./requests/AssetGraphRequest"));
var _DevDepRequest = require("./requests/DevDepRequest");
var _projectPath = require("./projectPath");
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Derive a deterministic name for a runtime bundle from its sibling bundle.
// We don't run custom namers on runtime bundles as the runtime assumes that
// they are located at the same nesting level as their owning bundle. Custom
// naming could be added in future as long as the custom name is validated.
function nameRuntimeBundle(bundle, siblingBundle) {
  const hashReference = bundle.hashReference;
  const siblingName = (0, _nullthrows().default)(siblingBundle.name);
  // Strip the sibling's own hash from standard file patterns
  // e.g. 'main.[hash].js' -> 'main.js' or 'main~[hash].js' -> 'main.js'
  const withoutHash = siblingName.replace(new RegExp(`[\\.~\\-_]?${siblingBundle.hashReference}`), '');
  // Rewrite the extension so the file ends with 'runtime.[hash].<type>'
  const name = withoutHash.replace(`.${bundle.type}`, `.runtime.${hashReference}.${bundle.type}`);
  bundle.name = name;
  bundle.displayName = name.replace(hashReference, '[hash]');
}
/**
 * Runs all configured runtime plugins over every bundle, inserts the assets
 * they produce into the bundle graph, and wires those assets into their
 * target bundles. Returns the set of changed assets from building the
 * runtime asset graph.
 */
async function applyRuntimes({
  bundleGraph,
  config,
  options,
  pluginOptions,
  api,
  optionsRef,
  previousDevDeps,
  devDepRequests,
  configs
}) {
  let runtimes = await config.getRuntimes();
  let connections = [];
  // As manifest bundles may be added during runtimes we process them in reverse topological
  // sort order. This allows bundles to be added to their bundle groups before they are referenced
  // by other bundle groups by loader runtimes
  let bundles = [];
  bundleGraph.traverseBundles({
    exit(bundle) {
      bundles.push(bundle);
    }
  });
  for (let bundle of bundles) {
    for (let runtime of runtimes) {
      let measurement;
      try {
        var _configs$get;
        const namedBundle = _Bundle.NamedBundle.get(bundle, bundleGraph, options);
        measurement = _profiler().tracer.createMeasurement(runtime.name, 'applyRuntime', namedBundle.displayName);
        let applied = await runtime.plugin.apply({
          bundle: namedBundle,
          bundleGraph: new _BundleGraph.default(bundleGraph, _Bundle.NamedBundle.get.bind(_Bundle.NamedBundle), options),
          config: (_configs$get = configs.get(runtime.name)) === null || _configs$get === void 0 ? void 0 : _configs$get.result,
          options: pluginOptions,
          logger: new (_logger().PluginLogger)({
            origin: runtime.name
          }),
          tracer: new (_profiler().PluginTracer)({
            origin: runtime.name,
            category: 'applyRuntime'
          })
        });
        if (applied) {
          // A runtime may return a single asset or an array of them.
          let runtimeAssets = Array.isArray(applied) ? applied : [applied];
          for (let {
            code,
            dependency,
            filePath,
            isEntry,
            env,
            priority
          } of runtimeAssets) {
            // Synthesize a unique, content-hashed source file name next to
            // the runtime's declared filePath.
            let sourceName = _path().default.join(_path().default.dirname(filePath), `runtime-${(0, _rust().hashString)(code)}.${bundle.type}`);
            let assetGroup = {
              code,
              filePath: (0, _projectPath.toProjectPath)(options.projectRoot, sourceName),
              env: (0, _Environment.mergeEnvironments)(options.projectRoot, bundle.env, env),
              // Runtime assets should be considered source, as they should be
              // e.g. compiled to run in the target environment
              isSource: true
            };
            let connectionBundle = bundle;
            // 'parallel' priority runtime assets get their own manifest
            // bundle referenced alongside the owning bundle.
            if (priority === 'parallel' && !bundle.needsStableName) {
              let bundleGroups = bundleGraph.getBundleGroupsContainingBundle(bundle);
              connectionBundle = (0, _nullthrows().default)(bundleGraph.createBundle({
                type: bundle.type,
                needsStableName: false,
                env: bundle.env,
                target: bundle.target,
                uniqueKey: 'runtime-manifest:' + bundle.id,
                shouldContentHash: options.shouldContentHash
              }));
              for (let bundleGroup of bundleGroups) {
                bundleGraph.addBundleToBundleGroup(connectionBundle, bundleGroup);
              }
              bundleGraph.createBundleReference(bundle, connectionBundle);
              nameRuntimeBundle(connectionBundle, bundle);
            }
            connections.push({
              bundle: connectionBundle,
              assetGroup,
              dependency,
              isEntry
            });
          }
        }
      } catch (e) {
        // Attribute plugin failures to the runtime that threw.
        throw new (_diagnostic().default)({
          diagnostic: (0, _diagnostic().errorToDiagnostic)(e, {
            origin: runtime.name
          })
        });
      } finally {
        measurement && measurement.end();
      }
    }
  }
  // Correct connection order after generating runtimes in reverse order
  connections.reverse();
  // Add dev deps for runtime plugins AFTER running them, to account for lazy require().
  for (let runtime of runtimes) {
    let devDepRequest = await (0, _DevDepRequest.createDevDependency)({
      specifier: runtime.name,
      resolveFrom: runtime.resolveFrom
    }, previousDevDeps, options);
    devDepRequests.set(`${devDepRequest.specifier}:${(0, _projectPath.fromProjectPathRelative)(devDepRequest.resolveFrom)}`, devDepRequest);
    await (0, _DevDepRequest.runDevDepRequest)(api, devDepRequest);
  }
  let {
    assetGraph: runtimesAssetGraph,
    changedAssets
  } = await reconcileNewRuntimes(api, connections, optionsRef);
  let runtimesGraph = _BundleGraph2.default.fromAssetGraph(runtimesAssetGraph, options.mode === 'production', bundleGraph._publicIdByAssetId, bundleGraph._assetPublicIds);
  // Merge the runtimes graph into the main bundle graph.
  bundleGraph.merge(runtimesGraph);
  for (let [assetId, publicId] of runtimesGraph._publicIdByAssetId) {
    bundleGraph._publicIdByAssetId.set(assetId, publicId);
    bundleGraph._assetPublicIds.add(publicId);
  }
  for (let {
    bundle,
    assetGroup,
    dependency,
    isEntry
  } of connections) {
    let assetGroupNode = (0, _AssetGraph.nodeFromAssetGroup)(assetGroup);
    let assetGroupAssetNodeIds = runtimesAssetGraph.getNodeIdsConnectedFrom(runtimesAssetGraph.getNodeIdByContentKey(assetGroupNode.id));
    (0, _assert().default)(assetGroupAssetNodeIds.length === 1);
    let runtimeNodeId = assetGroupAssetNodeIds[0];
    let runtimeNode = (0, _nullthrows().default)(runtimesAssetGraph.getNode(runtimeNodeId));
    (0, _assert().default)(runtimeNode.type === 'asset');
    let resolution = dependency && bundleGraph.getResolvedAsset((0, _Dependency.dependencyToInternalDependency)(dependency), bundle);
    let runtimesGraphRuntimeNodeId = runtimesGraph._graph.getNodeIdByContentKey(runtimeNode.id);
    // Collect assets under this runtime that are already reachable from the
    // bundle (or are the resolved asset itself) so they aren't duplicated.
    let duplicatedContentKeys = new Set();
    runtimesGraph._graph.traverse((nodeId, _, actions) => {
      let node = (0, _nullthrows().default)(runtimesGraph._graph.getNode(nodeId));
      if (node.type !== 'dependency') {
        return;
      }
      let assets = runtimesGraph._graph.getNodeIdsConnectedFrom(nodeId).map(assetNodeId => {
        let assetNode = (0, _nullthrows().default)(runtimesGraph._graph.getNode(assetNodeId));
        (0, _assert().default)(assetNode.type === 'asset');
        return assetNode.value;
      });
      for (let asset of assets) {
        if (bundleGraph.isAssetReachableFromBundle(asset, bundle) || (resolution === null || resolution === void 0 ? void 0 : resolution.id) === asset.id) {
          duplicatedContentKeys.add(asset.id);
          actions.skipChildren();
        }
      }
    }, runtimesGraphRuntimeNodeId);
    let bundleNodeId = bundleGraph._graph.getNodeIdByContentKey(bundle.id);
    let bundleGraphRuntimeNodeId = bundleGraph._graph.getNodeIdByContentKey(runtimeNode.id); // the node id is not constant between graphs
    // Add 'contains' edges from the bundle to every non-duplicated runtime
    // asset/dependency node.
    runtimesGraph._graph.traverse((nodeId, _, actions) => {
      let node = (0, _nullthrows().default)(runtimesGraph._graph.getNode(nodeId));
      if (node.type === 'asset' || node.type === 'dependency') {
        if (duplicatedContentKeys.has(node.id)) {
          actions.skipChildren();
          return;
        }
        const bundleGraphNodeId = bundleGraph._graph.getNodeIdByContentKey(node.id); // the node id is not constant between graphs
        bundleGraph._graph.addEdge(bundleNodeId, bundleGraphNodeId, _BundleGraph2.bundleGraphEdgeTypes.contains);
      }
    }, runtimesGraphRuntimeNodeId);
    if (isEntry) {
      bundleGraph._graph.addEdge(bundleNodeId, bundleGraphRuntimeNodeId);
      bundle.entryAssetIds.unshift(runtimeNode.id);
    }
    if (dependency == null) {
      // Verify this asset won't become an island
      (0, _assert().default)(bundleGraph._graph.getNodeIdsConnectedTo(bundleGraphRuntimeNodeId).length > 0, 'Runtime must have an inbound dependency or be an entry');
    } else {
      let dependencyNodeId = bundleGraph._graph.getNodeIdByContentKey(dependency.id);
      bundleGraph._graph.addEdge(dependencyNodeId, bundleGraphRuntimeNodeId);
    }
  }
  return changedAssets;
}
// Run an AssetGraphRequest over the asset groups produced by runtime plugins
// and return the resulting {assetGraph, changedAssets} promise.
function reconcileNewRuntimes(api, connections, optionsRef) {
  const assetGroups = connections.map(connection => connection.assetGroup);
  const request = (0, _AssetGraphRequest.default)({
    name: 'Runtimes',
    assetGroups,
    optionsRef
  });
  // Force the request so the graph is rebuilt with the new runtime assets.
  return api.runRequest(request, {
    force: true
  });
}

View File

@@ -0,0 +1,191 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createAsset = createAsset;
exports.createAssetIdFromOptions = createAssetIdFromOptions;
exports.generateFromAST = generateFromAST;
exports.getInvalidationHash = getInvalidationHash;
exports.getInvalidationId = getInvalidationId;
function _stream() {
const data = require("stream");
_stream = function () {
return data;
};
return data;
}
function _logger() {
const data = require("@parcel/logger");
_logger = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _CommittedAsset = _interopRequireDefault(require("./CommittedAsset"));
var _UncommittedAsset = _interopRequireDefault(require("./UncommittedAsset"));
var _loadParcelPlugin = _interopRequireDefault(require("./loadParcelPlugin"));
var _Asset = require("./public/Asset");
var _PluginOptions = _interopRequireDefault(require("./public/PluginOptions"));
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _utils2 = require("./utils");
var _buildCache = require("./buildCache");
var _projectPath = require("./projectPath");
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
var _types = require("./types");
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Compute a stable id for an asset from its identifying creation options:
// base id (or project-relative file path), type, environment id, unique key,
// pipeline and query.
function createAssetIdFromOptions(options) {
  const uniqueKey = options.uniqueKey ?? '';
  const idBase = options.idBase != null ? options.idBase : (0, _projectPath.fromProjectPathRelative)(options.filePath);
  const pipeline = options.pipeline ?? '';
  const query = options.query ?? '';
  return (0, _rust().hashString)(idBase + options.type + options.env.id + uniqueKey + ':' + pipeline + ':' + query);
}
// Build an internal asset record from creation options, filling in defaults
// (committed=false, isBundleSplittable=true, sideEffects=true) and converting
// symbol locations to internal source locations.
function createAsset(projectRoot, options) {
  const toInternalSymbol = ([name, symbol]) => [name, {
    local: symbol.local,
    meta: symbol.meta,
    loc: (0, _utils2.toInternalSourceLocation)(projectRoot, symbol.loc)
  }];
  return {
    // Reuse an explicit id if given, otherwise derive one from the options.
    id: options.id != null ? options.id : createAssetIdFromOptions(options),
    committed: options.committed ?? false,
    filePath: options.filePath,
    query: options.query,
    bundleBehavior: options.bundleBehavior ? _types.BundleBehavior[options.bundleBehavior] : null,
    isBundleSplittable: options.isBundleSplittable ?? true,
    type: options.type,
    contentKey: options.contentKey,
    mapKey: options.mapKey,
    astKey: options.astKey,
    astGenerator: options.astGenerator,
    dependencies: options.dependencies || new Map(),
    isSource: options.isSource,
    outputHash: options.outputHash,
    pipeline: options.pipeline,
    env: options.env,
    meta: options.meta || {},
    stats: options.stats,
    symbols: options.symbols && new Map([...options.symbols].map(toInternalSymbol)),
    sideEffects: options.sideEffects ?? true,
    uniqueKey: options.uniqueKey,
    plugin: options.plugin,
    configPath: options.configPath,
    configKeyPath: options.configKeyPath
  };
}
// Memoizes code generation per underlying asset value, so each AST is only
// printed once even if multiple callers request the output.
const generateResults = new WeakMap();
function generateFromAST(asset) {
  const cached = generateResults.get(asset.value);
  if (cached != null) {
    return cached;
  }
  // Store the promise itself so concurrent callers share one generation.
  const pending = _generateFromAST(asset);
  generateResults.set(asset.value, pending);
  return pending;
}
// Invoke the originating transformer plugin's generate() to print code (and
// optionally a map) from the asset's AST, write the results to the cache,
// and return {content, map}. Throws if the asset has no AST or the plugin
// cannot generate.
async function _generateFromAST(asset) {
  var _plugin$generate;
  let ast = await asset.getAST();
  if (ast == null) {
    throw new Error('Asset has no AST');
  }
  // Reload the plugin that produced this asset so its generate() is used.
  let pluginName = (0, _nullthrows().default)(asset.value.plugin);
  let {
    plugin
  } = await (0, _loadParcelPlugin.default)(pluginName, (0, _projectPath.fromProjectPath)(asset.options.projectRoot, (0, _nullthrows().default)(asset.value.configPath)), (0, _nullthrows().default)(asset.value.configKeyPath), asset.options);
  let generate = (_plugin$generate = plugin.generate) === null || _plugin$generate === void 0 ? void 0 : _plugin$generate.bind(plugin);
  if (!generate) {
    throw new Error(`${pluginName} does not have a generate method`);
  }
  let {
    content,
    map
  } = await generate({
    asset: new _Asset.Asset(asset),
    ast,
    options: new _PluginOptions.default(asset.options),
    logger: new (_logger().PluginLogger)({
      origin: pluginName
    }),
    tracer: new (_profiler().PluginTracer)({
      origin: pluginName,
      category: 'asset-generate'
    })
  });
  let mapBuffer = map === null || map === void 0 ? void 0 : map.toBuffer();
  // Store the results in the cache so we can avoid generating again next time
  await Promise.all([asset.options.cache.setStream((0, _nullthrows().default)(asset.value.contentKey), (0, _utils().blobToStream)(content)), mapBuffer != null && asset.options.cache.setBlob((0, _nullthrows().default)(asset.value.mapKey), mapBuffer)]);
  return {
    // A stream can only be read once, so hand back a fresh stream from the
    // cache rather than the (now consumed) original.
    content: content instanceof _stream().Readable ? asset.options.cache.getStream((0, _nullthrows().default)(asset.value.contentKey)) : content,
    map
  };
}
// Produce a stable, namespaced string id for an invalidation record
// ('file:…', 'env:…' or 'option:…'); throws on unknown kinds.
function getInvalidationId(invalidation) {
  if (invalidation.type === 'file') {
    return 'file:' + (0, _projectPath.fromProjectPathRelative)(invalidation.filePath);
  }
  if (invalidation.type === 'env') {
    return 'env:' + invalidation.key;
  }
  if (invalidation.type === 'option') {
    return 'option:' + invalidation.key;
  }
  throw new Error('Unknown invalidation type: ' + invalidation.type);
}
// Per-build cache of file-hash promises, keyed by project path, so each
// file is hashed at most once per build.
const hashCache = (0, _buildCache.createBuildCache)();
// Compute a single hash summarizing the current state of a set of
// invalidations (file contents, env values, option values). Returns '' when
// there are no invalidations.
async function getInvalidationHash(invalidations, options) {
  if (invalidations.length === 0) {
    return '';
  }
  // Sort by invalidation id so the hash is order-independent.
  let sortedInvalidations = invalidations.slice().sort((a, b) => getInvalidationId(a) < getInvalidationId(b) ? -1 : 1);
  let hashes = '';
  for (let invalidation of sortedInvalidations) {
    switch (invalidation.type) {
      case 'file':
        {
          // Only recompute the hash of this file if we haven't seen it already during this build.
          // Note: the *promise* is cached, so concurrent callers share one read.
          let fileHash = hashCache.get(invalidation.filePath);
          if (fileHash == null) {
            fileHash = (0, _utils().hashFile)(options.inputFS, (0, _projectPath.fromProjectPath)(options.projectRoot, invalidation.filePath));
            hashCache.set(invalidation.filePath, fileHash);
          }
          hashes += await fileHash;
          break;
        }
      case 'env':
        hashes += invalidation.key + ':' + (options.env[invalidation.key] || '');
        break;
      case 'option':
        hashes += invalidation.key + ':' + (0, _utils2.hashFromOption)(options[invalidation.key]);
        break;
      default:
        throw new Error('Unknown invalidation type: ' + invalidation.type);
    }
  }
  return (0, _rust().hashString)(hashes);
}

View File

@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.clearBuildCaches = clearBuildCaches;
exports.createBuildCache = createBuildCache;
// Registry of every cache handed out by createBuildCache(), so they can all be
// emptied together between builds.
const buildCaches = [];

// Creates a Map-backed cache whose contents are discarded by clearBuildCaches().
function createBuildCache() {
  const cache = new Map();
  buildCaches.push(cache);
  return cache;
}

// Empties every cache previously created via createBuildCache(). The Map
// instances themselves stay valid; only their entries are removed.
function clearBuildCaches() {
  buildCaches.forEach(cache => cache.clear());
}

View File

@@ -0,0 +1,21 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.VALID = exports.STARTUP = exports.PARCEL_VERSION = exports.OPTION_CHANGE = exports.INITIAL_BUILD = exports.HASH_REF_REGEX = exports.HASH_REF_PREFIX = exports.HASH_REF_HASH_LEN = exports.FILE_UPDATE = exports.FILE_DELETE = exports.FILE_CREATE = exports.ERROR = exports.ENV_CHANGE = void 0;
var _package = require("../package.json");
// $FlowFixMe
// Current @parcel/core version (from package.json); cached data is keyed on it
// so caches are invalidated across Parcel upgrades.
const PARCEL_VERSION = exports.PARCEL_VERSION = _package.version;
// Prefix of placeholder references embedded in bundle contents before the
// final content hashes are known.
const HASH_REF_PREFIX = exports.HASH_REF_PREFIX = 'HASH_REF_';
// Number of word characters that follow the prefix in a hash reference.
const HASH_REF_HASH_LEN = exports.HASH_REF_HASH_LEN = 16;
// Matches every hash reference (prefix + 16 word characters) in bundle output.
const HASH_REF_REGEX = exports.HASH_REF_REGEX = new RegExp(`${HASH_REF_PREFIX}\\w{${HASH_REF_HASH_LEN}}`, 'g');
// Bitflag reasons a cached request may need to re-run; values are combined
// with bitwise OR, and VALID (0) means no invalidation applies.
const VALID = exports.VALID = 0;
const INITIAL_BUILD = exports.INITIAL_BUILD = 1 << 0;
const FILE_CREATE = exports.FILE_CREATE = 1 << 1;
const FILE_UPDATE = exports.FILE_UPDATE = 1 << 2;
const FILE_DELETE = exports.FILE_DELETE = 1 << 3;
const ENV_CHANGE = exports.ENV_CHANGE = 1 << 4;
const OPTION_CHANGE = exports.OPTION_CHANGE = 1 << 5;
const STARTUP = exports.STARTUP = 1 << 6;
const ERROR = exports.ERROR = 1 << 7;

View File

@@ -0,0 +1,212 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = dumpGraphToGraphViz;
var _BundleGraph = require("./BundleGraph");
var _RequestTracker = require("./RequestTracker");
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _graph() {
const data = require("@parcel/graph");
_graph = function () {
return data;
};
return data;
}
var _projectPath = require("./projectPath");
var _types = require("./types");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Fill colors for graphviz nodes, keyed by graph node type. Unknown types fall
// back to the 'default' entry (see the `COLORS[node.type || 'default']` lookup).
const COLORS = {
  root: 'gray',
  asset: 'green',
  dependency: 'orange',
  transformer_request: 'cyan',
  file: 'gray',
  default: 'white'
};
// Edge colors keyed by edge-type *name*; at render time numeric edge types are
// mapped back to names via the caller-provided edgeTypes map, then looked up here.
const TYPE_COLORS = {
  // bundle graph
  bundle: 'blue',
  contains: 'grey',
  internal_async: 'orange',
  references: 'red',
  sibling: 'green',
  // asset graph
  // request graph
  invalidated_by_create: 'green',
  invalidated_by_create_above: 'orange',
  // Fixed key: this was previously misspelled 'invalidate_by_update', which
  // never matched the 'invalidated_by_update' edge-type name, so those edges
  // were silently left uncolored.
  invalidated_by_update: 'cyan',
  invalidated_by_delete: 'red'
};
// Debug utility: serializes `graph` to graphviz "dot" form, either handing the
// dot source to a globalThis.PARCEL_DUMP_GRAPHVIZ hook (REPL builds) or
// rendering a PNG to a temp file. Enabled only when PARCEL_DUMP_GRAPHVIZ is
// set; in production builds it is a no-op unless PARCEL_BUILD_REPL is set
// (the string comparison below was constant-folded from a NODE_ENV check).
async function dumpGraphToGraphViz(graph, name, edgeTypes) {
  var _globalThis$PARCEL_DU;
  if ("production" === 'production' && !process.env.PARCEL_BUILD_REPL) {
    return;
  }
  // REPL builds read the mode from the global hook; otherwise from the env var.
  let mode = process.env.PARCEL_BUILD_REPL ? // $FlowFixMe
  (_globalThis$PARCEL_DU = globalThis.PARCEL_DUMP_GRAPHVIZ) === null || _globalThis$PARCEL_DU === void 0 ? void 0 : _globalThis$PARCEL_DU.mode : process.env.PARCEL_DUMP_GRAPHVIZ;
  // $FlowFixMe[invalid-compare]
  if (mode == null || mode == false) {
    return;
  }
  // mode === 'symbols' adds per-dependency/per-asset symbol detail to labels.
  let detailedSymbols = mode === 'symbols';
  // Lazy require so graphviz is only loaded when dumping is actually enabled.
  let GraphVizGraph = require('graphviz/lib/deps/graph').Graph;
  let g = new GraphVizGraph(null, 'G');
  g.type = 'digraph';
  // Emit one graphviz node per (non-null) graph node, colored by node type,
  // with a label describing the node's contents.
  // $FlowFixMe
  for (let [id, node] of graph.nodes.entries()) {
    if (node == null) continue;
    let n = g.addNode(nodeId(id));
    // $FlowFixMe default is fine. Not every type needs to be in the map.
    n.set('color', COLORS[node.type || 'default']);
    n.set('shape', 'box');
    n.set('style', 'filled');
    let label;
    if (typeof node === 'string') {
      label = node;
    } else if (node.assets) {
      // Node carrying an asset set: label lists the asset file names (basename
      // only), the source bundles, and the bundle behavior.
      var _node$bundleBehavior;
      label = `(${nodeId(id)}), (assetIds: ${[...node.assets].map(a => {
        let arr = a.filePath.split('/');
        return arr[arr.length - 1];
      }).join(', ')}) (sourceBundles: ${[...node.sourceBundles].join(', ')}) (bb ${(_node$bundleBehavior = node.bundleBehavior) !== null && _node$bundleBehavior !== void 0 ? _node$bundleBehavior : 'none'})`;
    } else if (node.type) {
      label = `[${(0, _graph().fromNodeId)(id)}] ${node.type || 'No Type'}: [${node.id}]: `;
      if (node.type === 'dependency') {
        // Dependency nodes: specifier plus flags (priority, optional, url,
        // deferred, excluded), env description, and optionally symbol info.
        label += node.value.specifier;
        let parts = [];
        if (node.value.priority !== _types.Priority.sync) {
          // Reverse-lookup the priority's name from its numeric value.
          var _Object$entries$find;
          parts.push((_Object$entries$find = Object.entries(_types.Priority).find(([, v]) => v === node.value.priority)) === null || _Object$entries$find === void 0 ? void 0 : _Object$entries$find[0]);
        }
        if (node.value.isOptional) parts.push('optional');
        if (node.value.specifierType === _types.SpecifierType.url) parts.push('url');
        if (node.hasDeferred) parts.push('deferred');
        if (node.deferred) parts.push('deferred');
        if (node.excluded) parts.push('excluded');
        if (parts.length) label += ' (' + parts.join(', ') + ')';
        if (node.value.env) label += ` (${getEnvDescription(node.value.env)})`;
        let depSymbols = node.value.symbols;
        if (detailedSymbols) {
          if (depSymbols) {
            if (depSymbols.size) {
              label += '\\nsymbols: ' + [...depSymbols].map(([e, {
                local
              }]) => [e, local]).join(';');
            }
            let weakSymbols = [...depSymbols].filter(([, {
              isWeak
            }]) => isWeak).map(([s]) => s);
            if (weakSymbols.length) {
              label += '\\nweakSymbols: ' + weakSymbols.join(',');
            }
            if (node.usedSymbolsUp.size > 0) {
              // null asset = external, undefined = ambiguous (see ternary below).
              label += '\\nusedSymbolsUp: ' + [...node.usedSymbolsUp].map(([s, sAsset]) => {
                var _sAsset$symbol;
                return sAsset ? `${s}(${sAsset.asset}.${(_sAsset$symbol = sAsset.symbol) !== null && _sAsset$symbol !== void 0 ? _sAsset$symbol : ''})` : sAsset === null ? `${s}(external)` : `${s}(ambiguous)`;
              }).join(',');
            }
            if (node.usedSymbolsDown.size > 0) {
              label += '\\nusedSymbolsDown: ' + [...node.usedSymbolsDown].join(',');
            }
            // if (node.usedSymbolsDownDirty) label += '\\nusedSymbolsDownDirty';
            // if (node.usedSymbolsUpDirtyDown)
            //   label += '\\nusedSymbolsUpDirtyDown';
            // if (node.usedSymbolsUpDirtyUp) label += '\\nusedSymbolsUpDirtyUp';
          } else {
            label += '\\nsymbols: cleared';
          }
        }
      } else if (node.type === 'asset') {
        // Asset nodes: file basename + asset type, plus optional symbol detail.
        label += _path().default.basename((0, _projectPath.fromProjectPathRelative)(node.value.filePath)) + '#' + node.value.type;
        if (detailedSymbols) {
          if (!node.value.symbols) {
            label += '\\nsymbols: cleared';
          } else if (node.value.symbols.size) {
            label += '\\nsymbols: ' + [...node.value.symbols].map(([e, {
              local
            }]) => [e, local]).join(';');
          }
          if (node.usedSymbols.size) {
            label += '\\nusedSymbols: ' + [...node.usedSymbols].join(',');
          }
          // if (node.usedSymbolsDownDirty) label += '\\nusedSymbolsDownDirty';
          // if (node.usedSymbolsUpDirty) label += '\\nusedSymbolsUpDirty';
        } else {
          label += '\\nsymbols: cleared';
        }
      } else if (node.type === 'asset_group') {
        if (node.deferred) label += '(deferred)';
      } else if (node.type === 'file') {
        label += _path().default.basename(node.id);
      } else if (node.type === 'transformer_request') {
        label += _path().default.basename(node.value.filePath) + ` (${getEnvDescription(node.value.env)})`;
      } else if (node.type === 'bundle') {
        var _node$value$bundleBeh;
        let parts = [];
        if (node.value.needsStableName) parts.push('stable name');
        parts.push(node.value.name);
        parts.push('bb:' + ((_node$value$bundleBeh = node.value.bundleBehavior) !== null && _node$value$bundleBeh !== void 0 ? _node$value$bundleBeh : 'null'));
        if (node.value.isPlaceholder) parts.push('placeholder');
        if (parts.length) label += ' (' + parts.join(', ') + ')';
        if (node.value.env) label += ` (${getEnvDescription(node.value.env)})`;
      } else if (node.type === 'request') {
        label = node.requestType + ':' + node.id;
      }
    }
    n.set('label', label);
  }
  // Invert the edgeTypes map (name -> number) so numeric edge types can be
  // mapped back to their names for coloring.
  let edgeNames;
  if (edgeTypes) {
    edgeNames = Object.fromEntries(Object.entries(edgeTypes).map(([k, v]) => [v, k]));
  }
  for (let edge of graph.getAllEdges()) {
    let gEdge = g.addEdge(nodeId(edge.from), nodeId(edge.to));
    let color = null;
    // Edge type 1 is left uncolored (presumably the default edge type — verify
    // against @parcel/graph's numbering).
    if (edge.type != 1 && edgeNames) {
      color = TYPE_COLORS[edgeNames[edge.type]];
    }
    if (color != null) {
      gEdge.set('color', color);
    }
  }
  if (process.env.PARCEL_BUILD_REPL) {
    // REPL builds: hand the dot source to the global hook instead of rendering.
    var _globalThis$PARCEL_DU2;
    // $FlowFixMe
    (_globalThis$PARCEL_DU2 = globalThis.PARCEL_DUMP_GRAPHVIZ) === null || _globalThis$PARCEL_DU2 === void 0 || _globalThis$PARCEL_DU2.call(globalThis, name, g.to_dot());
  } else {
    // Otherwise render a PNG into a temp file and log its location.
    const tempy = require('tempy');
    let tmp = tempy.file({
      name: `parcel-${name}.png`
    });
    await g.output('png', tmp);
    // eslint-disable-next-line no-console
    console.log('Dumped', tmp);
  }
}
// Formats a graph node id as a graphviz-safe node name.
function nodeId(graphNodeId) {
  // $FlowFixMe
  return `node${graphNodeId}`;
}
// Produces a short human-readable summary of an environment for graph labels,
// preferring browser targets, then node, then electron. Returns '' when no
// known engine field is present.
function getEnvDescription(env) {
  const {
    engines
  } = env;
  let description;
  if (typeof engines.browsers === 'string') {
    description = `${env.context}: ${engines.browsers}`;
  } else if (Array.isArray(engines.browsers)) {
    description = `${env.context}: ${engines.browsers.join(', ')}`;
  } else if (engines.node) {
    description = `node: ${engines.node}`;
  } else if (engines.electron) {
    description = `electron: ${engines.electron}`;
  }
  return description != null ? description : '';
}

View File

@@ -0,0 +1,81 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "BuildError", {
enumerable: true,
get: function () {
return _Parcel.BuildError;
}
});
Object.defineProperty(exports, "INTERNAL_RESOLVE", {
enumerable: true,
get: function () {
return _Parcel.INTERNAL_RESOLVE;
}
});
Object.defineProperty(exports, "INTERNAL_TRANSFORM", {
enumerable: true,
get: function () {
return _Parcel.INTERNAL_TRANSFORM;
}
});
Object.defineProperty(exports, "Parcel", {
enumerable: true,
get: function () {
return _Parcel.default;
}
});
Object.defineProperty(exports, "createWorkerFarm", {
enumerable: true,
get: function () {
return _Parcel.createWorkerFarm;
}
});
Object.defineProperty(exports, "default", {
enumerable: true,
get: function () {
return _Parcel.default;
}
});
Object.defineProperty(exports, "deserialize", {
enumerable: true,
get: function () {
return _serializer.deserialize;
}
});
Object.defineProperty(exports, "prepareForSerialization", {
enumerable: true,
get: function () {
return _serializer.prepareForSerialization;
}
});
Object.defineProperty(exports, "registerSerializableClass", {
enumerable: true,
get: function () {
return _serializer.registerSerializableClass;
}
});
Object.defineProperty(exports, "restoreDeserializedObject", {
enumerable: true,
get: function () {
return _serializer.restoreDeserializedObject;
}
});
Object.defineProperty(exports, "serialize", {
enumerable: true,
get: function () {
return _serializer.serialize;
}
});
Object.defineProperty(exports, "unregisterSerializableClass", {
enumerable: true,
get: function () {
return _serializer.unregisterSerializableClass;
}
});
var _serializer = require("./serializer");
var _Parcel = _interopRequireWildcard(require("./Parcel"));
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }

View File

@@ -0,0 +1,55 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = loadEnv;
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _dotenv() {
const data = _interopRequireDefault(require("dotenv"));
_dotenv = function () {
return data;
};
return data;
}
function _dotenvExpand() {
const data = _interopRequireDefault(require("dotenv-expand"));
_dotenvExpand = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Loads environment variables from .env files found by walking up from
 * `filePath` to the project root, expanding variable references with
 * dotenv-expand. Later (more NODE_ENV-specific) files take precedence in the
 * merged result. Does not write into `process.env`.
 */
async function loadEnv(env, fs, filePath, projectRoot) {
  const NODE_ENV = env.NODE_ENV != null ? env.NODE_ENV : 'development';
  const dotenvFiles = ['.env'];
  // Don't include `.env.local` for `test` environment
  // since normally you expect tests to produce the same
  // results for everyone
  if (NODE_ENV !== 'test') {
    dotenvFiles.push('.env.local');
  }
  dotenvFiles.push(`.env.${NODE_ENV}`, `.env.${NODE_ENV}.local`);
  const parsedEnvs = await Promise.all(dotenvFiles.map(async dotenvFile => {
    const envPath = await (0, _utils().resolveConfig)(fs, filePath, [dotenvFile], projectRoot);
    if (envPath == null) {
      return;
    }
    // `ignoreProcessEnv` prevents dotenv-expand from writing values into `process.env`:
    // https://github.com/motdotla/dotenv-expand/blob/ddb73d02322fe8522b4e05b73e1c1ad24ea7c14a/lib/main.js#L5
    const expanded = (0, _dotenvExpand().default)({
      parsed: _dotenv().default.parse(await fs.readFile(envPath)),
      ignoreProcessEnv: true
    });
    if (expanded.error != null) {
      throw expanded.error;
    }
    return expanded.parsed;
  }));
  // Later entries (more specific files) override earlier ones.
  return Object.assign({}, ...parsedEnvs);
}

View File

@@ -0,0 +1,187 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = loadPlugin;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _semver() {
const data = _interopRequireDefault(require("semver"));
_semver = function () {
return data;
};
return data;
}
function _logger() {
const data = _interopRequireDefault(require("@parcel/logger"));
_logger = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _projectPath = require("./projectPath");
var _package = require("../package.json");
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Platform-specific "/node_modules/" path segment, used to detect whether a
// config file lives inside an installed (config) package.
const NODE_MODULES = `${_path().default.sep}node_modules${_path().default.sep}`;
// Well-known symbol under which a loaded plugin module exposes its plugin
// instance (read from the module's export in loadPlugin below).
const CONFIG = Symbol.for('parcel-plugin-config');
/**
 * Resolves and loads a Parcel plugin package by name.
 *
 * @param pluginName npm package name, or a '.'-prefixed local path
 * @param configPath path of the config file that references the plugin
 * @param keyPath JSON key path within the config (used for diagnostic code frames)
 * @param options Parcel options (inputFS, packageManager, projectRoot, shouldAutoInstall, ...)
 * @returns {plugin, version, resolveFrom, range} — the plugin instance, the
 *          resolved package version, the project-relative resolve origin, and
 *          the semver range used for auto-install (if any)
 * @throws a ThrowableDiagnostic when the plugin cannot be resolved, its version
 *         cannot be determined from a config package, or its engines.parcel
 *         range does not match the running Parcel version.
 */
async function loadPlugin(pluginName, configPath, keyPath, options) {
  let resolveFrom = configPath;
  let range;
  if (resolveFrom.includes(NODE_MODULES)) {
    var _configPkg$config$dep;
    // Config packages can reference plugins, but cannot contain other plugins within them.
    // This forces every published plugin to be published separately so they can be mixed and matched if needed.
    if (pluginName.startsWith('.')) {
      let configContents = await options.inputFS.readFile(configPath, 'utf8');
      throw new (_diagnostic().default)({
        diagnostic: {
          message: (0, _diagnostic().md)`Local plugins are not supported in Parcel config packages. Please publish "${pluginName}" as a separate npm package.`,
          origin: '@parcel/core',
          codeFrames: keyPath ? [{
            filePath: configPath,
            language: 'json5',
            code: configContents,
            codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(configContents, [{
              key: keyPath,
              type: 'value'
            }])
          }] : undefined
        }
      });
    }
    let configPkg = await (0, _utils().loadConfig)(options.inputFS, resolveFrom, ['package.json'], options.projectRoot);
    if (configPkg != null && ((_configPkg$config$dep = configPkg.config.dependencies) === null || _configPkg$config$dep === void 0 ? void 0 : _configPkg$config$dep[pluginName]) == null) {
      var _configPkg$config$par;
      // If not in the config's dependencies, the plugin will be auto installed with
      // the version declared in "parcelDependencies".
      range = (_configPkg$config$par = configPkg.config.parcelDependencies) === null || _configPkg$config$par === void 0 ? void 0 : _configPkg$config$par[pluginName];
      if (range == null) {
        // Not listed in either field — can't pick a version to install.
        let contents = await options.inputFS.readFile(configPkg.files[0].filePath, 'utf8');
        throw new (_diagnostic().default)({
          diagnostic: {
            message: (0, _diagnostic().md)`Could not determine version of ${pluginName} in ${_path().default.relative(process.cwd(), resolveFrom)}. Either include it in "dependencies" or "parcelDependencies".`,
            origin: '@parcel/core',
            codeFrames: configPkg.config.dependencies || configPkg.config.parcelDependencies ? [{
              filePath: configPkg.files[0].filePath,
              language: 'json5',
              code: contents,
              codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(contents, [{
                key: configPkg.config.parcelDependencies ? '/parcelDependencies' : '/dependencies',
                type: 'key'
              }])
            }] : undefined
          }
        });
      }
      // Resolve from project root if not in the config's dependencies.
      resolveFrom = _path().default.join(options.projectRoot, 'index');
    }
  }
  let resolved, pkg;
  try {
    ({
      resolved,
      pkg
    } = await options.packageManager.resolve(pluginName, resolveFrom, {
      shouldAutoInstall: options.shouldAutoInstall,
      range
    }));
  } catch (err) {
    // Only translate resolution failures into a diagnostic; rethrow other errors.
    if (err.code !== 'MODULE_NOT_FOUND') {
      throw err;
    }
    let configContents = await options.inputFS.readFile(configPath, 'utf8');
    // Suggest similarly-named installed packages in the error message.
    let alternatives = await (0, _utils().findAlternativeNodeModules)(options.inputFS, pluginName, _path().default.dirname(resolveFrom));
    throw new (_diagnostic().default)({
      diagnostic: {
        message: (0, _diagnostic().md)`Cannot find Parcel plugin "${pluginName}"`,
        origin: '@parcel/core',
        codeFrames: keyPath ? [{
          filePath: configPath,
          language: 'json5',
          code: configContents,
          codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(configContents, [{
            key: keyPath,
            type: 'value',
            message: (0, _diagnostic().md)`Cannot find module "${pluginName}"${alternatives[0] ? `, did you mean "${alternatives[0]}"?` : ''}`
          }])
        }] : undefined
      }
    });
  }
  if (!pluginName.startsWith('.')) {
    // Validate the engines.parcel field in the plugin's package.json
    let parcelVersionRange = pkg && pkg.engines && pkg.engines.parcel;
    if (!parcelVersionRange) {
      // Missing range is only a warning; an incompatible range (below) is fatal.
      _logger().default.warn({
        origin: '@parcel/core',
        message: `The plugin "${pluginName}" needs to specify a \`package.json#engines.parcel\` field with the supported Parcel version range.`
      });
    }
    if (parcelVersionRange && !_semver().default.satisfies(_package.version, parcelVersionRange)) {
      let pkgFile = (0, _nullthrows().default)(await (0, _utils().resolveConfig)(options.inputFS, resolved, ['package.json'], options.projectRoot));
      let pkgContents = await options.inputFS.readFile(pkgFile, 'utf8');
      throw new (_diagnostic().default)({
        diagnostic: {
          message: (0, _diagnostic().md)`The plugin "${pluginName}" is not compatible with the current version of Parcel. Requires "${parcelVersionRange}" but the current version is "${_package.version}".`,
          origin: '@parcel/core',
          codeFrames: [{
            filePath: pkgFile,
            language: 'json5',
            code: pkgContents,
            codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(pkgContents, [{
              key: '/engines/parcel'
            }])
          }]
        }
      });
    }
  }
  let plugin = await options.packageManager.require(pluginName, resolveFrom, {
    shouldAutoInstall: options.shouldAutoInstall
  });
  // Unwrap ES-module default exports, then extract the plugin instance stored
  // under the CONFIG symbol by @parcel/plugin's base classes.
  plugin = plugin.default ? plugin.default : plugin;
  if (!plugin) {
    throw new Error(`Plugin ${pluginName} has no exports.`);
  }
  plugin = plugin[CONFIG];
  if (!plugin) {
    throw new Error(`Plugin ${pluginName} is not a valid Parcel plugin, should export an instance of a Parcel plugin ex. "export default new Reporter({ ... })".`);
  }
  return {
    plugin,
    version: (0, _nullthrows().default)(pkg).version,
    resolveFrom: (0, _projectPath.toProjectPath)(options.projectRoot, resolveFrom),
    range
  };
}

View File

@@ -0,0 +1,86 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.fromProjectPath = void 0;
exports.fromProjectPathRelative = fromProjectPathRelative;
exports.joinProjectPath = joinProjectPath;
exports.toProjectPath = void 0;
exports.toProjectPathUnsafe = toProjectPathUnsafe;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Converts an absolute path into a path relative to the project root.
 *
 * Paths that resolve outside the project root are kept absolute (with
 * normalized separators on Windows): that way file references survive a move
 * of the project root, and access outside the root is not portable anyway.
 * Null/undefined inputs are passed through unchanged.
 */
function toProjectPath_(projectRoot, p) {
  if (p == null) {
    return p;
  }
  const relative = (0, _utils().relativePath)(projectRoot, p, false);
  if (!relative.startsWith('..')) {
    return relative;
  }
  // Outside the project root: store an absolute path instead.
  return process.platform === 'win32' ? (0, _utils().normalizeSeparators)(p) : p;
}
const toProjectPath = exports.toProjectPath = toProjectPath_;
/**
 * Converts a project-relative path back into an absolute path by prefixing the
 * project root. Absolute inputs (files outside the root) are returned as-is,
 * and null/undefined inputs yield null.
 */
function fromProjectPath_(projectRoot, p) {
  if (p == null) {
    return null;
  }
  // Project paths are stored with normalized unix separators, so they only
  // need converting on Windows.
  const projectPath = process.platform === 'win32' ? _path().default.normalize(p) : p;
  if (_path().default.isAbsolute(projectPath)) {
    return projectPath;
  }
  // Manual concatenation (adding the separator only when missing) is much
  // faster than path.join here.
  const endsWithSep = projectRoot[projectRoot.length - 1] === _path().default.sep;
  return endsWithSep ? projectRoot + projectPath : projectRoot + _path().default.sep + projectPath;
}
const fromProjectPath = exports.fromProjectPath = fromProjectPath_;
/**
 * Returns a path relative to the project root, unchanged. This is the form to
 * use when computing cache keys. (Identity at runtime; the distinction is a
 * type-level one between ProjectPath and FilePath.)
 */
function fromProjectPathRelative(projectPath) {
  return projectPath;
}
/**
 * This function should be avoided, it doesn't change the actual value.
 * It only re-brands an arbitrary path as a project path without converting it.
 */
function toProjectPathUnsafe(path) {
  return path;
}
/**
 * Joins a project root with relative paths (similar to `path.join`)
 * Always uses posix joining, since project paths are stored with unix
 * separators regardless of platform.
 */
function joinProjectPath(a, ...b) {
  return _path().default.posix.join(a, ...b);
}

View File

@@ -0,0 +1,261 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.MutableAsset = exports.Asset = void 0;
exports.assetFromValue = assetFromValue;
exports.assetToAssetValue = assetToAssetValue;
exports.mutableAssetToUncommittedAsset = mutableAssetToUncommittedAsset;
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _Environment = _interopRequireDefault(require("./Environment"));
var _Dependency = require("./Dependency");
var _Symbols = require("./Symbols");
var _UncommittedAsset = _interopRequireDefault(require("../UncommittedAsset"));
var _CommittedAsset = _interopRequireDefault(require("../CommittedAsset"));
var _Environment2 = require("../Environment");
var _projectPath = require("../projectPath");
var _types = require("../types");
var _utils = require("../utils");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Node.js custom-inspection hook symbol (picked up by util.inspect/console.log).
const inspect = Symbol.for('nodejs.util.inspect.custom');
// Interning maps: at most one public wrapper per internal asset value, tracked
// separately for committed and uncommitted assets, and for mutable wrappers.
const uncommittedAssetValueToAsset = new WeakMap();
const committedAssetValueToAsset = new WeakMap();
const assetValueToMutableAsset = new WeakMap();
// Reverse maps from public wrappers back to their internal representations.
const _assetToAssetValue = new WeakMap();
const _mutableAssetToUncommittedAsset = new WeakMap();
// Returns the internal asset value behind a public asset wrapper; throws if
// the wrapper was not created through this module.
function assetToAssetValue(asset) {
  const value = _assetToAssetValue.get(asset);
  return (0, _nullthrows().default)(value);
}
// Returns the UncommittedAsset behind a MutableAsset wrapper; throws if the
// wrapper was not created through this module.
function mutableAssetToUncommittedAsset(mutableAsset) {
  const uncommitted = _mutableAssetToUncommittedAsset.get(mutableAsset);
  return (0, _nullthrows().default)(uncommitted);
}
// Wraps an internal asset value in a public Asset, choosing the committed or
// uncommitted internal representation based on its `committed` flag.
function assetFromValue(value, options) {
  const internal = value.committed ? new _CommittedAsset.default(value, options) : new _UncommittedAsset.default({
    value,
    options
  });
  return new Asset(internal);
}
// Read-only public wrapper around an internal (committed or uncommitted)
// asset, exposing the plugin-facing asset API. Registers itself in
// _assetToAssetValue so assetToAssetValue() can recover the internal value.
class BaseAsset {
  #asset;
  // Lazily-built URLSearchParams for the asset's query string.
  #query /*: ?URLSearchParams */;
  constructor(asset) {
    this.#asset = asset;
    _assetToAssetValue.set(this, asset.value);
  }
  // Custom Node.js inspection output: "Asset(<filePath>)".
  // $FlowFixMe[unsupported-syntax]
  [inspect]() {
    return `Asset(${this.filePath})`;
  }
  get id() {
    return this.#asset.value.id;
  }
  get type() {
    return this.#asset.value.type;
  }
  get env() {
    return new _Environment.default(this.#asset.value.env, this.#asset.options);
  }
  get fs() {
    return this.#asset.options.inputFS;
  }
  // Absolute file path, reconstructed from the stored project-relative path.
  get filePath() {
    return (0, _projectPath.fromProjectPath)(this.#asset.options.projectRoot, this.#asset.value.filePath);
  }
  // Query string parsed on first access and cached for subsequent reads.
  get query() {
    if (!this.#query) {
      var _this$asset$value$que;
      this.#query = new URLSearchParams((_this$asset$value$que = this.#asset.value.query) !== null && _this$asset$value$que !== void 0 ? _this$asset$value$que : '');
    }
    return this.#query;
  }
  get meta() {
    return this.#asset.value.meta;
  }
  // Maps the internal numeric bundle behavior back to its public name.
  get bundleBehavior() {
    let bundleBehavior = this.#asset.value.bundleBehavior;
    return bundleBehavior == null ? null : _types.BundleBehaviorNames[bundleBehavior];
  }
  get isBundleSplittable() {
    return this.#asset.value.isBundleSplittable;
  }
  get isSource() {
    return this.#asset.value.isSource;
  }
  get sideEffects() {
    return this.#asset.value.sideEffects;
  }
  // Fresh read-only symbols view per access (not cached).
  get symbols() {
    return new _Symbols.AssetSymbols(this.#asset.options, this.#asset.value);
  }
  get uniqueKey() {
    return this.#asset.value.uniqueKey;
  }
  get astGenerator() {
    return this.#asset.value.astGenerator;
  }
  get pipeline() {
    return this.#asset.value.pipeline;
  }
  // Dependencies wrapped in their public representation.
  getDependencies() {
    return this.#asset.getDependencies().map(dep => (0, _Dependency.getPublicDependency)(dep, this.#asset.options));
  }
  // Content/map/AST accessors delegate directly to the internal asset.
  getCode() {
    return this.#asset.getCode();
  }
  getBuffer() {
    return this.#asset.getBuffer();
  }
  getStream() {
    return this.#asset.getStream();
  }
  getMap() {
    return this.#asset.getMap();
  }
  getAST() {
    return this.#asset.getAST();
  }
  getMapBuffer() {
    return this.#asset.getMapBuffer();
  }
}
// Immutable public asset wrapper. Instances are interned per internal value
// (separately for committed and uncommitted assets): constructing an Asset for
// a value that already has a wrapper returns the existing instance.
class Asset extends BaseAsset {
  #asset /*: CommittedAsset | UncommittedAsset */;
  // Memoized public Environment wrapper (built on first `env` access).
  #env /*: ?Environment */;
  constructor(asset) {
    let assetValueToAsset = asset.value.committed ? committedAssetValueToAsset : uncommittedAssetValueToAsset;
    let existing = assetValueToAsset.get(asset.value);
    if (existing != null) {
      // Returning from a constructor replaces `this` with the cached wrapper.
      return existing;
    }
    super(asset);
    this.#asset = asset;
    assetValueToAsset.set(asset.value, this);
    return this;
  }
  // Overrides BaseAsset#env to cache the Environment wrapper.
  get env() {
    var _this$env;
    (_this$env = this.#env) !== null && _this$env !== void 0 ? _this$env : this.#env = new _Environment.default(this.#asset.value.env, this.#asset.options);
    return this.#env;
  }
  get stats() {
    return this.#asset.value.stats;
  }
}
exports.Asset = Asset;
// Mutable public asset wrapper handed to transformers. Interned per internal
// value like Asset, and additionally registered so
// mutableAssetToUncommittedAsset() can recover the UncommittedAsset.
class MutableAsset extends BaseAsset {
  #asset /*: UncommittedAsset */;
  constructor(asset) {
    let existing = assetValueToMutableAsset.get(asset.value);
    if (existing != null) {
      // Returning from a constructor replaces `this` with the cached wrapper.
      return existing;
    }
    super(asset);
    this.#asset = asset;
    assetValueToMutableAsset.set(asset.value, this);
    _mutableAssetToUncommittedAsset.set(this, asset);
    return this;
  }
  setMap(map) {
    this.#asset.setMap(map);
  }
  get type() {
    return this.#asset.value.type;
  }
  // Changing the type also recomputes the asset's id.
  set type(type) {
    if (type !== this.#asset.value.type) {
      this.#asset.value.type = type;
      this.#asset.updateId();
    }
  }
  // Public name <-> internal numeric bundle behavior conversion.
  get bundleBehavior() {
    let bundleBehavior = this.#asset.value.bundleBehavior;
    return bundleBehavior == null ? null : _types.BundleBehaviorNames[bundleBehavior];
  }
  set bundleBehavior(bundleBehavior) {
    this.#asset.value.bundleBehavior = bundleBehavior ? _types.BundleBehavior[bundleBehavior] : null;
  }
  get isBundleSplittable() {
    return this.#asset.value.isBundleSplittable;
  }
  set isBundleSplittable(isBundleSplittable) {
    this.#asset.value.isBundleSplittable = isBundleSplittable;
  }
  get sideEffects() {
    return this.#asset.value.sideEffects;
  }
  set sideEffects(sideEffects) {
    this.#asset.value.sideEffects = sideEffects;
  }
  get uniqueKey() {
    return this.#asset.value.uniqueKey;
  }
  // uniqueKey is write-once: it may only be set while still unset.
  set uniqueKey(uniqueKey) {
    if (this.#asset.value.uniqueKey != null) {
      throw new Error("Cannot change an asset's uniqueKey after it has been set.");
    }
    this.#asset.value.uniqueKey = uniqueKey;
  }
  // Mutable symbols view (unlike BaseAsset's read-only AssetSymbols).
  get symbols() {
    return new _Symbols.MutableAssetSymbols(this.#asset.options, this.#asset.value);
  }
  addDependency(dep) {
    return this.#asset.addDependency(dep);
  }
  // Cache-invalidation hooks: delegate to the internal asset, converting
  // absolute paths into project-relative paths where needed.
  invalidateOnFileChange(filePath) {
    this.#asset.invalidateOnFileChange((0, _projectPath.toProjectPath)(this.#asset.options.projectRoot, filePath));
  }
  invalidateOnFileCreate(invalidation) {
    this.#asset.invalidateOnFileCreate(invalidation);
  }
  invalidateOnEnvChange(env) {
    this.#asset.invalidateOnEnvChange(env);
  }
  invalidateOnStartup() {
    this.#asset.invalidateOnStartup();
  }
  invalidateOnBuild() {
    this.#asset.invalidateOnBuild();
  }
  isASTDirty() {
    return this.#asset.isASTDirty;
  }
  setBuffer(buffer) {
    this.#asset.setBuffer(buffer);
  }
  setCode(code) {
    this.#asset.setCode(code);
  }
  setStream(stream) {
    this.#asset.setStream(stream);
  }
  setAST(ast) {
    return this.#asset.setAST(ast);
  }
  // Convenience wrapper for URL dependencies; defaults to lazy priority.
  addURLDependency(url, opts) {
    return this.addDependency({
      specifier: url,
      specifierType: 'url',
      priority: 'lazy',
      ...opts
    });
  }
  // Replacing the environment also recomputes the asset's id.
  setEnvironment(env) {
    this.#asset.value.env = (0, _Environment2.createEnvironment)({
      ...env,
      loc: (0, _utils.toInternalSourceLocation)(this.#asset.options.projectRoot, env.loc)
    });
    this.#asset.updateId();
  }
}
exports.MutableAsset = MutableAsset;
View File

@@ -0,0 +1,231 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.PackagedBundle = exports.NamedBundle = exports.Bundle = void 0;
exports.bundleToInternalBundle = bundleToInternalBundle;
exports.bundleToInternalBundleGraph = bundleToInternalBundleGraph;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _Asset = require("./Asset");
function _graph() {
const data = require("@parcel/graph");
_graph = function () {
return data;
};
return data;
}
var _Environment = _interopRequireDefault(require("./Environment"));
var _Dependency = require("./Dependency");
var _Target = _interopRequireDefault(require("./Target"));
var _types = require("../types");
var _projectPath = require("../projectPath");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Interning maps keyed options -> bundle graph -> internal bundle, so each
// internal bundle gets at most one public wrapper of each flavor.
const internalBundleToBundle = new (_utils().DefaultWeakMap)(() => new (_utils().DefaultWeakMap)(() => new WeakMap()));
const internalBundleToNamedBundle = new (_utils().DefaultWeakMap)(() => new (_utils().DefaultWeakMap)(() => new WeakMap()));
const internalBundleToPackagedBundle = new (_utils().DefaultWeakMap)(() => new (_utils().DefaultWeakMap)(() => new WeakMap()));
// Friendly access for other modules within this package that need access
// to the internal bundle.
const _bundleToInternalBundle = new WeakMap();
// Recovers the internal bundle behind a public wrapper; throws if absent.
function bundleToInternalBundle(bundle) {
  return (0, _nullthrows().default)(_bundleToInternalBundle.get(bundle));
}
const _bundleToInternalBundleGraph = new WeakMap();
// Recovers the internal bundle graph behind a public wrapper; throws if absent.
function bundleToInternalBundleGraph(bundle) {
  return (0, _nullthrows().default)(_bundleToInternalBundleGraph.get(bundle));
}
// Require this private object to be present when invoking these constructors,
// preventing others from using them. They should use the static `get` method.
let _private = {};
// Public façade over an internal bundle record. Instances are interned per
// (options, bundleGraph, internal bundle) via `internalBundleToBundle`, so
// identity comparison works for the same underlying bundle.
class Bundle {
  #bundle /*: InternalBundle */;
  #bundleGraph /*: BundleGraph */;
  #options /*: ParcelOptions */;
  constructor(sentinel, bundle, bundleGraph, options) {
    // Only callable from inside this module: `sentinel` must be the private
    // module-level token. External code must go through the static `get`.
    if (sentinel !== _private) {
      throw new Error('Unexpected public usage');
    }
    this.#bundle = bundle;
    this.#bundleGraph = bundleGraph;
    this.#options = options;
  }
  // Returns the cached public wrapper for `internalBundle`, creating and
  // registering one (plus the reverse-lookup entries) on first use.
  static get(internalBundle, bundleGraph, options) {
    let existingMap = internalBundleToBundle.get(options).get(bundleGraph);
    let existing = existingMap.get(internalBundle);
    if (existing != null) {
      return existing;
    }
    let bundle = new Bundle(_private, internalBundle, bundleGraph, options);
    _bundleToInternalBundle.set(bundle, internalBundle);
    _bundleToInternalBundleGraph.set(bundle, bundleGraph);
    existingMap.set(internalBundle, bundle);
    return bundle;
  }
  get id() {
    return this.#bundle.id;
  }
  get hashReference() {
    return this.#bundle.hashReference;
  }
  get type() {
    return this.#bundle.type;
  }
  get env() {
    return new _Environment.default(this.#bundle.env, this.#options);
  }
  get needsStableName() {
    return this.#bundle.needsStableName;
  }
  get bundleBehavior() {
    // Internal value is a numeric enum index; expose the string name, or null.
    let bundleBehavior = this.#bundle.bundleBehavior;
    return bundleBehavior != null ? _types.BundleBehaviorNames[bundleBehavior] : null;
  }
  get isSplittable() {
    return this.#bundle.isSplittable;
  }
  get manualSharedBundle() {
    return this.#bundle.manualSharedBundle;
  }
  get target() {
    return new _Target.default(this.#bundle.target, this.#options);
  }
  hasAsset(asset) {
    return this.#bundleGraph.bundleHasAsset(this.#bundle, (0, _Asset.assetToAssetValue)(asset));
  }
  hasDependency(dep) {
    return this.#bundleGraph.bundleHasDependency(this.#bundle, (0, _Dependency.dependencyToInternalDependency)(dep));
  }
  // Resolves each entry asset id against the graph and wraps it publicly.
  getEntryAssets() {
    return this.#bundle.entryAssetIds.map(id => {
      let assetNode = this.#bundleGraph._graph.getNodeByContentKey(id);
      (0, _assert().default)(assetNode != null && assetNode.type === 'asset');
      return (0, _Asset.assetFromValue)(assetNode.value, this.#options);
    });
  }
  // Returns undefined when the bundle has no main entry asset.
  getMainEntry() {
    if (this.#bundle.mainEntryId != null) {
      let assetNode = this.#bundleGraph._graph.getNodeByContentKey(this.#bundle.mainEntryId);
      (0, _assert().default)(assetNode != null && assetNode.type === 'asset');
      return (0, _Asset.assetFromValue)(assetNode.value, this.#options);
    }
  }
  // Traverses the bundle's asset/dependency nodes, mapping each internal node
  // to its public wrapper before invoking `visit`. Nodes of any other type map
  // to undefined (the mapper falls through without returning).
  traverse(visit) {
    return this.#bundleGraph.traverseBundle(this.#bundle, (0, _graph().mapVisitor)(node => {
      if (node.type === 'asset') {
        return {
          type: 'asset',
          value: (0, _Asset.assetFromValue)(node.value, this.#options)
        };
      } else if (node.type === 'dependency') {
        return {
          type: 'dependency',
          value: (0, _Dependency.getPublicDependency)(node.value, this.#options)
        };
      }
    }, visit));
  }
  // Traverses only assets, optionally starting from `startAsset`.
  traverseAssets(visit, startAsset) {
    return this.#bundleGraph.traverseAssets(this.#bundle, (0, _graph().mapVisitor)(asset => (0, _Asset.assetFromValue)(asset, this.#options), visit), startAsset ? (0, _Asset.assetToAssetValue)(startAsset) : undefined);
  }
}
exports.Bundle = Bundle;
// Bundle whose name/displayName/publicId have been assigned (post-naming
// phase). Interned separately via `internalBundleToNamedBundle`.
class NamedBundle extends Bundle {
  #bundle /*: InternalBundle */;
  #bundleGraph /*: BundleGraph */;
  #options /*: ParcelOptions */;
  constructor(sentinel, bundle, bundleGraph, options) {
    super(sentinel, bundle, bundleGraph, options);
    this.#bundle = bundle; // Repeating for flow
    this.#bundleGraph = bundleGraph; // Repeating for flow
    this.#options = options;
  }
  // Cached factory; mirrors Bundle.get but with NamedBundle's own intern map.
  static get(internalBundle, bundleGraph, options) {
    let existingMap = internalBundleToNamedBundle.get(options).get(bundleGraph);
    let existing = existingMap.get(internalBundle);
    if (existing != null) {
      return existing;
    }
    let namedBundle = new NamedBundle(_private, internalBundle, bundleGraph, options);
    _bundleToInternalBundle.set(namedBundle, internalBundle);
    _bundleToInternalBundleGraph.set(namedBundle, bundleGraph);
    existingMap.set(internalBundle, namedBundle);
    return namedBundle;
  }
  // The following getters throw (via nullthrows) if naming has not happened.
  get name() {
    return (0, _nullthrows().default)(this.#bundle.name);
  }
  get displayName() {
    return (0, _nullthrows().default)(this.#bundle.displayName);
  }
  get publicId() {
    return (0, _nullthrows().default)(this.#bundle.publicId);
  }
}
exports.NamedBundle = NamedBundle;
// Bundle that has been packaged/written: additionally carries optional
// `bundleInfo` (filePath, stats, final type) supplied after packaging.
class PackagedBundle extends NamedBundle {
  #bundle /*: InternalBundle */;
  #bundleGraph /*: BundleGraph */;
  #options /*: ParcelOptions */;
  #bundleInfo /*: ?PackagedBundleInfo */;
  constructor(sentinel, bundle, bundleGraph, options) {
    super(sentinel, bundle, bundleGraph, options);
    this.#bundle = bundle; // Repeating for flow
    this.#bundleGraph = bundleGraph; // Repeating for flow
    this.#options = options; // Repeating for flow
  }
  // Cached factory; note #bundleInfo stays unset until getWithInfo is used.
  static get(internalBundle, bundleGraph, options) {
    let existingMap = internalBundleToPackagedBundle.get(options).get(bundleGraph);
    let existing = existingMap.get(internalBundle);
    if (existing != null) {
      return existing;
    }
    let packagedBundle = new PackagedBundle(_private, internalBundle, bundleGraph, options);
    _bundleToInternalBundle.set(packagedBundle, internalBundle);
    _bundleToInternalBundleGraph.set(packagedBundle, bundleGraph);
    existingMap.set(internalBundle, packagedBundle);
    return packagedBundle;
  }
  // Same cached instance as `get`, but attaches packaging metadata to it.
  static getWithInfo(internalBundle, bundleGraph, options, bundleInfo) {
    let packagedBundle = PackagedBundle.get(internalBundle, bundleGraph, options);
    packagedBundle.#bundleInfo = bundleInfo;
    return packagedBundle;
  }
  // Absolute output path; throws (nullthrows) if bundleInfo was never set.
  get filePath() {
    return (0, _projectPath.fromProjectPath)(this.#options.projectRoot, (0, _nullthrows().default)(this.#bundleInfo).filePath);
  }
  get type() {
    // The bundle type may be overridden in the packager.
    // However, inline bundles will not have a bundleInfo here since they are not written to the filesystem.
    return this.#bundleInfo ? this.#bundleInfo.type : this.#bundle.type;
  }
  get stats() {
    return (0, _nullthrows().default)(this.#bundleInfo).stats;
  }
}
exports.PackagedBundle = PackagedBundle;

View File

@@ -0,0 +1,190 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.bundleGraphToInternalBundleGraph = bundleGraphToInternalBundleGraph;
exports.default = void 0;
// Lazy, memoized requires (Babel lazy CommonJS interop): the first call loads
// the module and rebinds the function name to a cached accessor.
function _assert() {
  const data = _interopRequireDefault(require("assert"));
  _assert = function () {
    return data;
  };
  return data;
}
function _nullthrows() {
  const data = _interopRequireDefault(require("nullthrows"));
  _nullthrows = function () {
    return data;
  };
  return data;
}
function _graph() {
  const data = require("@parcel/graph");
  _graph = function () {
    return data;
  };
  return data;
}
var _Asset = require("./Asset");
var _Bundle = require("./Bundle");
var _Dependency = _interopRequireWildcard(require("./Dependency"));
var _Target = require("./Target");
var _utils = require("../utils");
var _BundleGroup = _interopRequireWildcard(require("./BundleGroup"));
// Babel helper: per-process WeakMap caches for wildcard-imported modules
// (one cache for interop'd modules, one for raw namespaces). Self-replaces
// on first call so the caches are created only once.
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
// Babel helper: emulate `import * as ns` over a CommonJS export. Copies own
// properties (preserving getters/setters), adds a `default` binding, and
// memoizes the namespace object in the cache above.
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Friendly access for other modules within this package that need access
// to the internal bundle.
const _bundleGraphToInternalBundleGraph = new WeakMap();
// Reverse lookup: public BundleGraph -> internal bundle graph. Throws
// (nullthrows) for objects not constructed by this module.
function bundleGraphToInternalBundleGraph(bundleGraph) {
  return (0, _nullthrows().default)(_bundleGraphToInternalBundleGraph.get(bundleGraph));
}
// Public façade over the internal bundle graph. Every method delegates to the
// internal graph, translating public wrapper objects (Asset, Bundle,
// Dependency, BundleGroup, Target) to their internal values on the way in and
// wrapping internal values back into public objects on the way out.
// `createBundle` decides which public Bundle flavor (Bundle / NamedBundle /
// PackagedBundle) wraps internal bundles returned by queries.
class BundleGraph {
  #graph;
  #options;
  #createBundle;
  constructor(graph, createBundle, options) {
    this.#graph = graph;
    this.#options = options;
    this.#createBundle = createBundle;
    // $FlowFixMe
    _bundleGraphToInternalBundleGraph.set(this, graph);
  }
  getAssetById(id) {
    return (0, _Asset.assetFromValue)(this.#graph.getAssetById(id), this.#options);
  }
  getAssetPublicId(asset) {
    return this.#graph.getAssetPublicId((0, _Asset.assetToAssetValue)(asset));
  }
  isDependencySkipped(dep) {
    return this.#graph.isDependencySkipped((0, _Dependency.dependencyToInternalDependency)(dep));
  }
  // Returns undefined when the dependency is unresolved/excluded.
  getResolvedAsset(dep, bundle) {
    let resolution = this.#graph.getResolvedAsset((0, _Dependency.dependencyToInternalDependency)(dep), bundle && (0, _Bundle.bundleToInternalBundle)(bundle));
    if (resolution) {
      return (0, _Asset.assetFromValue)(resolution, this.#options);
    }
  }
  getIncomingDependencies(asset) {
    return this.#graph.getIncomingDependencies((0, _Asset.assetToAssetValue)(asset)).map(dep => (0, _Dependency.getPublicDependency)(dep, this.#options));
  }
  // Returns undefined when the dependency has no source asset (e.g. entries).
  getAssetWithDependency(dep) {
    let asset = this.#graph.getAssetWithDependency((0, _Dependency.dependencyToInternalDependency)(dep));
    if (asset) {
      return (0, _Asset.assetFromValue)(asset, this.#options);
    }
  }
  getBundleGroupsContainingBundle(bundle) {
    return this.#graph.getBundleGroupsContainingBundle((0, _Bundle.bundleToInternalBundle)(bundle)).map(bundleGroup => new _BundleGroup.default(bundleGroup, this.#options));
  }
  getReferencedBundles(bundle, opts) {
    return this.#graph.getReferencedBundles((0, _Bundle.bundleToInternalBundle)(bundle), opts).map(bundle => this.#createBundle(bundle, this.#graph, this.#options));
  }
  // Resolves an async dependency to either a bundle group or a single asset;
  // returns undefined when it cannot be resolved.
  resolveAsyncDependency(dependency, bundle) {
    let resolved = this.#graph.resolveAsyncDependency((0, _Dependency.dependencyToInternalDependency)(dependency), bundle && (0, _Bundle.bundleToInternalBundle)(bundle));
    if (resolved == null) {
      return;
    } else if (resolved.type === 'bundle_group') {
      return {
        type: 'bundle_group',
        value: new _BundleGroup.default(resolved.value, this.#options)
      };
    }
    return {
      type: 'asset',
      value: (0, _Asset.assetFromValue)(resolved.value, this.#options)
    };
  }
  getReferencedBundle(dependency, bundle) {
    let result = this.#graph.getReferencedBundle((0, _Dependency.dependencyToInternalDependency)(dependency), (0, _Bundle.bundleToInternalBundle)(bundle));
    if (result != null) {
      return this.#createBundle(result, this.#graph, this.#options);
    }
  }
  getDependencies(asset) {
    return this.#graph.getDependencies((0, _Asset.assetToAssetValue)(asset)).map(dep => (0, _Dependency.getPublicDependency)(dep, this.#options));
  }
  isAssetReachableFromBundle(asset, bundle) {
    return this.#graph.isAssetReachableFromBundle((0, _Asset.assetToAssetValue)(asset), (0, _Bundle.bundleToInternalBundle)(bundle));
  }
  isAssetReferenced(bundle, asset) {
    return this.#graph.isAssetReferenced((0, _Bundle.bundleToInternalBundle)(bundle), (0, _Asset.assetToAssetValue)(asset));
  }
  hasParentBundleOfType(bundle, type) {
    return this.#graph.hasParentBundleOfType((0, _Bundle.bundleToInternalBundle)(bundle), type);
  }
  getBundlesInBundleGroup(bundleGroup, opts) {
    return this.#graph.getBundlesInBundleGroup((0, _BundleGroup.bundleGroupToInternalBundleGroup)(bundleGroup), opts).map(bundle => this.#createBundle(bundle, this.#graph, this.#options));
  }
  getBundles(opts) {
    return this.#graph.getBundles(opts).map(bundle => this.#createBundle(bundle, this.#graph, this.#options));
  }
  isEntryBundleGroup(bundleGroup) {
    return this.#graph.isEntryBundleGroup((0, _BundleGroup.bundleGroupToInternalBundleGroup)(bundleGroup));
  }
  getChildBundles(bundle) {
    return this.#graph.getChildBundles((0, _Bundle.bundleToInternalBundle)(bundle)).map(bundle => this.#createBundle(bundle, this.#graph, this.#options));
  }
  getParentBundles(bundle) {
    return this.#graph.getParentBundles((0, _Bundle.bundleToInternalBundle)(bundle)).map(bundle => this.#createBundle(bundle, this.#graph, this.#options));
  }
  // Resolves a symbol through re-exports, optionally bounded to `boundary`.
  getSymbolResolution(asset, symbol, boundary) {
    let res = this.#graph.getSymbolResolution((0, _Asset.assetToAssetValue)(asset), symbol, boundary ? (0, _Bundle.bundleToInternalBundle)(boundary) : null);
    return {
      asset: (0, _Asset.assetFromValue)(res.asset, this.#options),
      exportSymbol: res.exportSymbol,
      symbol: res.symbol,
      loc: (0, _utils.fromInternalSourceLocation)(this.#options.projectRoot, res.loc)
    };
  }
  getExportedSymbols(asset, boundary) {
    let res = this.#graph.getExportedSymbols((0, _Asset.assetToAssetValue)(asset), boundary ? (0, _Bundle.bundleToInternalBundle)(boundary) : null);
    return res.map(e => ({
      asset: (0, _Asset.assetFromValue)(e.asset, this.#options),
      exportSymbol: e.exportSymbol,
      symbol: e.symbol,
      loc: (0, _utils.fromInternalSourceLocation)(this.#options.projectRoot, e.loc),
      exportAs: e.exportAs
    }));
  }
  traverse(visit, start, opts) {
    return this.#graph.traverse((0, _graph().mapVisitor)((node, actions) => {
      // Skipping unused dependencies here is faster than doing an isDependencySkipped check inside the visitor
      // because the node needs to be re-looked up by id from the hashmap.
      if (opts !== null && opts !== void 0 && opts.skipUnusedDependencies && node.type === 'dependency' && (node.hasDeferred || node.excluded)) {
        actions.skipChildren();
        return null;
      }
      return node.type === 'asset' ? {
        type: 'asset',
        value: (0, _Asset.assetFromValue)(node.value, this.#options)
      } : {
        type: 'dependency',
        value: (0, _Dependency.getPublicDependency)(node.value, this.#options)
      };
    }, visit), start ? (0, _Asset.assetToAssetValue)(start) : undefined);
  }
  traverseBundles(visit, startBundle) {
    return this.#graph.traverseBundles((0, _graph().mapVisitor)(bundle => this.#createBundle(bundle, this.#graph, this.#options), visit), startBundle == null ? undefined : (0, _Bundle.bundleToInternalBundle)(startBundle));
  }
  getBundlesWithAsset(asset) {
    return this.#graph.getBundlesWithAsset((0, _Asset.assetToAssetValue)(asset)).map(bundle => this.#createBundle(bundle, this.#graph, this.#options));
  }
  getBundlesWithDependency(dependency) {
    return this.#graph.getBundlesWithDependency((0, _Dependency.dependencyToInternalDependency)(dependency)).map(bundle => this.#createBundle(bundle, this.#graph, this.#options));
  }
  // Dispatches on the runtime type of `v` (Asset vs Dependency wrapper).
  getUsedSymbols(v) {
    if (v instanceof _Asset.Asset) {
      return this.#graph.getUsedSymbolsAsset((0, _Asset.assetToAssetValue)(v));
    } else {
      (0, _assert().default)(v instanceof _Dependency.default);
      return this.#graph.getUsedSymbolsDependency((0, _Dependency.dependencyToInternalDependency)(v));
    }
  }
  getEntryRoot(target) {
    return this.#graph.getEntryRoot(this.#options.projectRoot, (0, _Target.targetToInternalTarget)(target));
  }
}
exports.default = BundleGraph;

View File

@@ -0,0 +1,44 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.bundleGroupToInternalBundleGroup = bundleGroupToInternalBundleGroup;
exports.default = void 0;
// Lazy, memoized require of "nullthrows": self-replaces with a cached
// accessor after the first call.
function _nullthrows() {
  const data = _interopRequireDefault(require("nullthrows"));
  _nullthrows = function () {
    return data;
  };
  return data;
}
var _Target = _interopRequireDefault(require("./Target"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Intern cache: internal bundle group -> public wrapper, and the reverse map.
const internalBundleGroupToBundleGroup = new WeakMap();
const _bundleGroupToInternalBundleGroup = new WeakMap();
// Reverse lookup used by other modules in this package; throws (nullthrows)
// for objects not created by the class below. (Parameter is a BundleGroup
// despite the generated name `target`.)
function bundleGroupToInternalBundleGroup(target) {
  return (0, _nullthrows().default)(_bundleGroupToInternalBundleGroup.get(target));
}
// Public façade over an internal bundle group. The constructor itself interns:
// constructing with an already-wrapped internal group returns the cached
// instance instead of a new one (constructors may return an object override).
class BundleGroup {
  #bundleGroup /*: InternalBundleGroup */;
  #options /*: ParcelOptions */;
  constructor(bundleGroup, options) {
    let existing = internalBundleGroupToBundleGroup.get(bundleGroup);
    if (existing != null) {
      return existing;
    }
    this.#bundleGroup = bundleGroup;
    this.#options = options;
    _bundleGroupToInternalBundleGroup.set(this, bundleGroup);
    internalBundleGroupToBundleGroup.set(bundleGroup, this);
    return this;
  }
  get target() {
    return new _Target.default(this.#bundleGroup.target, this.#options);
  }
  get entryAssetId() {
    return this.#bundleGroup.entryAssetId;
  }
}
exports.default = BundleGroup;

View File

@@ -0,0 +1,192 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
// Lazy, memoized requires (Babel lazy CommonJS interop); each self-replaces
// with a cached accessor on first call.
function _assert() {
  const data = _interopRequireDefault(require("assert"));
  _assert = function () {
    return data;
  };
  return data;
}
function _path() {
  const data = _interopRequireDefault(require("path"));
  _path = function () {
    return data;
  };
  return data;
}
function _utils() {
  const data = require("@parcel/utils");
  _utils = function () {
    return data;
  };
  return data;
}
var _Environment = _interopRequireDefault(require("./Environment"));
var _projectPath = require("../projectPath");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Intern cache keyed options -> internal config -> public wrapper.
const internalConfigToConfig = new (_utils().DefaultWeakMap)(() => new WeakMap());
// Public façade over an internal plugin Config. Records cache-invalidation
// information (watched files, env vars, dev dependencies) as the plugin reads
// configuration, and resolves/loads config files relative to `searchPath`.
class PublicConfig {
  #config /*: Config */;
  #pkg /*: ?PackageJSON */;
  #pkgFilePath /*: ?FilePath */;
  #options /*: ParcelOptions */;
  constructor(config, options) {
    // Interning constructor: reuses the cached wrapper for this internal
    // config when one exists (constructor object-return override).
    let existing = internalConfigToConfig.get(options).get(config);
    if (existing != null) {
      return existing;
    }
    this.#config = config;
    this.#options = options;
    internalConfigToConfig.get(options).set(config, this);
    return this;
  }
  get env() {
    return new _Environment.default(this.#config.env, this.#options);
  }
  // Absolute path the config search starts from (project-relative internally).
  get searchPath() {
    return (0, _projectPath.fromProjectPath)(this.#options.projectRoot, this.#config.searchPath);
  }
  get result() {
    return this.#config.result;
  }
  get isSource() {
    return this.#config.isSource;
  }
  // $FlowFixMe
  setResult(result) {
    this.#config.result = result;
  }
  setCacheKey(cacheKey) {
    this.#config.cacheKey = cacheKey;
  }
  // Re-run this config when `filePath` changes on disk.
  invalidateOnFileChange(filePath) {
    this.#config.invalidateOnFileChange.add((0, _projectPath.toProjectPath)(this.#options.projectRoot, filePath));
  }
  // Track a dev dependency (and its extra invalidations), converting all
  // absolute paths to project-relative ones for storage.
  addDevDependency(devDep) {
    var _devDep$additionalInv;
    this.#config.devDeps.push({
      ...devDep,
      resolveFrom: (0, _projectPath.toProjectPath)(this.#options.projectRoot, devDep.resolveFrom),
      additionalInvalidations: (_devDep$additionalInv = devDep.additionalInvalidations) === null || _devDep$additionalInv === void 0 ? void 0 : _devDep$additionalInv.map(i => ({
        ...i,
        resolveFrom: (0, _projectPath.toProjectPath)(this.#options.projectRoot, i.resolveFrom)
      }))
    });
  }
  // Re-run this config when a matching file is created. Accepts one of three
  // invalidation shapes: {glob}, {filePath}, or {fileName, aboveFilePath}.
  invalidateOnFileCreate(invalidation) {
    if (invalidation.glob != null) {
      // $FlowFixMe
      this.#config.invalidateOnFileCreate.push(invalidation);
    } else if (invalidation.filePath != null) {
      this.#config.invalidateOnFileCreate.push({
        filePath: (0, _projectPath.toProjectPath)(this.#options.projectRoot, invalidation.filePath)
      });
    } else {
      (0, _assert().default)(invalidation.aboveFilePath != null);
      this.#config.invalidateOnFileCreate.push({
        // $FlowFixMe
        fileName: invalidation.fileName,
        aboveFilePath: (0, _projectPath.toProjectPath)(this.#options.projectRoot, invalidation.aboveFilePath)
      });
    }
  }
  invalidateOnEnvChange(env) {
    this.#config.invalidateOnEnvChange.add(env);
  }
  invalidateOnStartup() {
    this.#config.invalidateOnStartup = true;
  }
  invalidateOnBuild() {
    this.#config.invalidateOnBuild = true;
  }
  // Finds and loads the first of `fileNames` upward from `searchPath`.
  // With options.packageKey, first tries that key in the nearest package.json.
  // Registers file-create/change invalidations as a side effect. Returns
  // {contents, filePath} or null if nothing was found.
  async getConfigFrom(searchPath, fileNames, options) {
    let packageKey = options === null || options === void 0 ? void 0 : options.packageKey;
    if (packageKey != null) {
      let pkg = await this.getConfigFrom(searchPath, ['package.json']);
      if (pkg && pkg.contents[packageKey]) {
        return {
          contents: pkg.contents[packageKey],
          filePath: pkg.filePath
        };
      }
    }
    if (fileNames.length === 0) {
      return null;
    }
    // Invalidate when any of the file names are created above the search path.
    for (let fileName of fileNames) {
      this.invalidateOnFileCreate({
        fileName,
        aboveFilePath: searchPath
      });
    }
    let parse = options && options.parse;
    let configFilePath = await (0, _utils().resolveConfig)(this.#options.inputFS, searchPath, fileNames, this.#options.projectRoot);
    if (configFilePath == null) {
      return null;
    }
    if (!options || !options.exclude) {
      this.invalidateOnFileChange(configFilePath);
    }
    // If this is a JavaScript file, load it with the package manager.
    let extname = _path().default.extname(configFilePath);
    if (extname === '.js' || extname === '.cjs' || extname === '.mjs') {
      let specifier = (0, _utils().relativePath)(_path().default.dirname(searchPath), configFilePath);
      // Add dev dependency so we reload the config and any dependencies in watch mode.
      this.addDevDependency({
        specifier,
        resolveFrom: searchPath
      });
      // Invalidate on startup in case the config is non-deterministic,
      // e.g. uses unknown environment variables, reads from the filesystem, etc.
      this.invalidateOnStartup();
      let config = await this.#options.packageManager.require(specifier, searchPath);
      if (
      // $FlowFixMe
      Object.prototype.toString.call(config) === '[object Module]' && config.default != null) {
        // Native ESM config. Try to use a default export, otherwise fall back to the whole namespace.
        config = config.default;
      }
      return {
        contents: config,
        filePath: configFilePath
      };
    }
    let conf = await (0, _utils().readConfig)(this.#options.inputFS, configFilePath, parse == null ? null : {
      parse
    });
    if (conf == null) {
      return null;
    }
    return {
      contents: conf.config,
      filePath: configFilePath
    };
  }
  // Convenience: search starting from this config's own searchPath.
  getConfig(filePaths, options) {
    return this.getConfigFrom(this.searchPath, filePaths, options);
  }
  // Loads (and memoizes) the nearest package.json; null if none found.
  async getPackage() {
    if (this.#pkg) {
      return this.#pkg;
    }
    let pkgConfig = await this.getConfig(['package.json']);
    if (!pkgConfig) {
      return null;
    }
    this.#pkg = pkgConfig.contents;
    this.#pkgFilePath = pkgConfig.filePath;
    return this.#pkg;
  }
}
exports.default = PublicConfig;

View File

@@ -0,0 +1,132 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
exports.dependencyToInternalDependency = dependencyToInternalDependency;
exports.getPublicDependency = getPublicDependency;
var _types = require("../types");
// Lazy, memoized require of "nullthrows"; self-replaces on first call.
function _nullthrows() {
  const data = _interopRequireDefault(require("nullthrows"));
  _nullthrows = function () {
    return data;
  };
  return data;
}
var _Environment = _interopRequireDefault(require("./Environment"));
var _Target = _interopRequireDefault(require("./Target"));
var _Symbols = require("./Symbols");
var _projectPath = require("../projectPath");
var _utils = require("../utils");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// String names for the numeric SpecifierType / Priority enums, indexed by the
// enum's numeric value (relies on Object.keys insertion order of _types).
const SpecifierTypeNames = Object.keys(_types.SpecifierType);
const PriorityNames = Object.keys(_types.Priority);
// Custom Node.js console/util.inspect hook symbol.
const inspect = Symbol.for('nodejs.util.inspect.custom');
// Intern cache (internal dep -> public wrapper) and its reverse.
const internalDependencyToDependency = new WeakMap();
const _dependencyToInternalDependency = new WeakMap();
// Reverse lookup: public Dependency -> internal dependency; throws
// (nullthrows) for foreign objects.
function dependencyToInternalDependency(dependency) {
  return (0, _nullthrows().default)(_dependencyToInternalDependency.get(dependency));
}
// Return the memoized public wrapper for an internal dependency, or build a
// fresh Dependency (whose constructor registers itself in the cache).
function getPublicDependency(dep, options) {
  const cached = internalDependencyToDependency.get(dep);
  return cached ?? new Dependency(dep, options);
}
// Public façade over an internal dependency record. The constructor registers
// both directions of the intern cache; prefer `getPublicDependency` to reuse
// existing wrappers.
class Dependency {
  #dep /*: InternalDependency */;
  #options /*: ParcelOptions */;
  constructor(dep, options) {
    this.#dep = dep;
    this.#options = options;
    _dependencyToInternalDependency.set(this, dep);
    internalDependencyToDependency.set(dep, this);
    return this;
  }
  // $FlowFixMe
  [inspect]() {
    return `Dependency(${String(this.sourcePath)} -> ${this.specifier})`;
  }
  get id() {
    return this.#dep.id;
  }
  get specifier() {
    return this.#dep.specifier;
  }
  // Numeric enum -> string name (e.g. 'esm', 'commonjs').
  get specifierType() {
    return SpecifierTypeNames[this.#dep.specifierType];
  }
  get priority() {
    return PriorityNames[this.#dep.priority];
  }
  get needsStableName() {
    return this.#dep.needsStableName;
  }
  get bundleBehavior() {
    // Numeric enum index or null when unset.
    let bundleBehavior = this.#dep.bundleBehavior;
    return bundleBehavior == null ? null : _types.BundleBehaviorNames[bundleBehavior];
  }
  get isEntry() {
    return this.#dep.isEntry;
  }
  get isOptional() {
    return this.#dep.isOptional;
  }
  get loc() {
    return (0, _utils.fromInternalSourceLocation)(this.#options.projectRoot, this.#dep.loc);
  }
  get env() {
    return new _Environment.default(this.#dep.env, this.#options);
  }
  get packageConditions() {
    // Merge custom conditions with conditions stored as bitflags.
    // Order is not important because exports conditions are resolved
    // in the order they are declared in the package.json.
    let conditions = this.#dep.customPackageConditions;
    if (this.#dep.packageConditions) {
      conditions = conditions ? [...conditions] : [];
      for (let key in _types.ExportsCondition) {
        // Bitwise test: include the condition name if its flag is set.
        if (this.#dep.packageConditions & _types.ExportsCondition[key]) {
          conditions.push(key);
        }
      }
    }
    return conditions;
  }
  get meta() {
    return this.#dep.meta;
  }
  get symbols() {
    return new _Symbols.MutableDependencySymbols(this.#options, this.#dep);
  }
  get target() {
    let target = this.#dep.target;
    return target ? new _Target.default(target, this.#options) : null;
  }
  get sourceAssetId() {
    // TODO: does this need to be public?
    return this.#dep.sourceAssetId;
  }
  get sourcePath() {
    // TODO: does this need to be public?
    return (0, _projectPath.fromProjectPath)(this.#options.projectRoot, this.#dep.sourcePath);
  }
  get sourceAssetType() {
    return this.#dep.sourceAssetType;
  }
  get resolveFrom() {
    // Falls back to sourcePath when no explicit resolveFrom is set.
    var _this$dep$resolveFrom;
    return (0, _projectPath.fromProjectPath)(this.#options.projectRoot, (_this$dep$resolveFrom = this.#dep.resolveFrom) !== null && _this$dep$resolveFrom !== void 0 ? _this$dep$resolveFrom : this.#dep.sourcePath);
  }
  get range() {
    return this.#dep.range;
  }
  get pipeline() {
    return this.#dep.pipeline;
  }
}
exports.default = Dependency;

View File

@@ -0,0 +1,244 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = exports.ISOLATED_ENVS = exports.BROWSER_ENVS = void 0;
exports.environmentToInternalEnvironment = environmentToInternalEnvironment;
// Lazy, memoized requires (Babel lazy CommonJS interop); each self-replaces
// with a cached accessor on first call.
function _nullthrows() {
  const data = _interopRequireDefault(require("nullthrows"));
  _nullthrows = function () {
    return data;
  };
  return data;
}
function _browserslist() {
  const data = _interopRequireDefault(require("browserslist"));
  _browserslist = function () {
    return data;
  };
  return data;
}
function _semver() {
  const data = _interopRequireDefault(require("semver"));
  _semver = function () {
    return data;
  };
  return data;
}
var _utils = require("../utils");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Custom Node.js console/util.inspect hook symbol.
const inspect = Symbol.for('nodejs.util.inspect.custom');
// Environment-context classification sets used by the is*() predicates below.
const BROWSER_ENVS = exports.BROWSER_ENVS = new Set(['browser', 'web-worker', 'service-worker', 'worklet', 'electron-renderer']);
const ELECTRON_ENVS = new Set(['electron-main', 'electron-renderer']);
const NODE_ENVS = new Set(['node', ...ELECTRON_ENVS]);
const WORKER_ENVS = new Set(['web-worker', 'service-worker']);
const ISOLATED_ENVS = exports.ISOLATED_ENVS = new Set([...WORKER_ENVS, 'worklet']);
// Every browser name known to browserslist that this table may reference;
// used by getExcludedBrowsers to build exclusion queries.
const ALL_BROWSERS = ['chrome', 'and_chr', 'edge', 'firefox', 'and_ff', 'safari', 'ios', 'samsung', 'opera', 'ie', 'op_mini', 'blackberry', 'op_mob', 'ie_mob', 'and_uc', 'and_qq', 'baidu', 'kaios'];
// See require("caniuse-api").getSupport(<feature name>)
// Minimum browser/engine versions for each feature queried via
// Environment.supports(); keys are browserslist browser ids.
const supportData = {
  esmodules: {
    edge: '16',
    firefox: '60',
    chrome: '61',
    safari: '10.1',
    opera: '48',
    ios: '10.3',
    android: '76',
    and_chr: '76',
    and_ff: '68',
    samsung: '8.2',
    and_qq: '10.4',
    op_mob: '64'
  },
  'dynamic-import': {
    edge: '76',
    firefox: '67',
    chrome: '63',
    safari: '11.1',
    opera: '50',
    ios: '11.3',
    android: '63',
    and_chr: '63',
    and_ff: '67',
    samsung: '8',
    and_qq: '10.4',
    op_mob: '64'
  },
  'worker-module': {
    edge: '80',
    chrome: '80',
    opera: '67',
    android: '81',
    and_chr: '86'
  },
  'service-worker-module': {
    // TODO: Safari 14.1??
  },
  'import-meta-url': {
    edge: '79',
    firefox: '62',
    chrome: '64',
    safari: '11.1',
    opera: '51',
    ios: '12',
    android: '64',
    and_chr: '64',
    and_ff: '62',
    samsung: '9.2',
    and_qq: '10.4',
    op_mob: '64'
  },
  'arrow-functions': {
    chrome: '47',
    opera: '34',
    edge: '13',
    firefox: '45',
    safari: '10',
    node: '6',
    ios: '10',
    samsung: '5',
    electron: '0.36',
    android: '50',
    qq: '10.4',
    baidu: '7.12',
    kaios: '2.5',
    and_chr: '50',
    and_qq: '12.12',
    op_mob: '64'
  },
  'global-this': {
    chrome: '75',
    edge: '79',
    safari: '12.1',
    firefox: '65',
    opera: '58',
    node: '12',
    and_chr: '71',
    ios: '12.2',
    android: '71',
    samsung: '10.1'
  }
};
// Intern cache (internal env -> public wrapper) and its reverse.
const internalEnvironmentToEnvironment = new WeakMap();
const _environmentToInternalEnvironment = new WeakMap();
// Reverse lookup: public Environment -> internal environment; throws
// (nullthrows) for foreign objects.
function environmentToInternalEnvironment(environment) {
  return (0, _nullthrows().default)(_environmentToInternalEnvironment.get(environment));
}
// Public façade over an internal environment record. The constructor interns:
// wrapping the same internal env twice returns the cached instance
// (constructor object-return override).
class Environment {
  #environment /*: InternalEnvironment */;
  #options /*: ParcelOptions */;
  constructor(env, options) {
    let existing = internalEnvironmentToEnvironment.get(env);
    if (existing != null) {
      return existing;
    }
    this.#environment = env;
    this.#options = options;
    _environmentToInternalEnvironment.set(this, env);
    internalEnvironmentToEnvironment.set(env, this);
    return this;
  }
  get id() {
    return this.#environment.id;
  }
  get context() {
    return this.#environment.context;
  }
  get engines() {
    return this.#environment.engines;
  }
  get includeNodeModules() {
    return this.#environment.includeNodeModules;
  }
  get outputFormat() {
    return this.#environment.outputFormat;
  }
  get sourceType() {
    return this.#environment.sourceType;
  }
  get isLibrary() {
    return this.#environment.isLibrary;
  }
  get shouldOptimize() {
    return this.#environment.shouldOptimize;
  }
  get shouldScopeHoist() {
    return this.#environment.shouldScopeHoist;
  }
  get sourceMap() {
    return this.#environment.sourceMap;
  }
  get loc() {
    return (0, _utils.fromInternalSourceLocation)(this.#options.projectRoot, this.#environment.loc);
  }
  // $FlowFixMe[unsupported-syntax]
  [inspect]() {
    return `Env(${this.#environment.context})`;
  }
  // Context predicates backed by the module-level classification Sets.
  isBrowser() {
    return BROWSER_ENVS.has(this.#environment.context);
  }
  isNode() {
    return NODE_ENVS.has(this.#environment.context);
  }
  isElectron() {
    return ELECTRON_ENVS.has(this.#environment.context);
  }
  isIsolated() {
    return ISOLATED_ENVS.has(this.#environment.context);
  }
  isWorker() {
    return WORKER_ENVS.has(this.#environment.context);
  }
  isWorklet() {
    return this.#environment.context === 'worklet';
  }
  matchesEngines(minVersions, defaultValue = false) {
    // Determine if the environment matches some minimum version requirements.
    // For browsers, we run a browserslist query with and without the minimum
    // required browsers and compare the lists. For node, we just check semver.
    if (this.isBrowser() && this.engines.browsers != null) {
      let targetBrowsers = this.engines.browsers;
      let browsers = targetBrowsers != null && !Array.isArray(targetBrowsers) ? [targetBrowsers] : targetBrowsers;
      // If outputting esmodules, exclude browsers without support.
      if (this.outputFormat === 'esmodule') {
        browsers = [...browsers, ...getExcludedBrowsers(supportData.esmodules)];
      }
      let matchedBrowsers = (0, _browserslist().default)(browsers);
      if (matchedBrowsers.length === 0) {
        return false;
      }
      let minBrowsers = getExcludedBrowsers(minVersions);
      let withoutMinBrowsers = (0, _browserslist().default)([...browsers, ...minBrowsers]);
      // If excluding the minimum versions removes nothing, every targeted
      // browser already meets the requirement.
      return matchedBrowsers.length === withoutMinBrowsers.length;
    } else if (this.isNode() && this.engines.node != null) {
      return minVersions.node != null && !_semver().default.intersects(`< ${minVersions.node}`, this.engines.node);
    }
    return defaultValue;
  }
  // True when all targeted engines support `feature` (a supportData key);
  // throws on unknown features.
  supports(feature, defaultValue) {
    let engines = supportData[feature];
    if (!engines) {
      throw new Error('Unknown environment feature: ' + feature);
    }
    return this.matchesEngines(engines, defaultValue);
  }
}
exports.default = Environment;
// Build a browserslist exclusion query for every known browser: versions
// below the feature's minimum are excluded, and browsers without any minimum
// are excluded entirely (`not <browser> > 0`).
function getExcludedBrowsers(minVersions) {
  return ALL_BROWSERS.map(browser => {
    const version = minVersions[browser];
    return version ? `not ${browser} < ${version}` : `not ${browser} > 0`;
  });
}

View File

@@ -0,0 +1,185 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
// Lazy, memoized requires (Babel lazy CommonJS interop); each self-replaces
// with a cached accessor on first call.
function _assert() {
  const data = _interopRequireDefault(require("assert"));
  _assert = function () {
    return data;
  };
  return data;
}
function _nullthrows() {
  const data = _interopRequireDefault(require("nullthrows"));
  _nullthrows = function () {
    return data;
  };
  return data;
}
function _rust() {
  const data = require("@parcel/rust");
  _rust = function () {
    return data;
  };
  return data;
}
var _BundleGraph = _interopRequireDefault(require("./BundleGraph"));
var _BundleGraph2 = _interopRequireWildcard(require("../BundleGraph"));
var _Bundle = require("./Bundle");
var _Asset = require("./Asset");
var _utils = require("../utils");
var _Dependency = _interopRequireWildcard(require("./Dependency"));
var _Environment = require("./Environment");
var _Target = require("./Target");
var _constants = require("../constants");
var _projectPath = require("../projectPath");
var _types = require("../types");
var _BundleGroup = _interopRequireWildcard(require("./BundleGroup"));
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Public, mutable wrapper over the internal bundle graph handed to bundler
// plugins. Most methods unwrap public Asset/Bundle/Dependency facades into
// their internal values and delegate to the internal graph.
class MutableBundleGraph extends _BundleGraph.default {
  #graph /*: InternalBundleGraph */;
  #options /*: ParcelOptions */;
  // Public ids already handed out for bundles; used to avoid collisions.
  #bundlePublicIds /*: Set<string> */ = new Set();
  constructor(graph, options) {
    super(graph, _Bundle.Bundle.get.bind(_Bundle.Bundle), options);
    this.#graph = graph;
    this.#options = options;
  }
  // Adds a single asset (no dependencies) to the given bundle.
  addAssetToBundle(asset, bundle) {
    this.#graph.addAssetToBundle((0, _Asset.assetToAssetValue)(asset), (0, _Bundle.bundleToInternalBundle)(bundle));
  }
  // Adds the asset and its reachable subgraph to the bundle; the optional
  // callback can prune traversal at specific dependencies.
  addAssetGraphToBundle(asset, bundle, shouldSkipDependency) {
    this.#graph.addAssetGraphToBundle((0, _Asset.assetToAssetValue)(asset), (0, _Bundle.bundleToInternalBundle)(bundle), shouldSkipDependency ? d => shouldSkipDependency(new _Dependency.default(d, this.#options)) : undefined);
  }
  // Like addAssetGraphToBundle, but records the asset as a bundle entry.
  addEntryToBundle(asset, bundle, shouldSkipDependency) {
    this.#graph.addEntryToBundle((0, _Asset.assetToAssetValue)(asset), (0, _Bundle.bundleToInternalBundle)(bundle), shouldSkipDependency ? d => shouldSkipDependency(new _Dependency.default(d, this.#options)) : undefined);
  }
  // Creates (or reuses) a bundle group node keyed by target + resolved entry
  // asset, and rewires graph edges so the group sits between the dependency
  // and the assets it resolved to.
  createBundleGroup(dependency, target) {
    let dependencyNode = this.#graph._graph.getNodeByContentKey(dependency.id);
    if (!dependencyNode) {
      throw new Error('Dependency not found');
    }
    (0, _assert().default)(dependencyNode.type === 'dependency');
    let resolved = this.#graph.getResolvedAsset((0, _Dependency.dependencyToInternalDependency)(dependency));
    if (!resolved) {
      throw new Error('Dependency did not resolve to an asset ' + dependency.id);
    }
    let bundleGroup = {
      target: (0, _Target.targetToInternalTarget)(target),
      entryAssetId: resolved.id
    };
    let bundleGroupKey = (0, _utils.getBundleGroupId)(bundleGroup);
    // Reuse the existing bundle group node when one with this key exists.
    let bundleGroupNodeId = this.#graph._graph.hasContentKey(bundleGroupKey) ? this.#graph._graph.getNodeIdByContentKey(bundleGroupKey) : this.#graph._graph.addNodeByContentKey(bundleGroupKey, {
      id: bundleGroupKey,
      type: 'bundle_group',
      value: bundleGroup
    });
    let dependencyNodeId = this.#graph._graph.getNodeIdByContentKey(dependencyNode.id);
    let resolvedNodeId = this.#graph._graph.getNodeIdByContentKey(resolved.id);
    // Move the dependency's current children under the bundle group, and keep
    // a "references" edge from the dependency to its resolved asset.
    let assetNodes = this.#graph._graph.getNodeIdsConnectedFrom(dependencyNodeId);
    this.#graph._graph.addEdge(dependencyNodeId, bundleGroupNodeId);
    this.#graph._graph.replaceNodeIdsConnectedTo(bundleGroupNodeId, assetNodes);
    this.#graph._graph.addEdge(dependencyNodeId, resolvedNodeId, _BundleGraph2.bundleGraphEdgeTypes.references);
    if (
    // This check is needed for multiple targets, when we go over the same nodes twice
    this.#graph._graph.hasEdge(dependencyNodeId, resolvedNodeId, _BundleGraph2.bundleGraphEdgeTypes.null)) {
      //nullEdgeType
      this.#graph._graph.removeEdge(dependencyNodeId, resolvedNodeId);
    }
    if (dependency.isEntry) {
      // Entry dependencies hang the bundle group directly off the graph root.
      this.#graph._graph.addEdge((0, _nullthrows().default)(this.#graph._graph.rootNodeId), bundleGroupNodeId, _BundleGraph2.bundleGraphEdgeTypes.bundle);
    } else {
      // Otherwise every bundle containing the dependency gains a "bundle"
      // edge to the new group.
      let inboundBundleNodeIds = this.#graph._graph.getNodeIdsConnectedTo(dependencyNodeId, _BundleGraph2.bundleGraphEdgeTypes.contains);
      for (let inboundBundleNodeId of inboundBundleNodeIds) {
        var _this$graph$_graph$ge;
        (0, _assert().default)(((_this$graph$_graph$ge = this.#graph._graph.getNode(inboundBundleNodeId)) === null || _this$graph$_graph$ge === void 0 ? void 0 : _this$graph$_graph$ge.type) === 'bundle');
        this.#graph._graph.addEdge(inboundBundleNodeId, bundleGroupNodeId, _BundleGraph2.bundleGraphEdgeTypes.bundle);
      }
    }
    return new _BundleGroup.default(bundleGroup, this.#options);
  }
  removeBundleGroup(bundleGroup) {
    this.#graph.removeBundleGroup((0, _BundleGroup.bundleGroupToInternalBundleGroup)(bundleGroup));
  }
  // Converts an async dependency into a same-bundle reference.
  internalizeAsyncDependency(bundle, dependency) {
    this.#graph.internalizeAsyncDependency((0, _Bundle.bundleToInternalBundle)(bundle), (0, _Dependency.dependencyToInternalDependency)(dependency));
  }
  // Creates a bundle node (or returns the existing one for the same hash).
  // The id hashes entry asset/unique key, target dist dir, and behavior so
  // equivalent createBundle calls converge on one node.
  createBundle(opts) {
    var _opts$bundleBehavior;
    let entryAsset = opts.entryAsset ? (0, _Asset.assetToAssetValue)(opts.entryAsset) : null;
    let target = (0, _Target.targetToInternalTarget)(opts.target);
    let bundleId = (0, _rust().hashString)('bundle:' + (opts.entryAsset ? opts.entryAsset.id : opts.uniqueKey) + (0, _projectPath.fromProjectPathRelative)(target.distDir) + ((_opts$bundleBehavior = opts.bundleBehavior) !== null && _opts$bundleBehavior !== void 0 ? _opts$bundleBehavior : ''));
    let existing = this.#graph._graph.getNodeByContentKey(bundleId);
    if (existing != null) {
      (0, _assert().default)(existing.type === 'bundle');
      return _Bundle.Bundle.get(existing.value, this.#graph, this.#options);
    }
    let publicId = (0, _utils.getPublicId)(bundleId, existing => this.#bundlePublicIds.has(existing));
    this.#bundlePublicIds.add(publicId);
    // A bundle is a placeholder when its entry asset exists in the graph but
    // was not requested (lazy builds).
    let isPlaceholder = false;
    if (entryAsset) {
      let entryAssetNode = this.#graph._graph.getNodeByContentKey(entryAsset.id);
      (0, _assert().default)((entryAssetNode === null || entryAssetNode === void 0 ? void 0 : entryAssetNode.type) === 'asset', 'Entry asset does not exist');
      isPlaceholder = entryAssetNode.requested === false;
    }
    let bundleNode = {
      type: 'bundle',
      id: bundleId,
      value: {
        id: bundleId,
        // With content hashing enabled this is a placeholder reference that
        // is substituted later; otherwise the short id suffix is used.
        hashReference: this.#options.shouldContentHash ? _constants.HASH_REF_PREFIX + bundleId : bundleId.slice(-8),
        type: opts.entryAsset ? opts.entryAsset.type : opts.type,
        env: opts.env ? (0, _Environment.environmentToInternalEnvironment)(opts.env) : (0, _nullthrows().default)(entryAsset).env,
        entryAssetIds: entryAsset ? [entryAsset.id] : [],
        mainEntryId: entryAsset === null || entryAsset === void 0 ? void 0 : entryAsset.id,
        pipeline: opts.entryAsset ? opts.entryAsset.pipeline : opts.pipeline,
        needsStableName: opts.needsStableName,
        bundleBehavior: opts.bundleBehavior != null ? _types.BundleBehavior[opts.bundleBehavior] : null,
        isSplittable: opts.entryAsset ? opts.entryAsset.isBundleSplittable : opts.isSplittable,
        isPlaceholder,
        target,
        name: null,
        displayName: null,
        publicId,
        manualSharedBundle: opts.manualSharedBundle
      }
    };
    let bundleNodeId = this.#graph._graph.addNodeByContentKey(bundleId, bundleNode);
    if (opts.entryAsset) {
      this.#graph._graph.addEdge(bundleNodeId, this.#graph._graph.getNodeIdByContentKey(opts.entryAsset.id));
    }
    return _Bundle.Bundle.get(bundleNode.value, this.#graph, this.#options);
  }
  addBundleToBundleGroup(bundle, bundleGroup) {
    this.#graph.addBundleToBundleGroup((0, _Bundle.bundleToInternalBundle)(bundle), (0, _BundleGroup.bundleGroupToInternalBundleGroup)(bundleGroup));
  }
  createAssetReference(dependency, asset, bundle) {
    return this.#graph.createAssetReference((0, _Dependency.dependencyToInternalDependency)(dependency), (0, _Asset.assetToAssetValue)(asset), (0, _Bundle.bundleToInternalBundle)(bundle));
  }
  createBundleReference(from, to) {
    return this.#graph.createBundleReference((0, _Bundle.bundleToInternalBundle)(from), (0, _Bundle.bundleToInternalBundle)(to));
  }
  // Returns the public Asset facades a dependency resolved to.
  getDependencyAssets(dependency) {
    return this.#graph.getDependencyAssets((0, _Dependency.dependencyToInternalDependency)(dependency)).map(asset => (0, _Asset.assetFromValue)(asset, this.#options));
  }
  getBundleGroupsContainingBundle(bundle) {
    return this.#graph.getBundleGroupsContainingBundle((0, _Bundle.bundleToInternalBundle)(bundle)).map(bundleGroup => new _BundleGroup.default(bundleGroup, this.#options));
  }
  getParentBundlesOfBundleGroup(bundleGroup) {
    return this.#graph.getParentBundlesOfBundleGroup((0, _BundleGroup.bundleGroupToInternalBundleGroup)(bundleGroup)).map(bundle => _Bundle.Bundle.get(bundle, this.#graph, this.#options));
  }
  getTotalSize(asset) {
    return this.#graph.getTotalSize((0, _Asset.assetToAssetValue)(asset));
  }
  isAssetReachableFromBundle(asset, bundle) {
    return this.#graph.isAssetReachableFromBundle((0, _Asset.assetToAssetValue)(asset), (0, _Bundle.bundleToInternalBundle)(bundle));
  }
  removeAssetGraphFromBundle(asset, bundle) {
    this.#graph.removeAssetGraphFromBundle((0, _Asset.assetToAssetValue)(asset), (0, _Bundle.bundleToInternalBundle)(bundle));
  }
}
exports.default = MutableBundleGraph;

View File

@@ -0,0 +1,65 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
// Memoization: one PluginOptions facade per underlying ParcelOptions object.
let parcelOptionsToPluginOptions = new WeakMap();
// Read-only view of ParcelOptions exposed to plugins. Every getter delegates
// to the wrapped internal options object; nothing here is mutable.
class PluginOptions {
  #options /*: ParcelOptions */;
  constructor(options) {
    // Returning the cached instance from the constructor makes construction
    // idempotent per options object.
    let existing = parcelOptionsToPluginOptions.get(options);
    if (existing != null) {
      return existing;
    }
    this.#options = options;
    parcelOptionsToPluginOptions.set(options, this);
    return this;
  }
  get instanceId() {
    return this.#options.instanceId;
  }
  get mode() {
    return this.#options.mode;
  }
  get env() {
    return this.#options.env;
  }
  get hmrOptions() {
    return this.#options.hmrOptions;
  }
  get serveOptions() {
    return this.#options.serveOptions;
  }
  get shouldBuildLazily() {
    return this.#options.shouldBuildLazily;
  }
  get shouldAutoInstall() {
    return this.#options.shouldAutoInstall;
  }
  get logLevel() {
    return this.#options.logLevel;
  }
  get cacheDir() {
    // TODO: remove this. Probably bad if there are other types of caches.
    // Maybe expose the Cache object instead?
    return this.#options.cacheDir;
  }
  get projectRoot() {
    return this.#options.projectRoot;
  }
  get inputFS() {
    return this.#options.inputFS;
  }
  get outputFS() {
    return this.#options.outputFS;
  }
  get packageManager() {
    return this.#options.packageManager;
  }
  get detailedReport() {
    return this.#options.detailedReport;
  }
}
exports.default = PluginOptions;

View File

@@ -0,0 +1,247 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.MutableDependencySymbols = exports.MutableAssetSymbols = exports.AssetSymbols = void 0;
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _utils = require("../utils");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Shared singletons returned when an asset/dependency has no symbol map,
// avoiding a fresh allocation on every iteration of a cleared symbols view.
const EMPTY_ITERABLE = {
  [Symbol.iterator]() {
    return EMPTY_ITERATOR;
  }
};
const EMPTY_ITERATOR = {
  next() {
    return {
      done: true
    };
  }
};
// Custom key so Node's util.inspect/console.log prints a readable summary.
const inspect = Symbol.for('nodejs.util.inspect.custom');
// Memoization: one read-only symbols view per internal asset value.
let valueToSymbols = new WeakMap();
// Read-only view over an asset's exported symbols (export name -> local
// name + source location). A null underlying map means symbols were never
// collected ("cleared").
class AssetSymbols {
  /*::
  @@iterator(): Iterator<[ISymbol, {|local: ISymbol, loc: ?SourceLocation, meta?: ?Meta|}]> { return ({}: any); }
  */
  #value;
  #options;
  constructor(options, asset) {
    const cached = valueToSymbols.get(asset);
    if (cached != null) {
      return cached;
    }
    this.#value = asset;
    this.#options = options;
    valueToSymbols.set(asset, this);
    return this;
  }
  hasExportSymbol(exportSymbol) {
    return Boolean(this.#value.symbols?.has(exportSymbol));
  }
  hasLocalSymbol(local) {
    const symbols = this.#value.symbols;
    if (symbols == null) {
      return false;
    }
    for (const entry of symbols.values()) {
      if (entry.local === local) return true;
    }
    return false;
  }
  get(exportSymbol) {
    return fromInternalAssetSymbol(this.#options.projectRoot, this.#value.symbols?.get(exportSymbol));
  }
  get isCleared() {
    return this.#value.symbols == null;
  }
  exportSymbols() {
    return this.#value.symbols?.keys() ?? [];
  }
  // $FlowFixMe
  [Symbol.iterator]() {
    const symbols = this.#value.symbols;
    return symbols ? symbols[Symbol.iterator]() : EMPTY_ITERATOR;
  }
  // $FlowFixMe
  [inspect]() {
    const symbols = this.#value.symbols;
    const contents = symbols ? [...symbols].map(([exported, {
      local
    }]) => `${exported}:${local}`).join(', ') : null;
    return `AssetSymbols(${contents})`;
  }
}
exports.AssetSymbols = AssetSymbols;
// Memoization: one mutable symbols view per internal asset value.
let valueToMutableAssetSymbols = new WeakMap();
// Mutable view over an asset's exported symbols, used by transformers to
// record which exports an asset provides and where they live. A null
// underlying map means symbols were never collected; ensure() creates it.
class MutableAssetSymbols {
  /*::
  @@iterator(): Iterator<[ISymbol, {|local: ISymbol, loc: ?SourceLocation, meta?: ?Meta|}]> { return ({}: any); }
  */
  #value;
  #options;
  constructor(options, asset) {
    let existing = valueToMutableAssetSymbols.get(asset);
    if (existing != null) {
      return existing;
    }
    this.#value = asset;
    this.#options = options;
    // Fix: register the instance so the memoization lookup above can hit.
    // Previously the WeakMap was never written, so every call allocated a
    // fresh wrapper (AssetSymbols registers itself in exactly this way).
    valueToMutableAssetSymbols.set(asset, this);
    return this;
  }
  // immutable
  hasExportSymbol(exportSymbol) {
    return Boolean(this.#value.symbols?.has(exportSymbol));
  }
  hasLocalSymbol(local) {
    if (this.#value.symbols == null) {
      return false;
    }
    for (let s of this.#value.symbols.values()) {
      if (local === s.local) return true;
    }
    return false;
  }
  get(exportSymbol) {
    return fromInternalAssetSymbol(this.#options.projectRoot, this.#value.symbols?.get(exportSymbol));
  }
  get isCleared() {
    return this.#value.symbols == null;
  }
  exportSymbols() {
    // Assumes the symbol map exists (ensure() was called or symbols were
    // collected); intentionally throws on a cleared map.
    // $FlowFixMe
    return this.#value.symbols.keys();
  }
  // $FlowFixMe
  [Symbol.iterator]() {
    return this.#value.symbols ? this.#value.symbols[Symbol.iterator]() : EMPTY_ITERATOR;
  }
  // $FlowFixMe
  [inspect]() {
    return `MutableAssetSymbols(${this.#value.symbols ? [...this.#value.symbols].map(([s, {
      local
    }]) => `${s}:${local}`).join(', ') : null})`;
  }
  // mutating
  ensure() {
    // Lazily create the map so "not collected" (null) stays distinguishable
    // from "no exports" (empty map).
    if (this.#value.symbols == null) {
      this.#value.symbols = new Map();
    }
  }
  set(exportSymbol, local, loc, meta) {
    (0, _nullthrows().default)(this.#value.symbols).set(exportSymbol, {
      local,
      loc: (0, _utils.toInternalSourceLocation)(this.#options.projectRoot, loc),
      meta
    });
  }
  delete(exportSymbol) {
    (0, _nullthrows().default)(this.#value.symbols).delete(exportSymbol);
  }
}
exports.MutableAssetSymbols = MutableAssetSymbols;
// Memoization: one mutable symbols view per internal dependency value.
let valueToMutableDependencySymbols = new WeakMap();
// Mutable view over a dependency's imported symbols (imported name -> local
// binding, weak/strong re-export flag, source location). A null underlying
// map means symbols were never collected; ensure() creates it.
class MutableDependencySymbols {
  /*::
  @@iterator(): Iterator<[ISymbol, {|local: ISymbol, loc: ?SourceLocation, isWeak: boolean, meta?: ?Meta|}]> { return ({}: any); }
  */
  #value;
  #options;
  constructor(options, dep) {
    let existing = valueToMutableDependencySymbols.get(dep);
    if (existing != null) {
      return existing;
    }
    this.#value = dep;
    this.#options = options;
    // Fix: register the instance so the memoization lookup above can hit.
    // Previously the WeakMap was never written, so every call allocated a
    // fresh wrapper (AssetSymbols registers itself in exactly this way).
    valueToMutableDependencySymbols.set(dep, this);
    return this;
  }
  // immutable:
  hasExportSymbol(exportSymbol) {
    return Boolean(this.#value.symbols?.has(exportSymbol));
  }
  hasLocalSymbol(local) {
    if (this.#value.symbols) {
      for (let s of this.#value.symbols.values()) {
        if (local === s.local) return true;
      }
    }
    return false;
  }
  get(exportSymbol) {
    // Intentionally throws (via nullthrows) when the symbol map is cleared;
    // callers should check isCleared first.
    return fromInternalDependencySymbol(this.#options.projectRoot, (0, _nullthrows().default)(this.#value.symbols).get(exportSymbol));
  }
  get isCleared() {
    return this.#value.symbols == null;
  }
  exportSymbols() {
    // $FlowFixMe
    return this.#value.symbols ? this.#value.symbols.keys() : EMPTY_ITERABLE;
  }
  // $FlowFixMe
  [Symbol.iterator]() {
    return this.#value.symbols ? this.#value.symbols[Symbol.iterator]() : EMPTY_ITERATOR;
  }
  // $FlowFixMe
  [inspect]() {
    return `MutableDependencySymbols(${this.#value.symbols ? [...this.#value.symbols].map(([s, {
      local,
      isWeak
    }]) => `${s}:${local}${isWeak ? '?' : ''}`).join(', ') : null})`;
  }
  // mutating:
  ensure() {
    if (this.#value.symbols == null) {
      this.#value.symbols = new Map();
    }
  }
  set(exportSymbol, local, loc, isWeak) {
    let symbols = (0, _nullthrows().default)(this.#value.symbols);
    // A symbol stays weak only while every set() marked it weak (existing
    // entries default to weak, new isWeak defaults to strong); one strong
    // set makes it strong for good.
    symbols.set(exportSymbol, {
      local,
      loc: (0, _utils.toInternalSourceLocation)(this.#options.projectRoot, loc),
      isWeak: (symbols.get(exportSymbol)?.isWeak ?? true) && (isWeak ?? false)
    });
  }
  delete(exportSymbol) {
    (0, _nullthrows().default)(this.#value.symbols).delete(exportSymbol);
  }
}
exports.MutableDependencySymbols = MutableDependencySymbols;
// Converts an internal asset-symbol record to its public shape, resolving
// the project-relative source location. Passes nullish values through.
function fromInternalAssetSymbol(projectRoot, value) {
  if (!value) {
    return value;
  }
  return {
    local: value.local,
    meta: value.meta,
    loc: (0, _utils.fromInternalSourceLocation)(projectRoot, value.loc)
  };
}
// Converts an internal dependency-symbol record to its public shape,
// preserving the weak-reexport flag and resolving the source location.
// Passes nullish values through.
function fromInternalDependencySymbol(projectRoot, value) {
  if (!value) {
    return value;
  }
  return {
    local: value.local,
    meta: value.meta,
    isWeak: value.isWeak,
    loc: (0, _utils.fromInternalSourceLocation)(projectRoot, value.loc)
  };
}

View File

@@ -0,0 +1,64 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
exports.targetToInternalTarget = targetToInternalTarget;
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _Environment = _interopRequireDefault(require("./Environment"));
var _projectPath = require("../projectPath");
var _utils = require("../utils");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Custom key so Node's util.inspect/console.log prints a readable summary.
const inspect = Symbol.for('nodejs.util.inspect.custom');
// Bidirectional memoization between internal TargetValue objects and their
// public Target wrappers.
const internalTargetToTarget = new WeakMap();
const _targetToInternalTarget = new WeakMap();
// Unwraps a public Target to its internal TargetValue; throws (nullthrows)
// if the Target was not constructed through this module.
function targetToInternalTarget(target) {
  return (0, _nullthrows().default)(_targetToInternalTarget.get(target));
}
// Read-only public facade over an internal TargetValue. Instances are
// memoized per target value, and each instance registers its reverse
// mapping so targetToInternalTarget() can unwrap it.
class Target {
  #target /*: TargetValue */;
  #options /*: ParcelOptions */;
  constructor(target, options) {
    let existing = internalTargetToTarget.get(target);
    if (existing != null) {
      return existing;
    }
    this.#target = target;
    this.#options = options;
    _targetToInternalTarget.set(this, target);
    internalTargetToTarget.set(target, this);
    return this;
  }
  get distEntry() {
    return this.#target.distEntry;
  }
  get distDir() {
    // Resolved to an absolute path from the project-relative internal form.
    return (0, _projectPath.fromProjectPath)(this.#options.projectRoot, this.#target.distDir);
  }
  get env() {
    return new _Environment.default(this.#target.env, this.#options);
  }
  get name() {
    return this.#target.name;
  }
  get publicUrl() {
    return this.#target.publicUrl;
  }
  get loc() {
    return (0, _utils.fromInternalSourceLocation)(this.#options.projectRoot, this.#target.loc);
  }
  // $FlowFixMe[unsupported-syntax]
  [inspect]() {
    return `Target(${this.name} - ${this.env[inspect]()})`;
  }
}
exports.default = Target;

View File

@@ -0,0 +1,45 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.registerCoreWithSerializer = registerCoreWithSerializer;
function _graph() {
const data = require("@parcel/graph");
_graph = function () {
return data;
};
return data;
}
var _serializer = require("./serializer");
var _AssetGraph = _interopRequireDefault(require("./AssetGraph"));
var _BundleGraph = _interopRequireDefault(require("./BundleGraph"));
var _ParcelConfig = _interopRequireDefault(require("./ParcelConfig"));
var _RequestTracker = require("./RequestTracker");
var _Config = _interopRequireDefault(require("./public/Config"));
var _package = _interopRequireDefault(require("../package.json"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Guard so core classes are registered with the serializer at most once.
let coreRegistered;
// Registers Parcel core's serializable classes under version-prefixed names
// so caches written by other parcel versions can never deserialize here.
// Idempotent: repeat calls are no-ops.
function registerCoreWithSerializer() {
  if (coreRegistered) {
    return;
  }
  const packageVersion = _package.default.version;
  if (typeof packageVersion !== 'string') {
    throw new Error('Expected package version to be a string');
  }
  const serializableClasses = {
    AssetGraph: _AssetGraph.default,
    Config: _Config.default,
    BundleGraph: _BundleGraph.default,
    Graph: _graph().Graph,
    ParcelConfig: _ParcelConfig.default,
    RequestGraph: _RequestTracker.RequestGraph
  };
  for (const [className, ctor] of Object.entries(serializableClasses)) {
    (0, _serializer.registerSerializableClass)(`${packageVersion}:${className}`, ctor);
  }
  coreRegistered = true;
}

View File

@@ -0,0 +1,398 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.AssetGraphBuilder = void 0;
exports.default = createAssetGraphRequest;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireDefault(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
var _types = require("../types");
var _AssetGraph = _interopRequireDefault(require("../AssetGraph"));
var _constants = require("../constants");
var _EntryRequest = _interopRequireDefault(require("./EntryRequest"));
var _TargetRequest = _interopRequireDefault(require("./TargetRequest"));
var _AssetRequest = _interopRequireDefault(require("./AssetRequest"));
var _PathRequest = _interopRequireDefault(require("./PathRequest"));
var _projectPath = require("../projectPath");
var _dumpGraphToGraphViz = _interopRequireDefault(require("../dumpGraphToGraphViz"));
var _SymbolPropagation = require("../SymbolPropagation");
var _RequestTracker = require("../RequestTracker");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Creates the asset_graph_request descriptor: running it builds (or
// incrementally rebuilds) the asset graph for the given entries via
// AssetGraphBuilder, seeded from the previous cached result when available.
function createAssetGraphRequest(input) {
  return {
    type: _RequestTracker.requestTypes.asset_graph_request,
    id: input.name,
    run: async input => {
      let prevResult = await input.api.getPreviousResult();
      let builder = new AssetGraphBuilder(input, prevResult);
      // Fix: was `await await builder.build()` — the duplicated `await` was
      // redundant (awaiting an already-resolved value is a no-op).
      let assetGraphRequest = await builder.build();
      // early break for incremental bundling if production or flag is off;
      if (!input.options.shouldBundleIncrementally || input.options.mode === 'production') {
        assetGraphRequest.assetGraph.safeToIncrementallyBundle = false;
      }
      return assetGraphRequest;
    },
    input
  };
}
// Node types whose resolution requires running a subrequest (entry, target,
// path, or asset requests); all other node types are traversed directly.
const typesWithRequests = new Set(['entry_specifier', 'entry_file', 'dependency', 'asset_group']);
class AssetGraphBuilder {
  // Asset subrequests discovered during the build, surfaced in the result.
  assetRequests = [];
  // Seeds builder state from the previous cached result (when present) so a
  // rebuild only re-processes what changed, and wires the node-removal hook
  // that keeps symbol-propagation dirty flags consistent.
  constructor({
    input,
    api,
    options
  }, prevResult) {
    var _prevResult$assetGrap, _prevResult$assetGrou, _prevResult$previousS, _prevResult$changedAs, _prevResult$changedAs2, _JSON$stringify;
    let {
      entries,
      assetGroups,
      optionsRef,
      name,
      requestedAssetIds,
      shouldBuildLazily,
      lazyIncludes,
      lazyExcludes
    } = input;
    // Reuse the previous graph when available; otherwise start fresh.
    let assetGraph = (_prevResult$assetGrap = prevResult === null || prevResult === void 0 ? void 0 : prevResult.assetGraph) !== null && _prevResult$assetGrap !== void 0 ? _prevResult$assetGrap : new _AssetGraph.default();
    assetGraph.safeToIncrementallyBundle = true;
    assetGraph.setRootConnections({
      entries,
      assetGroups
    });
    // Carry over incremental bookkeeping from the previous run (or start empty).
    this.assetGroupsWithRemovedParents = (_prevResult$assetGrou = prevResult === null || prevResult === void 0 ? void 0 : prevResult.assetGroupsWithRemovedParents) !== null && _prevResult$assetGrou !== void 0 ? _prevResult$assetGrou : new Set();
    this.previousSymbolPropagationErrors = (_prevResult$previousS = prevResult === null || prevResult === void 0 ? void 0 : prevResult.previousSymbolPropagationErrors) !== null && _prevResult$previousS !== void 0 ? _prevResult$previousS : new Map();
    this.changedAssets = (_prevResult$changedAs = prevResult === null || prevResult === void 0 ? void 0 : prevResult.changedAssets) !== null && _prevResult$changedAs !== void 0 ? _prevResult$changedAs : new Map();
    this.changedAssetsPropagation = (_prevResult$changedAs2 = prevResult === null || prevResult === void 0 ? void 0 : prevResult.changedAssetsPropagation) !== null && _prevResult$changedAs2 !== void 0 ? _prevResult$changedAs2 : new Set();
    this.assetGraph = assetGraph;
    this.optionsRef = optionsRef;
    this.options = options;
    this.api = api;
    this.name = name;
    this.requestedAssetIds = requestedAssetIds !== null && requestedAssetIds !== void 0 ? requestedAssetIds : new Set();
    this.shouldBuildLazily = shouldBuildLazily !== null && shouldBuildLazily !== void 0 ? shouldBuildLazily : false;
    this.lazyIncludes = lazyIncludes !== null && lazyIncludes !== void 0 ? lazyIncludes : [];
    this.lazyExcludes = lazyExcludes !== null && lazyExcludes !== void 0 ? lazyExcludes : [];
    // Cache key covers parcel version, request name, entries, mode, and
    // laziness so incompatible cached graphs are never reused.
    this.cacheKey = (0, _rust().hashString)(`${_constants.PARCEL_VERSION}${name}${(_JSON$stringify = JSON.stringify(entries)) !== null && _JSON$stringify !== void 0 ? _JSON$stringify : ''}${options.mode}${options.shouldBuildLazily ? 'lazy' : 'eager'}`) + '-AssetGraph';
    this.isSingleChangeRebuild = api.getInvalidSubRequests().filter(req => req.requestType === 'asset_request').length === 1;
    this.queue = new (_utils().PromiseQueue)();
    assetGraph.onNodeRemoved = nodeId => {
      this.assetGroupsWithRemovedParents.delete(nodeId);
      // This needs to mark all connected nodes that doesn't become orphaned
      // due to replaceNodesConnectedTo to make sure that the symbols of
      // nodes from which at least one parent was removed are updated.
      let node = (0, _nullthrows().default)(assetGraph.getNode(nodeId));
      if (assetGraph.isOrphanedNode(nodeId) && node.type === 'dependency') {
        let children = assetGraph.getNodeIdsConnectedFrom(nodeId);
        for (let child of children) {
          let childNode = (0, _nullthrows().default)(assetGraph.getNode(child));
          (0, _assert().default)(childNode.type === 'asset_group' || childNode.type === 'asset');
          childNode.usedSymbolsDownDirty = true;
          this.assetGroupsWithRemovedParents.add(child);
        }
      }
    };
  }
  // Traverses the graph from the root, queueing subrequests for nodes that
  // need them; after the queue drains, runs symbol propagation and stores
  // the (possibly failed) result under this.cacheKey before returning.
  async build() {
    let errors = [];
    let rootNodeId = (0, _nullthrows().default)(this.assetGraph.rootNodeId, 'A root node is required to traverse');
    let visited = new Set([rootNodeId]);
    const visit = nodeId => {
      // Stop scheduling new work as soon as any request has failed.
      if (errors.length > 0) {
        return;
      }
      if (this.shouldSkipRequest(nodeId)) {
        visitChildren(nodeId);
      } else {
        // ? do we need to visit children inside of the promise that is queued?
        this.queueCorrespondingRequest(nodeId, errors).then(() => visitChildren(nodeId));
      }
    };
    const visitChildren = nodeId => {
      for (let childNodeId of this.assetGraph.getNodeIdsConnectedFrom(nodeId)) {
        let child = (0, _nullthrows().default)(this.assetGraph.getNode(childNodeId));
        // Revisit deferred children even if already seen, since they may now
        // be needed.
        if ((!visited.has(childNodeId) || child.hasDeferred) && this.shouldVisitChild(nodeId, childNodeId)) {
          visited.add(childNodeId);
          visit(childNodeId);
        }
      }
    };
    visit(rootNodeId);
    await this.queue.run();
    if (errors.length) {
      // Store the partial graph so the next run can resume incrementally.
      this.api.storeResult({
        assetGraph: this.assetGraph,
        changedAssets: this.changedAssets,
        changedAssetsPropagation: this.changedAssetsPropagation,
        assetGroupsWithRemovedParents: this.assetGroupsWithRemovedParents,
        previousSymbolPropagationErrors: undefined,
        assetRequests: []
      }, this.cacheKey);
      // TODO: eventually support multiple errors since requests could reject in parallel
      throw errors[0];
    }
    if (this.assetGraph.nodes.length > 1) {
      await (0, _dumpGraphToGraphViz.default)(this.assetGraph, 'AssetGraph_' + this.name + '_before_prop');
      try {
        // NOTE: this inner `errors` (a Map of symbol-propagation diagnostics)
        // shadows the outer `errors` array above.
        let errors = (0, _SymbolPropagation.propagateSymbols)({
          options: this.options,
          assetGraph: this.assetGraph,
          changedAssetsPropagation: this.changedAssetsPropagation,
          assetGroupsWithRemovedParents: this.assetGroupsWithRemovedParents,
          previousErrors: this.previousSymbolPropagationErrors
        });
        this.changedAssetsPropagation.clear();
        if (errors.size > 0) {
          // Store the result so we can re-use pre-propagation state next run.
          this.api.storeResult({
            assetGraph: this.assetGraph,
            changedAssets: this.changedAssets,
            changedAssetsPropagation: this.changedAssetsPropagation,
            assetGroupsWithRemovedParents: this.assetGroupsWithRemovedParents,
            previousSymbolPropagationErrors: errors,
            assetRequests: []
          }, this.cacheKey);
          // Just throw the first error. Since errors can bubble (e.g. reexporting a reexported symbol also fails),
          // determining which failing export is the root cause is nontrivial (because of circular dependencies).
          throw new (_diagnostic().default)({
            diagnostic: [...errors.values()][0]
          });
        }
      } catch (e) {
        await (0, _dumpGraphToGraphViz.default)(this.assetGraph, 'AssetGraph_' + this.name + '_failed');
        throw e;
      }
    }
    await (0, _dumpGraphToGraphViz.default)(this.assetGraph, 'AssetGraph_' + this.name);
    this.api.storeResult({
      assetGraph: this.assetGraph,
      changedAssets: new Map(),
      changedAssetsPropagation: this.changedAssetsPropagation,
      assetGroupsWithRemovedParents: undefined,
      previousSymbolPropagationErrors: undefined,
      assetRequests: []
    }, this.cacheKey);
    return {
      assetGraph: this.assetGraph,
      changedAssets: this.changedAssets,
      changedAssetsPropagation: this.changedAssetsPropagation,
      assetGroupsWithRemovedParents: undefined,
      previousSymbolPropagationErrors: undefined,
      assetRequests: this.assetRequests
    };
  }
  // In lazy-build mode, decides whether the edge from an asset node to a
  // dependency node should be traversed now or deferred until the asset is
  // requested; otherwise defers to the graph's own visit rule.
  shouldVisitChild(nodeId, childNodeId) {
    if (this.shouldBuildLazily) {
      let node = (0, _nullthrows().default)(this.assetGraph.getNode(nodeId));
      let childNode = (0, _nullthrows().default)(this.assetGraph.getNode(childNodeId));
      if (node.type === 'asset' && childNode.type === 'dependency') {
        // This logic will set `node.requested` to `true` if the node is in the list of requested asset ids
        // (i.e. this is an entry of a (probably) placeholder bundle that wasn't previously requested)
        //
        // Otherwise, if this node either is explicitly not requested, or has had it's requested attribute deleted,
        // it will determine whether this node is an "async child" - that is, is it a (probably)
        // dynamic import(). If so, it will explicitly have it's `node.requested` set to `false`
        //
        // If it's not requested, but it's not an async child then it's `node.requested` is deleted (undefined)
        // by default with lazy compilation all nodes are lazy
        let isNodeLazy = true;
        // For conditional lazy building - if this node matches the `lazyInclude` globs that means we want
        // only those nodes to be treated as lazy - that means if this node does _NOT_ match that glob, then we
        // also consider it not lazy (so it gets marked as requested).
        const relativePath = (0, _projectPath.fromProjectPathRelative)(node.value.filePath);
        if (this.lazyIncludes.length > 0) {
          isNodeLazy = this.lazyIncludes.some(lazyIncludeRegex => relativePath.match(lazyIncludeRegex));
        }
        // Excludes override includes, so a node is _not_ lazy if it is included in the exclude list.
        if (this.lazyExcludes.length > 0 && isNodeLazy) {
          isNodeLazy = !this.lazyExcludes.some(lazyExcludeRegex => relativePath.match(lazyExcludeRegex));
        }
        if (this.requestedAssetIds.has(node.value.id) || !isNodeLazy) {
          node.requested = true;
        } else if (!node.requested) {
          let isAsyncChild = this.assetGraph.getIncomingDependencies(node.value).every(dep => dep.isEntry || dep.priority !== _types.Priority.sync);
          if (isAsyncChild) {
            node.requested = !isNodeLazy;
          } else {
            delete node.requested;
          }
        }
        let previouslyDeferred = childNode.deferred;
        // A dependency is deferred exactly when its parent asset is
        // explicitly not requested (false, not merely undefined).
        childNode.deferred = node.requested === false;
        // The child dependency node we're now evaluating should not be deferred if it's parent
        // is explicitly not requested (requested = false, but not requested = undefined)
        //
        // if we weren't previously deferred but we are now, then this dependency node's parents should also
        // be marked as deferred
        //
        // if we were previously deferred but we not longer are, then then all parents should no longer be
        // deferred either
        if (!previouslyDeferred && childNode.deferred) {
          this.assetGraph.markParentsWithHasDeferred(childNodeId);
        } else if (previouslyDeferred && !childNode.deferred) {
          // Mark Asset and Dependency as dirty for symbol propagation as it was
          // previously deferred and it's used symbols may have changed
          this.changedAssetsPropagation.add(node.id);
          node.usedSymbolsDownDirty = true;
          this.changedAssetsPropagation.add(childNode.id);
          childNode.usedSymbolsDownDirty = true;
          this.assetGraph.unmarkParentsWithHasDeferred(childNodeId);
        }
        // We `shouldVisitChild` if the childNode is not deferred
        return !childNode.deferred;
      }
    }
    return this.assetGraph.shouldVisitChild(nodeId, childNodeId);
  }
shouldSkipRequest(nodeId) {
let node = (0, _nullthrows().default)(this.assetGraph.getNode(nodeId));
return node.complete === true || !typesWithRequests.has(node.type) || node.correspondingRequest != null && this.api.canSkipSubrequest(node.correspondingRequest);
}
queueCorrespondingRequest(nodeId, errors) {
let promise;
let node = (0, _nullthrows().default)(this.assetGraph.getNode(nodeId));
switch (node.type) {
case 'entry_specifier':
promise = this.runEntryRequest(node.value);
break;
case 'entry_file':
promise = this.runTargetRequest(node.value);
break;
case 'dependency':
promise = this.runPathRequest(node.value);
break;
case 'asset_group':
promise = this.runAssetRequest(node.value);
break;
default:
throw new Error(`Can not queue corresponding request of node with type ${node.type}`);
}
return this.queue.add(() => promise.then(null, error => errors.push(error)));
}
async runEntryRequest(input) {
let prevEntries = this.assetGraph.safeToIncrementallyBundle ? this.assetGraph.getEntryAssets().map(asset => asset.id).sort() : [];
let request = (0, _EntryRequest.default)(input);
let result = await this.api.runRequest(request, {
force: true
});
this.assetGraph.resolveEntry(request.input, result.entries, request.id);
if (this.assetGraph.safeToIncrementallyBundle) {
let currentEntries = this.assetGraph.getEntryAssets().map(asset => asset.id).sort();
let didEntriesChange = prevEntries.length !== currentEntries.length || prevEntries.every((entryId, index) => entryId === currentEntries[index]);
if (didEntriesChange) {
this.assetGraph.safeToIncrementallyBundle = false;
}
}
}
async runTargetRequest(input) {
let request = (0, _TargetRequest.default)(input);
let targets = await this.api.runRequest(request, {
force: true
});
this.assetGraph.resolveTargets(request.input, targets, request.id);
}
async runPathRequest(input) {
let request = (0, _PathRequest.default)({
dependency: input,
name: this.name
});
let result = await this.api.runRequest(request, {
force: true
});
this.assetGraph.resolveDependency(input, result, request.id);
}
// Transforms an asset group into assets via an AssetRequest, records the
// changed assets, and downgrades `safeToIncrementallyBundle` whenever the
// result cannot be applied incrementally (new asset, changed dependencies,
// or a failed transform).
async runAssetRequest(input) {
  this.assetRequests.push(input);
  let request = (0, _AssetRequest.default)({
    ...input,
    name: this.name,
    optionsRef: this.optionsRef,
    isSingleChangeRebuild: this.isSingleChangeRebuild
  });
  let assets = await this.api.runRequest(request, {
    force: true
  });
  if (assets != null) {
    for (let asset of assets) {
      if (this.assetGraph.safeToIncrementallyBundle) {
        let otherAsset = this.assetGraph.getNodeByContentKey(asset.id);
        if (otherAsset != null) {
          (0, _assert().default)(otherAsset.type === 'asset');
          // An existing asset whose dependency set changed cannot be
          // incrementally bundled.
          if (!this._areDependenciesEqualForAssets(asset, otherAsset.value)) {
            this.assetGraph.safeToIncrementallyBundle = false;
          }
        } else {
          // adding a new entry or dependency
          this.assetGraph.safeToIncrementallyBundle = false;
        }
      }
      // Track every produced asset for symbol propagation.
      this.changedAssets.set(asset.id, asset);
      this.changedAssetsPropagation.add(asset.id);
    }
    this.assetGraph.resolveAssetGroup(input, assets, request.id);
  } else {
    // No assets returned (e.g. the request failed) — bail on incremental bundling.
    this.assetGraph.safeToIncrementallyBundle = false;
  }
  // Only the first asset request of a rebuild counts as the "single change".
  this.isSingleChangeRebuild = false;
}
/**
* Used for incremental bundling of modified assets
*/
_areDependenciesEqualForAssets(asset, otherAsset) {
let assetDependencies = Array.from(asset === null || asset === void 0 ? void 0 : asset.dependencies.keys()).sort();
let otherAssetDependencies = Array.from(otherAsset === null || otherAsset === void 0 ? void 0 : otherAsset.dependencies.keys()).sort();
if (assetDependencies.length !== otherAssetDependencies.length) {
return false;
}
return assetDependencies.every((key, index) => {
var _asset$dependencies$g, _otherAsset$dependenc;
if (key !== otherAssetDependencies[index]) {
return false;
}
return (0, _utils().setEqual)(new Set(asset === null || asset === void 0 || (_asset$dependencies$g = asset.dependencies.get(key)) === null || _asset$dependencies$g === void 0 || (_asset$dependencies$g = _asset$dependencies$g.symbols) === null || _asset$dependencies$g === void 0 ? void 0 : _asset$dependencies$g.keys()), new Set(otherAsset === null || otherAsset === void 0 || (_otherAsset$dependenc = otherAsset.dependencies.get(key)) === null || _otherAsset$dependenc === void 0 || (_otherAsset$dependenc = _otherAsset$dependenc.symbols) === null || _otherAsset$dependenc === void 0 ? void 0 : _otherAsset$dependenc.keys()));
});
}
}
exports.AssetGraphBuilder = AssetGraphBuilder;

View File

@@ -0,0 +1,139 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = createAssetRequest;
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireDefault(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
var _ParcelConfigRequest = _interopRequireDefault(require("./ParcelConfigRequest"));
var _DevDepRequest = require("./DevDepRequest");
var _ConfigRequest = require("./ConfigRequest");
var _projectPath = require("../projectPath");
var _ReporterRunner = require("../ReporterRunner");
var _RequestTracker = require("../RequestTracker");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Builds an asset_request request object for the request tracker. The id is
 * derived from the request's content (see getId) so identical inputs map to
 * the same request.
 */
function createAssetRequest(input) {
  const id = getId(input);
  return {
    type: _RequestTracker.requestTypes.asset_request,
    id,
    run,
    input
  };
}
const type = 'asset_request';

/**
 * Computes a stable id for an asset request by hashing the fields that affect
 * transformation: file path, environment, source/side-effect flags, inline
 * code, pipeline and query. `optionsRef` is intentionally excluded from the
 * hash — it is a handle to shared options, not request-specific state.
 */
function getId(input) {
  // eslint-disable-next-line no-unused-vars
  const {optionsRef, ...hashInput} = input;
  const code = input.code != null ? input.code : '';
  const pipeline = input.pipeline != null ? input.pipeline : '';
  const query = input.query != null ? input.query : '';
  return (0, _rust().hashString)(type + (0, _projectPath.fromProjectPathRelative)(input.filePath) + input.env.id + String(input.isSource) + String(input.sideEffects) + code + ':' + pipeline + ':' + query);
}
/**
 * Runs an asset request: sends the asset group to a worker for
 * transformation, then replays every invalidation, dev-dependency request and
 * config request the worker reported back onto the request graph.
 *
 * Returns the transformed assets, or throws a ThrowableDiagnostic when the
 * worker reported an error.
 */
async function run({
  input,
  api,
  farm,
  invalidateReason,
  options
}) {
  (0, _ReporterRunner.report)({
    type: 'buildProgress',
    phase: 'transforming',
    filePath: (0, _projectPath.fromProjectPath)(options.projectRoot, input.filePath)
  });
  api.invalidateOnFileUpdate(input.filePath);
  let start = Date.now();
  // `optionsRef` is shipped to the worker separately; everything else is part
  // of the transform request payload.
  let {
    optionsRef,
    ...rest
  } = input;
  let {
    cachePath
  } = (0, _nullthrows().default)(await api.runRequest((0, _ParcelConfigRequest.default)()));
  // CONSISTENCY FIX: this previously inlined a token-for-token copy of the
  // dev-dep subrequest scan that DevDepRequest already exports as
  // getDevDepRequests (and which BundleGraphRequest uses). Call the shared
  // helper instead of duplicating it.
  let {
    devDeps,
    invalidDevDeps
  } = await (0, _DevDepRequest.getDevDepRequests)(api);
  let request = {
    ...rest,
    invalidateReason,
    devDeps,
    invalidDevDeps
  };
  let {
    assets,
    configRequests,
    error,
    invalidations,
    devDepRequests
  } = await farm.createHandle('runTransform', input.isSingleChangeRebuild)({
    configCachePath: cachePath,
    optionsRef,
    request
  });
  let time = Date.now() - start;
  // Attribute the full transform duration to each produced asset.
  if (assets) {
    for (let asset of assets) {
      asset.stats.time = time;
    }
  }
  // Replay the worker-reported invalidations onto this request.
  for (let filePath of invalidations.invalidateOnFileChange) {
    api.invalidateOnFileUpdate(filePath);
    api.invalidateOnFileDelete(filePath);
  }
  for (let invalidation of invalidations.invalidateOnFileCreate) {
    api.invalidateOnFileCreate(invalidation);
  }
  for (let env of invalidations.invalidateOnEnvChange) {
    api.invalidateOnEnvChange(env);
  }
  for (let option of invalidations.invalidateOnOptionChange) {
    api.invalidateOnOptionChange(option);
  }
  if (invalidations.invalidateOnStartup) {
    api.invalidateOnStartup();
  }
  if (invalidations.invalidateOnBuild) {
    api.invalidateOnBuild();
  }
  // Record dev-dep and config subrequests so they participate in caching.
  for (let devDepRequest of devDepRequests) {
    await (0, _DevDepRequest.runDevDepRequest)(api, devDepRequest);
  }
  for (let configRequest of configRequests) {
    await (0, _ConfigRequest.runConfigRequest)(api, configRequest);
  }
  if (error != null) {
    throw new (_diagnostic().default)({
      diagnostic: error
    });
  } else {
    return (0, _nullthrows().default)(assets);
  }
}

View File

@@ -0,0 +1,417 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = createBundleGraphRequest;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _logger() {
const data = require("@parcel/logger");
_logger = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
var _AssetGraph = _interopRequireDefault(require("../AssetGraph"));
var _BundleGraph = _interopRequireDefault(require("../public/BundleGraph"));
var _BundleGraph2 = _interopRequireWildcard(require("../BundleGraph"));
var _MutableBundleGraph = _interopRequireDefault(require("../public/MutableBundleGraph"));
var _Bundle = require("../public/Bundle");
var _ReporterRunner = require("../ReporterRunner");
var _dumpGraphToGraphViz = _interopRequireDefault(require("../dumpGraphToGraphViz"));
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
var _PluginOptions = _interopRequireDefault(require("../public/PluginOptions"));
var _applyRuntimes = _interopRequireDefault(require("../applyRuntimes"));
var _constants = require("../constants");
var _utils2 = require("../utils");
var _ParcelConfigRequest = _interopRequireWildcard(require("./ParcelConfigRequest"));
var _DevDepRequest = require("./DevDepRequest");
var _InternalConfig = require("../InternalConfig");
var _ConfigRequest = require("./ConfigRequest");
var _projectPath = require("../projectPath");
var _AssetGraphRequest = _interopRequireDefault(require("./AssetGraphRequest"));
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
var _RequestTracker = require("../RequestTracker");
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Creates the top-level BundleGraph request. Running it builds (or
// incrementally updates) the asset graph via an AssetGraphRequest, then hands
// the result to a BundlerRunner which produces and caches the BundleGraph.
function createBundleGraphRequest(input) {
  return {
    type: _RequestTracker.requestTypes.bundle_graph_request,
    id: 'BundleGraph',
    run: async input => {
      let {
        options,
        api,
        invalidateReason
      } = input;
      let {
        optionsRef,
        requestedAssetIds,
        signal
      } = input.input;
      let measurement = _profiler().tracer.createMeasurement('building');
      // Phase 1: build the asset graph. Force a re-run in lazy mode when
      // specific assets were requested, since the cached graph may not
      // contain them.
      let request = (0, _AssetGraphRequest.default)({
        name: 'Main',
        entries: options.entries,
        optionsRef,
        shouldBuildLazily: options.shouldBuildLazily,
        lazyIncludes: options.lazyIncludes,
        lazyExcludes: options.lazyExcludes,
        requestedAssetIds
      });
      let {
        assetGraph,
        changedAssets,
        assetRequests
      } = await api.runRequest(request, {
        force: options.shouldBuildLazily && requestedAssetIds.size > 0
      });
      measurement && measurement.end();
      (0, _utils2.assertSignalNotAborted)(signal);
      // If any subrequests are invalid (e.g. dev dep requests or config requests),
      // bail on incremental bundling. We also need to invalidate for option changes,
      // which are hoisted to direct invalidations on the bundle graph request.
      let subRequestsInvalid = Boolean(invalidateReason & _constants.OPTION_CHANGE) || input.api.getSubRequests().some(req => !input.api.canSkipSubrequest(req.id));
      if (subRequestsInvalid) {
        assetGraph.safeToIncrementallyBundle = false;
      }
      // Phase 2: load the resolved Parcel config and invalidate any stale
      // dev dependencies (plugins) before bundling.
      let configResult = (0, _nullthrows().default)(await input.api.runRequest((0, _ParcelConfigRequest.default)()));
      (0, _utils2.assertSignalNotAborted)(signal);
      let parcelConfig = (0, _ParcelConfigRequest.getCachedParcelConfig)(configResult, input.options);
      let {
        devDeps,
        invalidDevDeps
      } = await (0, _DevDepRequest.getDevDepRequests)(input.api);
      (0, _DevDepRequest.invalidateDevDeps)(invalidDevDeps, input.options, parcelConfig);
      // Phase 3: run the bundler over the asset graph.
      let bundlingMeasurement = _profiler().tracer.createMeasurement('bundling');
      let builder = new BundlerRunner(input, parcelConfig, devDeps);
      let res = await builder.bundle({
        graph: assetGraph,
        changedAssets: changedAssets,
        assetRequests
      });
      bundlingMeasurement && bundlingMeasurement.end();
      // Merge assets changed during the asset-graph build into the result
      // (the bundler result only contains runtime-changed assets).
      for (let [id, asset] of changedAssets) {
        res.changedAssets.set(id, asset);
      }
      await (0, _dumpGraphToGraphViz.default)(
      // $FlowFixMe Added in Flow 0.121.0 upgrade in #4381 (Windows only)
      res.bundleGraph._graph, 'BundleGraph', _BundleGraph2.bundleGraphEdgeTypes);
      return res;
    },
    input
  };
}
// Drives the bundling phase of a build: loads bundler/namer/runtime plugin
// configs, invokes the bundler plugin (either incrementally on a previous
// BundleGraph or from scratch), names bundles, applies runtimes, and stores
// the resulting BundleGraph in the request tracker's cache.
class BundlerRunner {
  constructor({
    input,
    api,
    options
  }, config, previousDevDeps) {
    var _JSON$stringify;
    this.options = options;
    this.api = api;
    this.optionsRef = input.optionsRef;
    this.config = config;
    this.previousDevDeps = previousDevDeps;
    this.devDepRequests = new Map();
    this.configs = new Map();
    this.pluginOptions = new _PluginOptions.default((0, _utils2.optionsProxy)(this.options, api.invalidateOnOptionChange));
    // Cache key covers Parcel version, entries, mode and laziness — anything
    // that would make a cached BundleGraph unusable.
    this.cacheKey = (0, _rust().hashString)(`${_constants.PARCEL_VERSION}:BundleGraph:${(_JSON$stringify = JSON.stringify(options.entries)) !== null && _JSON$stringify !== void 0 ? _JSON$stringify : ''}${options.mode}${options.shouldBuildLazily ? 'lazy' : 'eager'}`) + '-BundleGraph';
  }
  async loadConfigs() {
    // Load all configs up front so we can use them in the cache key
    let bundler = await this.config.getBundler();
    await this.loadConfig(bundler);
    let namers = await this.config.getNamers();
    for (let namer of namers) {
      await this.loadConfig(namer);
    }
    let runtimes = await this.config.getRuntimes();
    for (let runtime of runtimes) {
      await this.loadConfig(runtime);
    }
  }
  // Loads one plugin's config, records its invalidations and dev deps, and
  // stores the result keyed by plugin name.
  async loadConfig(plugin) {
    let config = (0, _InternalConfig.createConfig)({
      plugin: plugin.name,
      searchPath: (0, _projectPath.toProjectPathUnsafe)('index')
    });
    await (0, _ConfigRequest.loadPluginConfig)(plugin, config, this.options);
    await (0, _ConfigRequest.runConfigRequest)(this.api, config);
    for (let devDep of config.devDeps) {
      let devDepRequest = await (0, _DevDepRequest.createDevDependency)(devDep, this.previousDevDeps, this.options);
      await this.runDevDepRequest(devDepRequest);
    }
    this.configs.set(plugin.name, config);
  }
  // Records a dev-dep request both locally (for applyRuntimes) and on the
  // request graph.
  async runDevDepRequest(devDepRequest) {
    let {
      specifier,
      resolveFrom
    } = devDepRequest;
    let key = `${specifier}:${(0, _projectPath.fromProjectPathRelative)(resolveFrom)}`;
    this.devDepRequests.set(key, devDepRequest);
    await (0, _DevDepRequest.runDevDepRequest)(this.api, devDepRequest);
  }
  // Produces the BundleGraph for `graph`. Updates a previous BundleGraph in
  // place when incremental bundling is safe; otherwise runs the full
  // bundle → optimize → name → runtimes pipeline.
  async bundle({
    graph,
    changedAssets,
    assetRequests
  }) {
    (0, _ReporterRunner.report)({
      type: 'buildProgress',
      phase: 'bundling'
    });
    await this.loadConfigs();
    let plugin = await this.config.getBundler();
    let {
      plugin: bundler,
      name,
      resolveFrom
    } = plugin;
    // if a previous asset graph hash is passed in, check if the bundle graph is also available
    let previousBundleGraphResult;
    if (graph.safeToIncrementallyBundle) {
      try {
        previousBundleGraphResult = await this.api.getPreviousResult();
      } catch {
        // if the bundle graph had an error or was removed, don't fail the build
      }
    }
    if (previousBundleGraphResult == null) {
      graph.safeToIncrementallyBundle = false;
    }
    let internalBundleGraph;
    let logger = new (_logger().PluginLogger)({
      origin: name
    });
    let tracer = new (_profiler().PluginTracer)({
      origin: name,
      category: 'bundle'
    });
    try {
      if (previousBundleGraphResult) {
        // Incremental path: patch changed assets into the previous graph
        // instead of re-running the bundler.
        internalBundleGraph = previousBundleGraphResult.bundleGraph;
        for (let changedAssetId of changedAssets.keys()) {
          // Copy over the whole node to also have correct symbol data
          let changedAssetNode = (0, _nullthrows().default)(graph.getNodeByContentKey(changedAssetId));
          (0, _assert().default)(changedAssetNode.type === 'asset');
          internalBundleGraph.updateAsset(changedAssetNode);
        }
      } else {
        var _this$configs$get;
        internalBundleGraph = _BundleGraph2.default.fromAssetGraph(graph, this.options.mode === 'production');
        (0, _assert().default)(internalBundleGraph != null); // ensures the graph was created
        await (0, _dumpGraphToGraphViz.default)(
        // $FlowFixMe
        internalBundleGraph._graph, 'before_bundle', _BundleGraph2.bundleGraphEdgeTypes);
        let mutableBundleGraph = new _MutableBundleGraph.default(internalBundleGraph, this.options);
        let measurement;
        let measurementFilename;
        if (tracer.enabled) {
          measurementFilename = graph.getEntryAssets().map(asset => (0, _projectPath.fromProjectPathRelative)(asset.filePath)).join(', ');
          measurement = tracer.createMeasurement(plugin.name, 'bundling:bundle', measurementFilename);
        }
        // this the normal bundle workflow (bundle, optimizing, run-times, naming)
        await bundler.bundle({
          bundleGraph: mutableBundleGraph,
          config: (_this$configs$get = this.configs.get(plugin.name)) === null || _this$configs$get === void 0 ? void 0 : _this$configs$get.result,
          options: this.pluginOptions,
          logger,
          tracer
        });
        measurement && measurement.end();
        if (this.pluginOptions.mode === 'production') {
          let optimizeMeasurement;
          try {
            var _this$configs$get2;
            if (tracer.enabled) {
              optimizeMeasurement = tracer.createMeasurement(plugin.name, 'bundling:optimize', (0, _nullthrows().default)(measurementFilename));
            }
            await bundler.optimize({
              bundleGraph: mutableBundleGraph,
              config: (_this$configs$get2 = this.configs.get(plugin.name)) === null || _this$configs$get2 === void 0 ? void 0 : _this$configs$get2.result,
              options: this.pluginOptions,
              logger
            });
          } catch (e) {
            throw new (_diagnostic().default)({
              diagnostic: (0, _diagnostic().errorToDiagnostic)(e, {
                origin: plugin.name
              })
            });
          } finally {
            optimizeMeasurement && optimizeMeasurement.end();
            await (0, _dumpGraphToGraphViz.default)(
            // $FlowFixMe[incompatible-call]
            internalBundleGraph._graph, 'after_optimize');
          }
        }
        // Add dev dependency for the bundler. This must be done AFTER running it due to
        // the potential for lazy require() that aren't executed until the request runs.
        let devDepRequest = await (0, _DevDepRequest.createDevDependency)({
          specifier: name,
          resolveFrom
        }, this.previousDevDeps, this.options);
        await this.runDevDepRequest(devDepRequest);
      }
    } catch (e) {
      // Store the partial graph so a subsequent build can still load it,
      // then surface the error as a diagnostic.
      if (internalBundleGraph != null) {
        this.api.storeResult({
          bundleGraph: internalBundleGraph,
          changedAssets: new Map(),
          assetRequests: []
        }, this.cacheKey);
      }
      throw new (_diagnostic().default)({
        diagnostic: (0, _diagnostic().errorToDiagnostic)(e, {
          origin: name
        })
      });
    } finally {
      (0, _assert().default)(internalBundleGraph != null); // ensures the graph was created
      await (0, _dumpGraphToGraphViz.default)(
      // $FlowFixMe[incompatible-call]
      internalBundleGraph._graph, 'after_bundle', _BundleGraph2.bundleGraphEdgeTypes);
    }
    let changedRuntimes = new Map();
    if (!previousBundleGraphResult) {
      let namers = await this.config.getNamers();
      // inline bundles must still be named so the PackagerRunner
      // can match them to the correct packager/optimizer plugins.
      let bundles = internalBundleGraph.getBundles({
        includeInline: true
      });
      await Promise.all(bundles.map(bundle => this.nameBundle(namers, bundle, internalBundleGraph)));
      changedRuntimes = await (0, _applyRuntimes.default)({
        bundleGraph: internalBundleGraph,
        api: this.api,
        config: this.config,
        options: this.options,
        optionsRef: this.optionsRef,
        pluginOptions: this.pluginOptions,
        previousDevDeps: this.previousDevDeps,
        devDepRequests: this.devDepRequests,
        configs: this.configs
      });
      // Add dev deps for namers, AFTER running them to account for lazy require().
      for (let namer of namers) {
        let devDepRequest = await (0, _DevDepRequest.createDevDependency)({
          specifier: namer.name,
          resolveFrom: namer.resolveFrom
        }, this.previousDevDeps, this.options);
        await this.runDevDepRequest(devDepRequest);
      }
      this.validateBundles(internalBundleGraph);
      // Pre-compute the hashes for each bundle so they are only computed once and shared between workers.
      internalBundleGraph.getBundleGraphHash();
    }
    await (0, _dumpGraphToGraphViz.default)(
    // $FlowFixMe
    internalBundleGraph._graph, 'after_runtimes', _BundleGraph2.bundleGraphEdgeTypes);
    this.api.storeResult({
      bundleGraph: internalBundleGraph,
      changedAssets: new Map(),
      assetRequests: []
    }, this.cacheKey);
    return {
      bundleGraph: internalBundleGraph,
      changedAssets: changedRuntimes,
      assetRequests
    };
  }
  // Asserts that no two bundles resolve to the same dist path.
  validateBundles(bundleGraph) {
    let bundles = bundleGraph.getBundles();
    let bundleNames = bundles.map(b => (0, _projectPath.joinProjectPath)(b.target.distDir, (0, _nullthrows().default)(b.name)));
    _assert().default.deepEqual(bundleNames, (0, _utils().unique)(bundleNames), 'Bundles must have unique name. Conflicting names: ' + [...(0, _utils().setDifference)(new Set(bundleNames), new Set((0, _utils().unique)(bundleNames)))].join());
  }
  // Runs the namer plugins in order until one returns a name for the bundle;
  // throws if none does.
  async nameBundle(namers, internalBundle, internalBundleGraph) {
    let bundle = _Bundle.Bundle.get(internalBundle, internalBundleGraph, this.options);
    let bundleGraph = new _BundleGraph.default(internalBundleGraph, _Bundle.NamedBundle.get.bind(_Bundle.NamedBundle), this.options);
    for (let namer of namers) {
      let measurement;
      try {
        var _this$configs$get3;
        measurement = _profiler().tracer.createMeasurement(namer.name, 'namer', bundle.id);
        let name = await namer.plugin.name({
          bundle,
          bundleGraph,
          config: (_this$configs$get3 = this.configs.get(namer.name)) === null || _this$configs$get3 === void 0 ? void 0 : _this$configs$get3.result,
          options: this.pluginOptions,
          logger: new (_logger().PluginLogger)({
            origin: namer.name
          }),
          tracer: new (_profiler().PluginTracer)({
            origin: namer.name,
            category: 'namer'
          })
        });
        if (name != null) {
          internalBundle.name = name;
          let {
            hashReference
          } = internalBundle;
          // The display name replaces the content-hash placeholder for logs.
          internalBundle.displayName = name.includes(hashReference) ? name.replace(hashReference, '[hash]') : name;
          return;
        }
      } catch (e) {
        throw new (_diagnostic().default)({
          diagnostic: (0, _diagnostic().errorToDiagnostic)(e, {
            origin: namer.name
          })
        });
      } finally {
        measurement && measurement.end();
      }
    }
    throw new Error('Unable to name bundle');
  }
}

View File

@@ -0,0 +1,163 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getConfigHash = getConfigHash;
exports.getConfigRequests = getConfigRequests;
exports.loadPluginConfig = loadPluginConfig;
exports.runConfigRequest = runConfigRequest;
var _serializer = require("../serializer.js");
function _logger() {
const data = require("@parcel/logger");
_logger = function () {
return data;
};
return data;
}
var _PluginOptions = _interopRequireDefault(require("../public/PluginOptions"));
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
var _Config = _interopRequireDefault(require("../public/Config"));
var _utils = require("../utils");
var _assetUtils = require("../assetUtils");
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
var _RequestTracker = require("../RequestTracker");
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Invokes a plugin's loadConfig hook (when it has one) and stores the result
 * on `config`. Option reads performed through the proxied PluginOptions are
 * recorded as invalidations. Errors are rethrown as diagnostics attributed to
 * the plugin.
 */
async function loadPluginConfig(loadedPlugin, config, options) {
  const loadConfig = loadedPlugin.plugin.loadConfig;
  if (!loadConfig) {
    // Plugin doesn't implement loadConfig — nothing to load.
    return;
  }
  try {
    const proxiedOptions = new _PluginOptions.default((0, _utils.optionsProxy)(options, option => {
      config.invalidateOnOptionChange.add(option);
    }));
    const pluginLogger = new (_logger().PluginLogger)({
      origin: loadedPlugin.name
    });
    const pluginTracer = new (_profiler().PluginTracer)({
      origin: loadedPlugin.name,
      category: 'loadConfig'
    });
    config.result = await loadConfig({
      config: new _Config.default(config, options),
      options: proxiedOptions,
      logger: pluginLogger,
      tracer: pluginTracer
    });
  } catch (e) {
    throw new (_diagnostic().default)({
      diagnostic: (0, _diagnostic().errorToDiagnostic)(e, {
        origin: loadedPlugin.name
      })
    });
  }
}
/**
 * Records a config's invalidations on the request graph by running a
 * config_request subrequest whose sole job is to register them.
 */
async function runConfigRequest(api, configRequest) {
  let {
    invalidateOnFileChange,
    invalidateOnFileCreate,
    invalidateOnEnvChange,
    invalidateOnOptionChange,
    invalidateOnStartup,
    invalidateOnBuild
  } = configRequest;
  // If there are no invalidations, then no need to create a node.
  // BUG FIX: `invalidateOnEnvChange` was missing from this guard, so a config
  // whose only invalidations were environment variables was skipped and env
  // changes never invalidated it — inconsistent with getConfigRequests, which
  // does count env invalidations when deciding what to send to the graph.
  if (invalidateOnFileChange.size === 0 && invalidateOnFileCreate.length === 0 && invalidateOnEnvChange.size === 0 && invalidateOnOptionChange.size === 0 && !invalidateOnStartup && !invalidateOnBuild) {
    return;
  }
  await api.runRequest({
    id: 'config_request:' + configRequest.id,
    type: _RequestTracker.requestTypes.config_request,
    run: ({
      api
    }) => {
      // Register every invalidation on the config_request node.
      for (let filePath of invalidateOnFileChange) {
        api.invalidateOnFileUpdate(filePath);
        api.invalidateOnFileDelete(filePath);
      }
      for (let invalidation of invalidateOnFileCreate) {
        api.invalidateOnFileCreate(invalidation);
      }
      for (let env of invalidateOnEnvChange) {
        api.invalidateOnEnvChange(env);
      }
      for (let option of invalidateOnOptionChange) {
        api.invalidateOnOptionChange(option);
      }
      if (invalidateOnStartup) {
        api.invalidateOnStartup();
      }
      if (invalidateOnBuild) {
        api.invalidateOnBuild();
      }
    },
    input: null
  });
}
// Computes a cache hash for a loaded plugin config. Prefers (in order):
// the plugin-supplied cacheKey, a hash of the config's watched files, or a
// hash of the serialized config result itself.
async function getConfigHash(config, pluginName, options) {
  if (config.result == null) {
    // No config result — nothing to hash.
    return '';
  }
  let hash = new (_rust().Hash)();
  hash.writeString(config.id);
  // If there is no result hash set by the transformer, default to hashing the included
  // files if any, otherwise try to hash the config result itself.
  if (config.cacheKey == null) {
    if (config.invalidateOnFileChange.size > 0) {
      hash.writeString(await (0, _assetUtils.getInvalidationHash)([...config.invalidateOnFileChange].map(filePath => ({
        type: 'file',
        filePath
      })), options));
    } else if (config.result != null) {
      // NOTE(review): `config.result != null` is always true here (guarded at
      // the top of the function) — likely a leftover Flow refinement from the
      // original source.
      try {
        // serializeRaw throws on values that aren't structured-clone friendly
        // (e.g. functions); surface that as an actionable diagnostic.
        hash.writeBuffer((0, _serializer.serializeRaw)(config.result));
      } catch (err) {
        throw new (_diagnostic().default)({
          diagnostic: {
            message: 'Config result is not hashable because it contains non-serializable objects. Please use config.setCacheKey to set the hash manually.',
            origin: pluginName
          }
        });
      }
    }
  } else {
    // Plugin provided an explicit cache key — use it verbatim.
    var _config$cacheKey;
    hash.writeString((_config$cacheKey = config.cacheKey) !== null && _config$cacheKey !== void 0 ? _config$cacheKey : '');
  }
  return hash.finish();
}
/**
 * Extracts the serializable invalidation fields for each config that has at
 * least one invalidation. Configs with none are dropped — they would never
 * need to be re-run, so there is nothing to record on the request graph.
 */
function getConfigRequests(configs) {
  const hasInvalidations = config => config.invalidateOnFileChange.size > 0 || config.invalidateOnFileCreate.length > 0 || config.invalidateOnEnvChange.size > 0 || config.invalidateOnOptionChange.size > 0 || config.invalidateOnStartup || config.invalidateOnBuild;
  const requests = [];
  for (const config of configs) {
    if (!hasInvalidations(config)) {
      continue;
    }
    requests.push({
      id: config.id,
      invalidateOnFileChange: config.invalidateOnFileChange,
      invalidateOnFileCreate: config.invalidateOnFileCreate,
      invalidateOnEnvChange: config.invalidateOnEnvChange,
      invalidateOnOptionChange: config.invalidateOnOptionChange,
      invalidateOnStartup: config.invalidateOnStartup,
      invalidateOnBuild: config.invalidateOnBuild
    });
  }
  return requests;
}

View File

@@ -0,0 +1,167 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createDevDependency = createDevDependency;
exports.getDevDepRequests = getDevDepRequests;
exports.getWorkerDevDepRequests = getWorkerDevDepRequests;
exports.invalidateDevDeps = invalidateDevDeps;
exports.runDevDepRequest = runDevDepRequest;
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _assetUtils = require("../assetUtils");
var _buildCache = require("../buildCache");
var _utils = require("../utils");
var _projectPath = require("../projectPath");
var _RequestTracker = require("../RequestTracker");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// A cache of dev dep requests keyed by invalidations.
// If the package manager returns the same invalidation object, then
// we can reuse the dev dep request rather than recomputing the project
// paths and hashes.
const devDepRequestCache = new WeakMap();

// Builds a dev-dependency request for `opts.specifier` resolved from
// `opts.resolveFrom`. Short-circuits when the caller already knows the hash,
// or when the package manager handed back a previously-seen invalidation
// object; otherwise hashes the dependency's watched files.
async function createDevDependency(opts, requestDevDeps, options) {
  let {
    specifier,
    resolveFrom,
    additionalInvalidations
  } = opts;
  let key = `${specifier}:${(0, _projectPath.fromProjectPathRelative)(resolveFrom)}`;
  // If the request sent us a hash, we know the dev dep and all of its dependencies didn't change.
  // Reuse the same hash in the response. No need to send back invalidations as the request won't
  // be re-run anyway.
  let hash = requestDevDeps.get(key);
  if (hash != null) {
    return {
      specifier,
      resolveFrom,
      hash
    };
  }
  let resolveFromAbsolute = (0, _projectPath.fromProjectPath)(options.projectRoot, resolveFrom);
  // Ensure that the package manager has an entry for this resolution.
  try {
    await options.packageManager.resolve(specifier, resolveFromAbsolute);
  } catch (err) {
    // ignore
  }
  let invalidations = options.packageManager.getInvalidations(specifier, resolveFromAbsolute);
  // The WeakMap hit relies on the package manager returning the identical
  // invalidations object for an unchanged dependency.
  let cached = devDepRequestCache.get(invalidations);
  if (cached != null) {
    return cached;
  }
  // Convert absolute watch paths to project-relative paths.
  let invalidateOnFileChangeProject = [...invalidations.invalidateOnFileChange].map(f => (0, _projectPath.toProjectPath)(options.projectRoot, f));
  // It is possible for a transformer to have multiple different hashes due to
  // different dependencies (e.g. conditional requires) so we must always
  // recompute the hash and compare rather than only sending a transformer
  // dev dependency once.
  hash = await (0, _assetUtils.getInvalidationHash)(invalidateOnFileChangeProject.map(f => ({
    type: 'file',
    filePath: f
  })), options);
  let devDepRequest = {
    specifier,
    resolveFrom,
    hash,
    invalidateOnFileCreate: invalidations.invalidateOnFileCreate.map(i => (0, _utils.invalidateOnFileCreateToInternal)(options.projectRoot, i)),
    invalidateOnFileChange: new Set(invalidateOnFileChangeProject),
    invalidateOnStartup: invalidations.invalidateOnStartup,
    additionalInvalidations
  };
  devDepRequestCache.set(invalidations, devDepRequest);
  return devDepRequest;
}
/**
 * Collects dev-dependency sub-requests recorded by previous builds.
 * Returns `devDeps`, a map of "specifier:resolveFrom" -> hash for requests
 * that can be skipped, and `invalidDevDeps`, the specifier/resolveFrom pairs
 * (including each request's additional invalidations) that must be re-resolved.
 */
async function getDevDepRequests(api) {
  let previousDevDepRequests = new Map(await Promise.all(api.getSubRequests().filter(req => req.requestType === _RequestTracker.requestTypes.dev_dep_request).map(async req => [req.id, (0, _nullthrows().default)(await api.getRequestResult(req.id))])));
  return {
    // Still-valid requests: expose just their hashes, keyed the same way
    // createDevDependency builds its lookup key.
    devDeps: new Map([...previousDevDepRequests.entries()].filter(([id]) => api.canSkipSubrequest(id)).map(([, req]) => [`${req.specifier}:${(0, _projectPath.fromProjectPathRelative)(req.resolveFrom)}`, req.hash])),
    invalidDevDeps: await Promise.all([...previousDevDepRequests.entries()].filter(([id]) => !api.canSkipSubrequest(id)).flatMap(([, req]) => {
      var _req$additionalInvali;
      return [{
        specifier: req.specifier,
        resolveFrom: req.resolveFrom
      }, ...((_req$additionalInvali = req.additionalInvalidations) !== null && _req$additionalInvali !== void 0 ? _req$additionalInvali : []).map(i => ({
        specifier: i.specifier,
        resolveFrom: i.resolveFrom
      }))];
    }))
  };
}
// Tracks dev deps that have been invalidated during this build
// so we don't invalidate the require cache more than once.
const invalidatedDevDeps = (0, _buildCache.createBuildCache)();
/**
 * Invalidates each dev dependency in `invalidDevDeps` at most once per build:
 * drops the loaded plugin from `config` and clears the package manager's
 * resolution/require caches for it.
 */
function invalidateDevDeps(invalidDevDeps, options, config) {
  for (let {
    specifier,
    resolveFrom
  } of invalidDevDeps) {
    let key = `${specifier}:${(0, _projectPath.fromProjectPathRelative)(resolveFrom)}`;
    if (!invalidatedDevDeps.has(key)) {
      config.invalidatePlugin(specifier);
      options.packageManager.invalidate(specifier, (0, _projectPath.fromProjectPath)(options.projectRoot, resolveFrom));
      invalidatedDevDeps.set(key, true);
    }
  }
}
/**
 * Registers a dev_dep_request sub-request with the request tracker so the
 * dev dependency's file invalidations are tracked across builds, and stores
 * the request record as the sub-request result.
 */
async function runDevDepRequest(api, devDepRequest) {
  await api.runRequest({
    id: 'dev_dep_request:' + devDepRequest.specifier + ':' + devDepRequest.hash,
    type: _RequestTracker.requestTypes.dev_dep_request,
    run: ({
      api
    }) => {
      // Re-run when any file the dev dependency depends on changes or is deleted.
      for (let filePath of (0, _nullthrows().default)(devDepRequest.invalidateOnFileChange)) {
        api.invalidateOnFileUpdate(filePath);
        api.invalidateOnFileDelete(filePath);
      }
      // Re-run when a file matching one of the create patterns appears.
      for (let invalidation of (0, _nullthrows().default)(devDepRequest.invalidateOnFileCreate)) {
        api.invalidateOnFileCreate(invalidation);
      }
      if (devDepRequest.invalidateOnStartup) {
        api.invalidateOnStartup();
      }
      api.storeResult({
        specifier: devDepRequest.specifier,
        resolveFrom: devDepRequest.resolveFrom,
        hash: devDepRequest.hash,
        additionalInvalidations: devDepRequest.additionalInvalidations
      });
    },
    input: null
  });
}
// A cache of plugin dependency hashes that we've already sent to the main thread.
// Automatically cleared before each build.
const pluginCache = (0, _buildCache.createBuildCache)();
/**
 * Trims dev-dep requests before a worker sends them to the main thread:
 * a specifier whose hash was already forwarded during this build is reduced
 * to its identifying fields; otherwise the full request is forwarded and its
 * hash is remembered for the rest of the build.
 */
function getWorkerDevDepRequests(devDepRequests) {
  return devDepRequests.map(devDepRequest => {
    const {
      specifier,
      resolveFrom,
      hash
    } = devDepRequest;
    // Already sent this exact transformer + hash during this build — no need
    // to repeat ourselves with the full payload.
    if (pluginCache.get(specifier) === hash) {
      return {
        specifier,
        resolveFrom,
        hash
      };
    }
    pluginCache.set(specifier, hash);
    return devDepRequest;
  });
}

View File

@@ -0,0 +1,266 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.EntryResolver = void 0;
exports.default = createEntryRequest;
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _jsonSourcemap() {
const data = require("@mischnic/json-sourcemap");
_jsonSourcemap = function () {
return data;
};
return data;
}
var _RequestTracker = require("../RequestTracker");
var _projectPath = require("../projectPath");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
const type = 'entry_request';
/**
 * Creates an entry_request descriptor for the given project-relative entry
 * path; the id embeds the path so each entry gets a distinct request.
 */
function createEntryRequest(input) {
  const id = `${type}:${(0, _projectPath.fromProjectPathRelative)(input)}`;
  return {
    id,
    type: _RequestTracker.requestTypes.entry_request,
    run,
    input
  };
}
/**
 * Resolves an entry (file, directory, or glob) and registers the file and
 * glob invalidations needed so this request re-runs whenever something that
 * affects entry resolution changes.
 */
async function run({
  input,
  api,
  options
}) {
  let entryResolver = new EntryResolver(options);
  let filePath = (0, _projectPath.fromProjectPath)(options.projectRoot, input);
  let result = await entryResolver.resolveEntry(filePath);
  // Connect files like package.json that affect the entry
  // resolution so we invalidate when they change.
  for (let file of result.files) {
    api.invalidateOnFileUpdate(file.filePath);
    api.invalidateOnFileDelete(file.filePath);
  }
  // If the entry specifier is a glob, add a glob node so
  // we invalidate when a new file matches.
  if ((0, _utils().isGlob)(filePath)) {
    api.invalidateOnFileCreate({
      glob: input
    });
  }
  // Invalidate whenever an entry is deleted.
  // If the entry was a glob, we'll re-evaluate it, and otherwise
  // a proper entry error will be thrown.
  for (let entry of result.entries) {
    api.invalidateOnFileDelete(entry.filePath);
  }
  return result;
}
/**
 * Asserts that `relativeSource` (a "source" value from the package.json at
 * `pkgFilePath`) exists under `entry` and is a regular file. On failure,
 * throws a diagnostic with a code frame highlighting `keyPath` in the
 * package.json — suggesting close file matches when the path is missing.
 */
async function assertFile(fs, entry, relativeSource, pkgFilePath, keyPath, options) {
  let source = _path().default.join(entry, relativeSource);
  let stat;
  try {
    stat = await fs.stat(source);
  } catch (err) {
    // stat failed ⇒ the file does not exist; offer alternatives as hints.
    let contents = await fs.readFile(pkgFilePath, 'utf8');
    let alternatives = await (0, _utils().findAlternativeFiles)(fs, relativeSource, entry, options.projectRoot, false);
    throw new (_diagnostic().default)({
      diagnostic: {
        origin: '@parcel/core',
        message: (0, _diagnostic().md)`${_path().default.relative(process.cwd(), source)} does not exist.`,
        codeFrames: [{
          filePath: pkgFilePath,
          codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(contents, [{
            key: keyPath,
            type: 'value'
          }])
        }],
        hints: alternatives.map(r => {
          return (0, _diagnostic().md)`Did you mean '__${r}__'?`;
        })
      }
    });
  }
  // Exists but is a directory/other: still an invalid "source" value.
  if (!stat.isFile()) {
    let contents = await fs.readFile(pkgFilePath, 'utf8');
    throw new (_diagnostic().default)({
      diagnostic: {
        origin: '@parcel/core',
        message: (0, _diagnostic().md)`${_path().default.relative(process.cwd(), source)} is not a file.`,
        codeFrames: [{
          filePath: pkgFilePath,
          codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(contents, [{
            key: keyPath,
            type: 'value'
          }])
        }]
      }
    });
  }
}
/**
 * Resolves user-supplied entries into concrete entry files. Handles three
 * shapes of input: a glob (expanded recursively), a directory (entries read
 * from its package.json "targets[*].source" / "source" fields), and a plain
 * file. Throws diagnostics for missing or invalid entries.
 */
class EntryResolver {
  constructor(options) {
    this.options = options;
  }
  /**
   * Resolves `entry` to `{entries, files}` where `files` lists config files
   * (package.json) whose changes should re-trigger entry resolution.
   */
  async resolveEntry(entry) {
    let stat;
    try {
      stat = await this.options.inputFS.stat(entry);
    } catch (err) {
      // Path doesn't exist: only acceptable if it's a glob pattern.
      if (!(0, _utils().isGlob)(entry)) {
        throw new (_diagnostic().default)({
          diagnostic: {
            message: (0, _diagnostic().md)`Entry ${entry} does not exist`
          }
        });
      }
      // Expand the glob and resolve each match recursively, merging results.
      let files = await (0, _utils().glob)(entry, this.options.inputFS, {
        absolute: true,
        onlyFiles: false
      });
      let results = await Promise.all(files.map(f => this.resolveEntry(_path().default.normalize(f))));
      return results.reduce((p, res) => ({
        entries: p.entries.concat(res.entries),
        files: p.files.concat(res.files)
      }), {
        entries: [],
        files: []
      });
    }
    if (stat.isDirectory()) {
      let pkg = await this.readPackage(entry);
      if (pkg) {
        let {
          filePath
        } = pkg;
        let entries = [];
        // The package.json itself affects resolution, so it is tracked.
        let files = [{
          filePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, filePath)
        }];
        let targetsWithSources = 0;
        // Per-target "source" fields each contribute entries for that target.
        if (pkg.targets) {
          for (let targetName in pkg.targets) {
            let target = pkg.targets[targetName];
            if (target.source != null) {
              targetsWithSources++;
              let targetSources = Array.isArray(target.source) ? target.source : [target.source];
              let i = 0;
              for (let relativeSource of targetSources) {
                let source = _path().default.join(entry, relativeSource);
                let keyPath = `/targets/${targetName}/source${Array.isArray(target.source) ? `/${i}` : ''}`;
                await assertFile(this.options.inputFS, entry, relativeSource, filePath, keyPath, this.options);
                entries.push({
                  filePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, source),
                  packagePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, entry),
                  target: targetName,
                  loc: {
                    filePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, pkg.filePath),
                    ...(0, _diagnostic().getJSONSourceLocation)(pkg.map.pointers[keyPath], 'value')
                  }
                });
                i++;
              }
            }
          }
        }
        // The top-level "source" field only applies when not every target
        // already declared its own source.
        let allTargetsHaveSource = targetsWithSources > 0 && pkg != null && pkg.targets != null && Object.keys(pkg.targets).length === targetsWithSources;
        if (!allTargetsHaveSource && pkg.source != null) {
          let pkgSources = Array.isArray(pkg.source) ? pkg.source : [pkg.source];
          let i = 0;
          for (let pkgSource of pkgSources) {
            let source = _path().default.join(_path().default.dirname(filePath), pkgSource);
            let keyPath = `/source${Array.isArray(pkg.source) ? `/${i}` : ''}`;
            await assertFile(this.options.inputFS, entry, pkgSource, filePath, keyPath, this.options);
            entries.push({
              filePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, source),
              packagePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, entry),
              loc: {
                filePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, pkg.filePath),
                ...(0, _diagnostic().getJSONSourceLocation)(pkg.map.pointers[keyPath], 'value')
              }
            });
            i++;
          }
        }
        // Only return if we found any valid entries
        if (entries.length && files.length) {
          return {
            entries,
            files
          };
        }
      }
      throw new (_diagnostic().default)({
        diagnostic: {
          message: (0, _diagnostic().md)`Could not find entry: ${entry}`
        }
      });
    } else if (stat.isFile()) {
      // A plain file entry: its package path is the cwd when inside the
      // project root, otherwise the project root itself.
      let projectRoot = this.options.projectRoot;
      let packagePath = (0, _utils().isDirectoryInside)(this.options.inputFS.cwd(), projectRoot) ? this.options.inputFS.cwd() : projectRoot;
      return {
        entries: [{
          filePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, entry),
          packagePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, packagePath)
        }],
        files: []
      };
    }
    // Neither directory nor file (e.g. a socket or FIFO).
    throw new (_diagnostic().default)({
      diagnostic: {
        message: (0, _diagnostic().md)`Unknown entry: ${entry}`
      }
    });
  }
  /**
   * Reads and parses `entry`/package.json. Returns null when the file is
   * absent; throws a diagnostic when it exists but isn't valid JSON. The
   * returned object carries its filePath and a JSON source map for
   * building precise code frames.
   */
  async readPackage(entry) {
    let content, pkg;
    let pkgFile = _path().default.join(entry, 'package.json');
    try {
      content = await this.options.inputFS.readFile(pkgFile, 'utf8');
    } catch (err) {
      return null;
    }
    try {
      pkg = JSON.parse(content);
    } catch (err) {
      // TODO: code frame?
      throw new (_diagnostic().default)({
        diagnostic: {
          message: (0, _diagnostic().md)`Error parsing ${_path().default.relative(this.options.inputFS.cwd(), pkgFile)}: ${err.message}`
        }
      });
    }
    return {
      ...pkg,
      filePath: pkgFile,
      map: (0, _jsonSourcemap().parse)(content, undefined, {
        tabWidth: 1
      })
    };
  }
}
exports.EntryResolver = EntryResolver;

View File

@@ -0,0 +1,88 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createPackageRequest = createPackageRequest;
var _RequestTracker = require("../RequestTracker");
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _ConfigRequest = require("./ConfigRequest");
var _DevDepRequest = require("./DevDepRequest");
var _ParcelConfigRequest = _interopRequireDefault(require("./ParcelConfigRequest"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Creates a package_request for a bundle. The id is the bundle's content
 * hash, so packaging only re-runs when the bundle actually changed.
 */
function createPackageRequest(input) {
  const id = input.bundleGraph.getHash(input.bundle);
  return {
    id,
    type: _RequestTracker.requestTypes.package_request,
    run,
    input
  };
}
/**
 * Packages a single bundle (on a worker unless `useMainThread`), then
 * replays the dev-dep, config, and file/env/option invalidations the
 * packager reported into this request so it re-runs when any of them
 * change. Stores and returns the resulting bundle info.
 */
async function run({
  input,
  api,
  farm
}) {
  let {
    bundleGraphReference,
    optionsRef,
    bundle,
    useMainThread
  } = input;
  let runPackage = farm.createHandle('runPackage', useMainThread);
  let start = Date.now();
  let {
    devDeps,
    invalidDevDeps
  } = await (0, _DevDepRequest.getDevDepRequests)(api);
  let {
    cachePath
  } = (0, _nullthrows().default)(await api.runRequest((0, _ParcelConfigRequest.default)()));
  let {
    devDepRequests,
    configRequests,
    bundleInfo,
    invalidations
  } = await runPackage({
    bundle,
    bundleGraphReference,
    optionsRef,
    configCachePath: cachePath,
    previousDevDeps: devDeps,
    invalidDevDeps,
    previousInvalidations: api.getInvalidations()
  });
  // Register dev-dependency and config sub-requests observed during packaging.
  for (let devDepRequest of devDepRequests) {
    await (0, _DevDepRequest.runDevDepRequest)(api, devDepRequest);
  }
  for (let configRequest of configRequests) {
    await (0, _ConfigRequest.runConfigRequest)(api, configRequest);
  }
  // Translate packager-side invalidations into request-tracker invalidations.
  for (let invalidation of invalidations) {
    switch (invalidation.type) {
      case 'file':
        api.invalidateOnFileUpdate(invalidation.filePath);
        api.invalidateOnFileDelete(invalidation.filePath);
        break;
      case 'env':
        api.invalidateOnEnvChange(invalidation.key);
        break;
      case 'option':
        api.invalidateOnOptionChange(invalidation.key);
        break;
      default:
        throw new Error(`Unknown invalidation type: ${invalidation.type}`);
    }
  }
  // $FlowFixMe[cannot-write] time is marked read-only, but this is the exception
  bundleInfo.time = Date.now() - start;
  api.storeResult(bundleInfo);
  return bundleInfo;
}

View File

@@ -0,0 +1,78 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = createParcelBuildRequest;
var _BundleGraphRequest = _interopRequireDefault(require("./BundleGraphRequest"));
var _WriteBundlesRequest = _interopRequireDefault(require("./WriteBundlesRequest"));
var _utils = require("../utils");
var _dumpGraphToGraphViz = _interopRequireDefault(require("../dumpGraphToGraphViz"));
var _BundleGraph = require("../BundleGraph");
var _ReporterRunner = require("../ReporterRunner");
var _BundleGraph2 = _interopRequireDefault(require("../public/BundleGraph"));
var _Bundle = require("../public/Bundle");
var _Asset = require("../public/Asset");
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
var _RequestTracker = require("../RequestTracker");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Creates the singleton parcel_build_request, which drives a full build:
 * bundle-graph construction followed by writing bundles to disk.
 */
function createParcelBuildRequest(input) {
  return {
    id: 'parcel_build_request',
    type: _RequestTracker.requestTypes.parcel_build_request,
    run,
    input
  };
}
/**
 * Runs a full build: builds the bundle graph, reports the "bundled" phase,
 * then writes bundles to disk under a packaging trace measurement. Aborts
 * cleanly if the caller's signal fired during packaging.
 */
async function run({
  input,
  api,
  options
}) {
  let {
    optionsRef,
    requestedAssetIds,
    signal
  } = input;
  let bundleGraphRequest = (0, _BundleGraphRequest.default)({
    optionsRef,
    requestedAssetIds,
    signal
  });
  let {
    bundleGraph,
    changedAssets,
    assetRequests
  } = await api.runRequest(bundleGraphRequest, {
    // In lazy builds, force a re-run whenever specific assets were requested.
    force: options.shouldBuildLazily && requestedAssetIds.size > 0
  });
  // $FlowFixMe Added in Flow 0.121.0 upgrade in #4381 (Windows only)
  (0, _dumpGraphToGraphViz.default)(bundleGraph._graph, 'BundleGraph', _BundleGraph.bundleGraphEdgeTypes);
  await (0, _ReporterRunner.report)({
    type: 'buildProgress',
    phase: 'bundled',
    bundleGraph: new _BundleGraph2.default(bundleGraph, (bundle, bundleGraph, options) => _Bundle.NamedBundle.get(bundle, bundleGraph, options), options),
    changedAssets: new Map(Array.from(changedAssets).map(([id, asset]) => [id, (0, _Asset.assetFromValue)(asset, options)]))
  });
  let packagingMeasurement = _profiler().tracer.createMeasurement('packaging');
  let writeBundlesRequest = (0, _WriteBundlesRequest.default)({
    bundleGraph,
    optionsRef
  });
  let bundleInfo = await api.runRequest(writeBundlesRequest);
  packagingMeasurement && packagingMeasurement.end();
  (0, _utils.assertSignalNotAborted)(signal);
  return {
    bundleGraph,
    bundleInfo,
    changedAssets,
    assetRequests
  };
}

View File

@@ -0,0 +1,465 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.create = create;
exports.default = createParcelConfigRequest;
exports.getCachedParcelConfig = getCachedParcelConfig;
exports.getResolveFrom = getResolveFrom;
exports.loadParcelConfig = loadParcelConfig;
exports.mergeConfigs = mergeConfigs;
exports.mergeMaps = mergeMaps;
exports.mergePipelines = mergePipelines;
exports.parseAndProcessConfig = parseAndProcessConfig;
exports.processConfig = processConfig;
exports.processConfigChain = processConfigChain;
exports.resolveExtends = resolveExtends;
exports.resolveParcelConfig = resolveParcelConfig;
exports.validateConfigFile = validateConfigFile;
exports.validateNotEmpty = validateNotEmpty;
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _json() {
const data = require("json5");
_json = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
var _ParcelConfig = _interopRequireDefault(require("../ParcelConfig.schema"));
var _utils2 = require("../utils");
var _ParcelConfig2 = _interopRequireDefault(require("../ParcelConfig"));
var _buildCache = require("../buildCache");
var _projectPath = require("../projectPath");
var _RequestTracker = require("../RequestTracker");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
const type = 'parcel_config_request';
/**
 * Creates the singleton parcel_config_request: loads and processes the
 * project's Parcel config chain, registers invalidations for every config
 * file involved, writes the processed config to the cache, and stores the
 * { config, cachePath } result.
 */
function createParcelConfigRequest() {
  return {
    id: type,
    type: _RequestTracker.requestTypes[type],
    async run({
      api,
      options
    }) {
      let {
        config,
        extendedFiles,
        usedDefault
      } = await loadParcelConfig((0, _utils2.optionsProxy)(options, api.invalidateOnOptionChange));
      api.invalidateOnFileUpdate(config.filePath);
      api.invalidateOnFileDelete(config.filePath);
      // Also re-run when any config reached via "extends" changes.
      for (let filePath of extendedFiles) {
        let pp = (0, _projectPath.toProjectPath)(options.projectRoot, filePath);
        api.invalidateOnFileUpdate(pp);
        api.invalidateOnFileDelete(pp);
      }
      // If we fell back to the default config, re-run as soon as a
      // project-level .parcelrc is created.
      if (usedDefault) {
        let resolveFrom = getResolveFrom(options.inputFS, options.projectRoot);
        api.invalidateOnFileCreate({
          fileName: '.parcelrc',
          aboveFilePath: (0, _projectPath.toProjectPath)(options.projectRoot, resolveFrom)
        });
      }
      let cachePath = (0, _utils().hashObject)(config);
      await options.cache.set(cachePath, config);
      let result = {
        config,
        cachePath
      };
      // TODO: don't store config twice (once in the graph and once in a separate cache entry)
      api.storeResult(result);
      return result;
    },
    input: null
  };
}
const parcelConfigCache = (0, _buildCache.createBuildCache)();
/**
 * Returns a ParcelConfig instance for a processed config result, memoized
 * by the config's cache path for the duration of the current build.
 */
function getCachedParcelConfig(result, options) {
  const {
    config: processedConfig,
    cachePath
  } = result;
  const cached = parcelConfigCache.get(cachePath);
  if (cached) {
    return cached;
  }
  const config = new _ParcelConfig2.default(processedConfig, options);
  parcelConfigCache.set(cachePath, config);
  return config;
}
/**
 * Loads and processes the project's Parcel config, throwing when no
 * .parcelrc (or configured fallback) can be located.
 */
async function loadParcelConfig(options) {
  const parcelConfig = await resolveParcelConfig(options);
  if (parcelConfig == null) {
    throw new Error('Could not find a .parcelrc');
  }
  return parcelConfig;
}
/**
 * Locates the Parcel config to use: an explicit `options.config`, else the
 * nearest .parcelrc above the resolve-from directory, else
 * `options.defaultConfig`. Returns the processed config plus the extended
 * files list and whether the default was used, or null when none was found.
 */
async function resolveParcelConfig(options) {
  let resolveFrom = getResolveFrom(options.inputFS, options.projectRoot);
  let configPath = options.config != null ? (await options.packageManager.resolve(options.config, resolveFrom)).resolved : await (0, _utils().resolveConfig)(options.inputFS, resolveFrom, ['.parcelrc'], options.projectRoot);
  let usedDefault = false;
  if (configPath == null && options.defaultConfig != null) {
    usedDefault = true;
    configPath = (await options.packageManager.resolve(options.defaultConfig, resolveFrom)).resolved;
  }
  if (configPath == null) {
    return null;
  }
  let contents;
  try {
    contents = await options.inputFS.readFile(configPath, 'utf8');
  } catch (e) {
    // Resolved to a path that can't be read — surface as a diagnostic.
    throw new (_diagnostic().default)({
      diagnostic: {
        message: (0, _diagnostic().md)`Could not find parcel config at ${_path().default.relative(options.projectRoot, configPath)}`,
        origin: '@parcel/core'
      }
    });
  }
  let {
    config,
    extendedFiles
  } = await parseAndProcessConfig(configPath, contents, options);
  // Reporters passed programmatically are prepended so they run before the
  // reporters declared in the config file.
  if (options.additionalReporters.length > 0) {
    var _config$reporters;
    config.reporters = [...options.additionalReporters.map(({
      packageName,
      resolveFrom
    }) => ({
      packageName,
      resolveFrom
    })), ...((_config$reporters = config.reporters) !== null && _config$reporters !== void 0 ? _config$reporters : [])];
  }
  return {
    config,
    extendedFiles,
    usedDefault
  };
}
/**
 * Processes an in-memory config object (including its `extends` chain),
 * resolving everything relative to the config's own filePath.
 */
function create(config, options) {
  return processConfigChain(config, config.filePath, options);
}
// eslint-disable-next-line require-await
/**
 * Parses .parcelrc contents as JSON5 and processes the resulting config
 * chain. Parse errors are rethrown as diagnostics with a code frame at the
 * failing position reported by the JSON5 parser.
 */
async function parseAndProcessConfig(configPath, contents, options) {
  let config;
  try {
    config = (0, _json().parse)(contents);
  } catch (e) {
    let pos = {
      line: e.lineNumber,
      column: e.columnNumber
    };
    throw new (_diagnostic().default)({
      diagnostic: {
        message: `Failed to parse .parcelrc`,
        origin: '@parcel/core',
        codeFrames: [{
          filePath: configPath,
          language: 'json5',
          code: contents,
          codeHighlights: [{
            start: pos,
            end: pos,
            message: (0, _diagnostic().escapeMarkdown)(e.message)
          }]
        }]
      }
    });
  }
  return processConfigChain(config, configPath, options);
}
/**
 * Converts an array of plugin package names (plus optional '...' spread
 * markers) into pipeline entries carrying packageName, resolveFrom, and a
 * JSON key path for diagnostics. Returns undefined when no pipeline is set.
 */
function processPipeline(options, pipeline, keyPath, filePath) {
  if (!pipeline) {
    return undefined;
  }
  return pipeline.map((pkg, i) => {
    // '...' passes through untouched; it is expanded later by mergePipelines.
    // $FlowFixMe
    if (pkg === '...') {
      return pkg;
    }
    return {
      packageName: pkg,
      resolveFrom: (0, _projectPath.toProjectPath)(options.projectRoot, filePath),
      keyPath: `${keyPath}/${i}`
    };
  });
}
// URL schemes that may not be shadowed by a named pipeline in .parcelrc.
const RESERVED_PIPELINES = new Set(['node:', 'npm:', 'http:', 'https:', 'data:', 'tel:', 'mailto:']);
/**
 * Converts a glob-keyed map of pipelines (or single package names) into
 * processed form. Rejects keys that shadow reserved URL schemes with a
 * diagnostic pointing at the offending key in the config file.
 */
async function processMap(
// $FlowFixMe
map, keyPath, filePath, options
// $FlowFixMe
) {
  if (!map) return undefined;
  // $FlowFixMe
  let res = {};
  for (let k in map) {
    // A key like "node:foo" would shadow a reserved URL scheme.
    let i = k.indexOf(':');
    if (i > 0 && RESERVED_PIPELINES.has(k.slice(0, i + 1))) {
      let code = await options.inputFS.readFile(filePath, 'utf8');
      throw new (_diagnostic().default)({
        diagnostic: {
          message: `Named pipeline '${k.slice(0, i + 1)}' is reserved.`,
          origin: '@parcel/core',
          codeFrames: [{
            filePath: filePath,
            language: 'json5',
            code,
            codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(code, [{
              key: `${keyPath}/${k}`,
              type: 'key'
            }])
          }],
          documentationURL: 'https://parceljs.org/features/dependency-resolution/#url-schemes'
        }
      });
    }
    // A bare string value is a single package; an array is a pipeline.
    if (typeof map[k] === 'string') {
      res[k] = {
        packageName: map[k],
        resolveFrom: (0, _projectPath.toProjectPath)(options.projectRoot, filePath),
        keyPath: `${keyPath}/${k}`
      };
    } else {
      res[k] = processPipeline(options, map[k], `${keyPath}/${k}`, filePath);
    }
  }
  return res;
}
/**
 * Converts a raw .parcelrc object into its processed form: every plugin
 * field becomes a pipeline/map of { packageName, resolveFrom, keyPath }
 * entries, and file paths become project-relative.
 */
async function processConfig(configFile, options) {
  return {
    filePath: (0, _projectPath.toProjectPath)(options.projectRoot, configFile.filePath),
    ...(configFile.resolveFrom != null ? {
      resolveFrom: (0, _projectPath.toProjectPath)(options.projectRoot, configFile.resolveFrom)
    } : {
      /*::...null*/
    }),
    resolvers: processPipeline(options, configFile.resolvers, '/resolvers', configFile.filePath),
    transformers: await processMap(configFile.transformers, '/transformers', configFile.filePath, options),
    bundler: configFile.bundler != null ? {
      packageName: configFile.bundler,
      resolveFrom: (0, _projectPath.toProjectPath)(options.projectRoot, configFile.filePath),
      keyPath: '/bundler'
    } : undefined,
    namers: processPipeline(options, configFile.namers, '/namers', configFile.filePath),
    runtimes: processPipeline(options, configFile.runtimes, '/runtimes', configFile.filePath),
    packagers: await processMap(configFile.packagers, '/packagers', configFile.filePath, options),
    optimizers: await processMap(configFile.optimizers, '/optimizers', configFile.filePath, options),
    compressors: await processMap(configFile.compressors, '/compressors', configFile.filePath, options),
    reporters: processPipeline(options, configFile.reporters, '/reporters', configFile.filePath),
    validators: await processMap(configFile.validators, '/validators', configFile.filePath, options)
  };
}
/**
 * Validates and processes a config file, then resolves and merges its
 * `extends` chain (extended configs merge left-to-right; the inline config
 * is merged last so it takes precedence). Collects resolution errors from
 * all extends entries before throwing them together.
 */
async function processConfigChain(configFile, filePath, options) {
  // Validate config...
  let relativePath = _path().default.relative(options.inputFS.cwd(), filePath);
  validateConfigFile(configFile, relativePath);
  // Process config...
  let config = await processConfig({
    filePath,
    ...configFile
  }, options);
  let extendedFiles = [];
  if (configFile.extends != null) {
    let exts = Array.isArray(configFile.extends) ? configFile.extends : [configFile.extends];
    let errors = [];
    if (exts.length !== 0) {
      let extStartConfig;
      let i = 0;
      for (let ext of exts) {
        try {
          let key = Array.isArray(configFile.extends) ? `/extends/${i}` : '/extends';
          let resolved = await resolveExtends(ext, filePath, key, options);
          extendedFiles.push(resolved);
          let {
            extendedFiles: moreExtendedFiles,
            config: nextConfig
          } = await processExtendedConfig(filePath, key, ext, resolved, options);
          extendedFiles = extendedFiles.concat(moreExtendedFiles);
          extStartConfig = extStartConfig ? mergeConfigs(extStartConfig, nextConfig) : nextConfig;
        } catch (err) {
          // Keep going so all broken extends entries are reported at once.
          errors.push(err);
        }
        i++;
      }
      // Merge with the inline config last
      if (extStartConfig) {
        config = mergeConfigs(extStartConfig, config);
      }
    }
    if (errors.length > 0) {
      throw new (_diagnostic().default)({
        diagnostic: errors.flatMap(e => e.diagnostics)
      });
    }
  }
  return {
    config,
    extendedFiles
  };
}
/**
 * Resolves an `extends` entry to an absolute, realpath'd file: relative
 * specifiers resolve against the extending config's directory, anything
 * else goes through the package manager. Failures become diagnostics with
 * a code frame at the `extends` value and module-name suggestions.
 */
async function resolveExtends(ext, configPath, extendsKey, options) {
  if (ext.startsWith('.')) {
    return _path().default.resolve(_path().default.dirname(configPath), ext);
  } else {
    try {
      let {
        resolved
      } = await options.packageManager.resolve(ext, configPath);
      // realpath so symlinked packages dedupe to one canonical path.
      return options.inputFS.realpath(resolved);
    } catch (err) {
      let parentContents = await options.inputFS.readFile(configPath, 'utf8');
      let alternatives = await (0, _utils().findAlternativeNodeModules)(options.inputFS, ext, _path().default.dirname(configPath));
      throw new (_diagnostic().default)({
        diagnostic: {
          message: `Cannot find extended parcel config`,
          origin: '@parcel/core',
          codeFrames: [{
            filePath: configPath,
            language: 'json5',
            code: parentContents,
            codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(parentContents, [{
              key: extendsKey,
              type: 'value',
              message: (0, _diagnostic().md)`Cannot find module "${ext}"${alternatives[0] ? `, did you mean "${alternatives[0]}"?` : ''}`
            }])
          }]
        }
      });
    }
  }
}
/**
 * Reads and recursively processes an already-resolved extended config file.
 * A read failure becomes a diagnostic anchored at the `extends` value in
 * the parent config, with file-name suggestions.
 */
async function processExtendedConfig(configPath, extendsKey, extendsSpecifier, resolvedExtendedConfigPath, options) {
  let contents;
  try {
    contents = await options.inputFS.readFile(resolvedExtendedConfigPath, 'utf8');
  } catch (e) {
    let parentContents = await options.inputFS.readFile(configPath, 'utf8');
    let alternatives = await (0, _utils().findAlternativeFiles)(options.inputFS, extendsSpecifier, _path().default.dirname(resolvedExtendedConfigPath), options.projectRoot);
    throw new (_diagnostic().default)({
      diagnostic: {
        message: 'Cannot find extended parcel config',
        origin: '@parcel/core',
        codeFrames: [{
          filePath: configPath,
          language: 'json5',
          code: parentContents,
          codeHighlights: (0, _diagnostic().generateJSONCodeHighlights)(parentContents, [{
            key: extendsKey,
            type: 'value',
            message: (0, _diagnostic().md)`"${extendsSpecifier}" does not exist${alternatives[0] ? `, did you mean "${alternatives[0]}"?` : ''}`
          }])
        }]
      }
    });
  }
  return parseAndProcessConfig(resolvedExtendedConfigPath, contents, options);
}
/**
 * Validates a raw config object against the Parcel config schema, rejecting
 * empty configs first. Schema violations throw a diagnostic.
 */
function validateConfigFile(config, relativePath) {
  validateNotEmpty(config, relativePath);
  _utils().validateSchema.diagnostic(_ParcelConfig.default, {
    data: config,
    filePath: relativePath
  }, '@parcel/core', 'Invalid Parcel Config');
}
// Asserts the parsed config is not an empty object ({}).
function validateNotEmpty(config, relativePath) {
  _assert().default.notDeepStrictEqual(config, {}, `${relativePath} can't be empty`);
}
/**
 * Merges an extending config (`ext`) on top of a base config. Pipelines are
 * spliced via their '...' markers (then asserted spread-free), maps merge
 * key-by-key with extension entries winning, and single-plugin fields like
 * `bundler` fall back to the base when the extension omits them.
 */
function mergeConfigs(base, ext) {
  return {
    filePath: ext.filePath,
    resolvers: assertPurePipeline(mergePipelines(base.resolvers, ext.resolvers)),
    transformers: mergeMaps(base.transformers, ext.transformers, mergePipelines),
    validators: mergeMaps(base.validators, ext.validators, mergePipelines),
    bundler: ext.bundler || base.bundler,
    namers: assertPurePipeline(mergePipelines(base.namers, ext.namers)),
    runtimes: assertPurePipeline(mergePipelines(base.runtimes, ext.runtimes)),
    packagers: mergeMaps(base.packagers, ext.packagers),
    optimizers: mergeMaps(base.optimizers, ext.optimizers, mergePipelines),
    compressors: mergeMaps(base.compressors, ext.compressors, mergePipelines),
    reporters: assertPurePipeline(mergePipelines(base.reporters, ext.reporters))
  };
}
/**
 * Returns the base path used for resolving configs and plugins: the cwd when
 * it lies inside the project root, otherwise the project root. A synthetic
 * "index" filename is appended because resolution APIs expect a file path
 * to resolve from.
 */
function getResolveFrom(fs, projectRoot) {
  const cwd = fs.cwd();
  const base = (0, _utils().isDirectoryInside)(cwd, projectRoot) ? cwd : projectRoot;
  return _path().default.join(base, 'index');
}
/**
 * Asserts that a merged pipeline no longer contains '...' spread markers
 * (the only possible string entries) and returns a shallow copy of it.
 */
function assertPurePipeline(pipeline) {
  for (const entry of pipeline) {
    (0, _assert().default)(typeof entry !== 'string');
  }
  return pipeline.slice();
}
/**
 * Merges a base pipeline into an extension pipeline. With no extension the
 * base (or an empty pipeline) wins; otherwise a single '...' marker in the
 * extension is replaced by the base pipeline's entries. More than one '...'
 * is an error.
 */
function mergePipelines(base, ext) {
  if (ext == null) {
    return base != null ? base : [];
  }
  // At most one spread marker is permitted per pipeline.
  const spreadCount = ext.reduce((n, v) => v === '...' ? n + 1 : n, 0);
  if (spreadCount > 1) {
    throw new Error('Only one spread element can be included in a config pipeline');
  }
  // Merge the base pipeline into the position of the spread marker, if any.
  const spreadIndex = ext.indexOf('...');
  if (spreadIndex < 0) {
    return ext;
  }
  const before = ext.slice(0, spreadIndex);
  const after = ext.slice(spreadIndex + 1);
  return [...before, ...(base != null ? base : []), ...after];
}
/**
 * Merges two glob-keyed maps. Extension entries are inserted first so they
 * take precedence in the resulting glob map; keys present in both are
 * combined via `merger` when provided, otherwise the extension value wins.
 * Base-only keys are appended afterwards.
 */
function mergeMaps(base, ext, merger) {
  // Nothing to extend with: fall back to the base map (or an empty one).
  if (!ext || Object.keys(ext).length === 0) {
    return base || {};
  }
  if (!base) {
    return ext;
  }
  const res = {};
  // Extension entries go in first so they have higher precedence in the
  // output glob map.
  for (const key of Object.keys(ext)) {
    res[key] = merger && base[key] != null ? merger(base[key], ext[key]) : ext[key];
  }
  // Carry over base entries the extension did not define.
  for (const key of Object.keys(base)) {
    if (res[key] == null) {
      res[key] = base[key];
    }
  }
  return res;
}

View File

@@ -0,0 +1,350 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ResolverRunner = void 0;
exports.default = createPathRequest;
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _logger() {
const data = require("@parcel/logger");
_logger = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _ReporterRunner = require("../ReporterRunner");
var _Dependency = require("../public/Dependency");
var _PluginOptions = _interopRequireDefault(require("../public/PluginOptions"));
var _ParcelConfig = _interopRequireDefault(require("../ParcelConfig"));
var _ParcelConfigRequest = _interopRequireWildcard(require("./ParcelConfigRequest"));
var _utils2 = require("../utils");
var _projectPath = require("../projectPath");
var _types = require("../types");
var _buildCache = require("../buildCache");
var _InternalConfig = require("../InternalConfig");
var _ConfigRequest = require("./ConfigRequest");
var _DevDepRequest = require("./DevDepRequest");
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
var _RequestTracker = require("../RequestTracker");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
const PIPELINE_REGEX = /^([a-z0-9-]+?):(.*)$/i;
// Builds a path-request descriptor for the request tracker. The id combines
// the dependency id with the request name so identical resolutions dedupe.
function createPathRequest(input) {
  let requestId = `${input.dependency.id}:${input.name}`;
  return {
    id: requestId,
    type: _RequestTracker.requestTypes.path_request,
    run,
    input
  };
}
/**
 * Executes a path request: resolves the dependency with a ResolverRunner,
 * registers all resulting invalidations, configs, and dev dependencies with
 * the request API, and returns the resolved asset group. Throws a
 * MODULE_NOT_FOUND diagnostic error when resolution failed with diagnostics;
 * returns undefined when the dependency was excluded or optional.
 */
async function run({
  input,
  api,
  options
}) {
  let configResult = (0, _nullthrows().default)(await api.runRequest((0, _ParcelConfigRequest.default)()));
  let config = (0, _ParcelConfigRequest.getCachedParcelConfig)(configResult, options);
  let {
    devDeps,
    invalidDevDeps
  } = await (0, _DevDepRequest.getDevDepRequests)(api);
  (0, _DevDepRequest.invalidateDevDeps)(invalidDevDeps, options, config);
  let resolverRunner = new ResolverRunner({
    options,
    config,
    previousDevDeps: devDeps
  });
  let result = await resolverRunner.resolve(input.dependency);
  // Register every invalidation the resolvers reported so this request
  // re-runs when the relevant env vars or files change.
  if (result.invalidateOnEnvChange) {
    for (let env of result.invalidateOnEnvChange) {
      api.invalidateOnEnvChange(env);
    }
  }
  if (result.invalidateOnFileCreate) {
    for (let file of result.invalidateOnFileCreate) {
      api.invalidateOnFileCreate((0, _utils2.invalidateOnFileCreateToInternal)(options.projectRoot, file));
    }
  }
  if (result.invalidateOnFileChange) {
    for (let filePath of result.invalidateOnFileChange) {
      let pp = (0, _projectPath.toProjectPath)(options.projectRoot, filePath);
      api.invalidateOnFileUpdate(pp);
      api.invalidateOnFileDelete(pp);
    }
  }
  // Flush the config and dev-dep requests the runner accumulated while resolving.
  for (let config of resolverRunner.configs.values()) {
    await (0, _ConfigRequest.runConfigRequest)(api, config);
  }
  for (let devDepRequest of resolverRunner.devDepRequests.values()) {
    await (0, _DevDepRequest.runDevDepRequest)(api, devDepRequest);
  }
  if (result.assetGroup) {
    api.invalidateOnFileDelete(result.assetGroup.filePath);
    return result.assetGroup;
  }
  if (result.diagnostics && result.diagnostics.length > 0) {
    let err = new (_diagnostic().default)({
      diagnostic: result.diagnostics
    });
    // $FlowFixMe[prop-missing]
    err.code = 'MODULE_NOT_FOUND';
    throw err;
  }
}
const configCache = (0, _buildCache.createBuildCache)();
/**
 * Runs the configured resolver plugins in order to resolve a dependency
 * specifier to an asset group, collecting invalidations, plugin configs,
 * dev dependencies, and diagnostics along the way.
 */
class ResolverRunner {
  constructor({
    config,
    options,
    previousDevDeps
  }) {
    this.config = config;
    this.options = options;
    this.pluginOptions = new _PluginOptions.default(this.options);
    this.previousDevDeps = previousDevDeps;
    // Keyed by "specifier:resolveFrom"; flushed to the request API by run().
    this.devDepRequests = new Map();
    // Resolver plugin configs loaded for this resolution, keyed by plugin name.
    this.configs = new Map();
  }
  // Builds a diagnostic for a failed resolution, attaching a code frame at
  // the dependency's source location when one is available.
  async getDiagnostic(dependency, message) {
    let diagnostic = {
      message,
      origin: '@parcel/core'
    };
    if (dependency.loc && dependency.sourcePath != null) {
      let filePath = (0, _projectPath.fromProjectPath)(this.options.projectRoot, dependency.sourcePath);
      diagnostic.codeFrames = [{
        filePath,
        // Best effort: an unreadable source file yields an empty code frame.
        code: await this.options.inputFS.readFile(filePath, 'utf8').catch(() => ''),
        codeHighlights: dependency.loc ? [(0, _diagnostic().convertSourceLocationToHighlight)(dependency.loc)] : []
      }];
    }
    return diagnostic;
  }
  // Loads (or reuses build-cached) plugin configs for each resolver and
  // queues dev-dep requests for the configs' declared dev dependencies.
  async loadConfigs(resolvers) {
    for (let plugin of resolvers) {
      // Only load config for a plugin once per build.
      let config = configCache.get(plugin.name);
      if (!config && plugin.plugin.loadConfig != null) {
        config = (0, _InternalConfig.createConfig)({
          plugin: plugin.name,
          searchPath: (0, _projectPath.toProjectPathUnsafe)('index')
        });
        await (0, _ConfigRequest.loadPluginConfig)(plugin, config, this.options);
        configCache.set(plugin.name, config);
        this.configs.set(plugin.name, config);
      }
      if (config) {
        for (let devDep of config.devDeps) {
          let devDepRequest = await (0, _DevDepRequest.createDevDependency)(devDep, this.previousDevDeps, this.options);
          this.runDevDepRequest(devDepRequest);
        }
        this.configs.set(plugin.name, config);
      }
    }
  }
  // Queues a dev-dep request, deduplicated by specifier + resolveFrom.
  runDevDepRequest(devDepRequest) {
    let {
      specifier,
      resolveFrom
    } = devDepRequest;
    let key = `${specifier}:${(0, _projectPath.fromProjectPathRelative)(resolveFrom)}`;
    this.devDepRequests.set(key, devDepRequest);
  }
  /**
   * Resolves a dependency by trying each resolver plugin in turn; the first
   * plugin to return a result wins. Returns an object with the resolved
   * assetGroup (null when excluded or optional) plus any invalidations and
   * diagnostics produced along the way.
   */
  async resolve(dependency) {
    var _dependency$resolveFr;
    let dep = (0, _Dependency.getPublicDependency)(dependency, this.options);
    (0, _ReporterRunner.report)({
      type: 'buildProgress',
      phase: 'resolving',
      dependency: dep
    });
    let resolvers = await this.config.getResolvers();
    await this.loadConfigs(resolvers);
    let pipeline;
    let specifier;
    // Split an optional "pipeline:" prefix off the specifier when it names a
    // pipeline declared in the Parcel config.
    let validPipelines = new Set(this.config.getNamedPipelines());
    let match = dependency.specifier.match(PIPELINE_REGEX);
    if (match &&
    // Don't consider absolute paths. Absolute paths are only supported for entries,
    // and include e.g. `C:\` on Windows, conflicting with pipelines.
    !_path().default.isAbsolute(dependency.specifier)) {
      [, pipeline, specifier] = match;
      if (!validPipelines.has(pipeline)) {
        // This may be a url protocol or scheme rather than a pipeline, such as
        // `url('http://example.com/foo.png')`. Pass it to resolvers to handle.
        specifier = dependency.specifier;
        pipeline = null;
      }
    } else {
      specifier = dependency.specifier;
    }
    // Entrypoints, convert ProjectPath in module specifier to absolute path
    if (dep.resolveFrom == null) {
      specifier = _path().default.join(this.options.projectRoot, specifier);
    }
    let diagnostics = [];
    let invalidateOnFileCreate = [];
    let invalidateOnFileChange = [];
    let invalidateOnEnvChange = [];
    for (let resolver of resolvers) {
      let measurement;
      try {
        var _this$configs$get;
        measurement = _profiler().tracer.createMeasurement(resolver.name, 'resolve', specifier);
        let result = await resolver.plugin.resolve({
          specifier,
          pipeline,
          dependency: dep,
          options: this.pluginOptions,
          logger: new (_logger().PluginLogger)({
            origin: resolver.name
          }),
          tracer: new (_profiler().PluginTracer)({
            origin: resolver.name,
            category: 'resolver'
          }),
          config: (_this$configs$get = this.configs.get(resolver.name)) === null || _this$configs$get === void 0 ? void 0 : _this$configs$get.result
        });
        measurement && measurement.end();
        if (result) {
          // Merge plugin-provided metadata onto the internal dependency.
          if (result.meta) {
            dependency.resolverMeta = result.meta;
            dependency.meta = {
              ...dependency.meta,
              ...result.meta
            };
          }
          if (result.priority != null) {
            dependency.priority = _types.Priority[result.priority];
          }
          // Accumulate invalidations even when this plugin doesn't produce
          // the final result.
          if (result.invalidateOnEnvChange) {
            invalidateOnEnvChange.push(...result.invalidateOnEnvChange);
          }
          if (result.invalidateOnFileCreate) {
            invalidateOnFileCreate.push(...result.invalidateOnFileCreate);
          }
          if (result.invalidateOnFileChange) {
            invalidateOnFileChange.push(...result.invalidateOnFileChange);
          }
          if (result.isExcluded) {
            return {
              assetGroup: null,
              invalidateOnFileCreate,
              invalidateOnFileChange,
              invalidateOnEnvChange
            };
          }
          if (result.filePath != null) {
            var _result$query, _pipeline;
            let resultFilePath = result.filePath;
            if (!_path().default.isAbsolute(resultFilePath)) {
              throw new Error((0, _diagnostic().md)`Resolvers must return an absolute path, ${resolver.name} returned: ${resultFilePath}`);
            }
            return {
              assetGroup: {
                canDefer: result.canDefer,
                filePath: (0, _projectPath.toProjectPath)(this.options.projectRoot, resultFilePath),
                query: (_result$query = result.query) === null || _result$query === void 0 ? void 0 : _result$query.toString(),
                sideEffects: result.sideEffects,
                code: result.code,
                env: dependency.env,
                pipeline: result.pipeline === undefined ? (_pipeline = pipeline) !== null && _pipeline !== void 0 ? _pipeline : dependency.pipeline : result.pipeline,
                isURL: dep.specifierType === 'url'
              },
              invalidateOnFileCreate,
              invalidateOnFileChange,
              invalidateOnEnvChange
            };
          }
          // Plugin reported diagnostics without a result: record them and
          // continue with the next resolver.
          if (result.diagnostics) {
            let errorDiagnostic = (0, _diagnostic().errorToDiagnostic)(new (_diagnostic().default)({
              diagnostic: result.diagnostics
            }), {
              origin: resolver.name,
              filePath: specifier
            });
            diagnostics.push(...errorDiagnostic);
          }
        }
      } catch (e) {
        // Add error to error map, we'll append these to the standard error if we can't resolve the asset
        let errorDiagnostic = (0, _diagnostic().errorToDiagnostic)(e, {
          origin: resolver.name,
          filePath: specifier
        });
        if (Array.isArray(errorDiagnostic)) {
          diagnostics.push(...errorDiagnostic);
        } else {
          diagnostics.push(errorDiagnostic);
        }
        break;
      } finally {
        measurement && measurement.end();
        // Add dev dependency for the resolver. This must be done AFTER running it due to
        // the potential for lazy require() that aren't executed until the request runs.
        let devDepRequest = await (0, _DevDepRequest.createDevDependency)({
          specifier: resolver.name,
          resolveFrom: resolver.resolveFrom
        }, this.previousDevDeps, this.options);
        this.runDevDepRequest(devDepRequest);
      }
    }
    // Optional dependencies that fail to resolve are not an error.
    if (dep.isOptional) {
      return {
        assetGroup: null,
        invalidateOnFileCreate,
        invalidateOnFileChange,
        invalidateOnEnvChange
      };
    }
    let resolveFrom = (_dependency$resolveFr = dependency.resolveFrom) !== null && _dependency$resolveFr !== void 0 ? _dependency$resolveFr : dependency.sourcePath;
    let dir = resolveFrom != null ? (0, _utils().normalizePath)((0, _projectPath.fromProjectPathRelative)(resolveFrom)) : '';
    let diagnostic = await this.getDiagnostic(dependency, (0, _diagnostic().md)`Failed to resolve '${dependency.specifier}' ${dir ? `from '${dir}'` : ''}`);
    diagnostics.unshift(diagnostic);
    return {
      assetGroup: null,
      invalidateOnFileCreate,
      invalidateOnFileChange,
      invalidateOnEnvChange,
      diagnostics
    };
  }
}
exports.ResolverRunner = ResolverRunner;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,66 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = createValidationRequest;
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _ParcelConfig = _interopRequireDefault(require("../ParcelConfig"));
var _ReporterRunner = require("../ReporterRunner");
var _Validation = _interopRequireDefault(require("../Validation"));
var _ParcelConfigRequest = _interopRequireDefault(require("./ParcelConfigRequest"));
var _RequestTracker = require("../RequestTracker");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Creates the (singleton) validation request. Running it schedules per-asset
 * "validate" plugin calls on worker-farm threads and "validateAll"-style
 * plugins on a dedicated main-thread Validation, then awaits them all.
 */
function createValidationRequest(input) {
  return {
    id: 'validation',
    type: _RequestTracker.requestTypes.validation_request,
    run: async ({
      input: {
        assetRequests,
        optionsRef
      },
      api,
      options,
      farm
    }) => {
      let {
        config: processedConfig,
        cachePath
      } = (0, _nullthrows().default)(await api.runRequest((0, _ParcelConfigRequest.default)()));
      let config = new _ParcelConfig.default(processedConfig, options);
      // Only assets that have at least one validator configured are validated.
      let trackedRequestsDesc = assetRequests.filter(request => {
        return config.getValidatorNames(request.filePath).length > 0;
      });
      // Schedule validations on workers for all plugins that implement the one-asset-at-a-time "validate" method.
      let promises = trackedRequestsDesc.map(async request => (await farm.createHandle('runValidate'))({
        requests: [request],
        optionsRef: optionsRef,
        configCachePath: cachePath
      }));
      // Skip sending validation requests if no validators were configured
      // (promises is necessarily empty in that case, so nothing is dropped).
      if (trackedRequestsDesc.length === 0) {
        return;
      }
      // Schedule validations on the main thread for all validation plugins that implement "validateAll".
      promises.push(new _Validation.default({
        requests: trackedRequestsDesc,
        options,
        config,
        report: _ReporterRunner.report,
        dedicatedThread: true
      }).run());
      await Promise.all(promises);
    },
    input
  };
}

View File

@@ -0,0 +1,254 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = createWriteBundleRequest;
var _constants = require("../constants");
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
var _Bundle = require("../public/Bundle");
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _stream() {
const data = require("stream");
_stream = function () {
return data;
};
return data;
}
var _projectPath = require("../projectPath");
var _ParcelConfigRequest = _interopRequireWildcard(require("./ParcelConfigRequest"));
var _PluginOptions = _interopRequireDefault(require("../public/PluginOptions"));
function _logger() {
const data = require("@parcel/logger");
_logger = function () {
return data;
};
return data;
}
var _DevDepRequest = require("./DevDepRequest");
var _ParcelConfig = _interopRequireDefault(require("../ParcelConfig"));
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
var _RequestTracker = require("../RequestTracker");
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Length of the hash-reference placeholder prefix emitted by the packager.
const HASH_REF_PREFIX_LEN = _constants.HASH_REF_PREFIX.length;
// Number of trailing bytes held back between chunks in replaceStream: the
// longest partial placeholder (prefix + 32-char hash, minus one byte) that
// could straddle a chunk boundary.
const BOUNDARY_LENGTH = _constants.HASH_REF_PREFIX.length + 32 - 1;
/**
* Writes a bundle to the dist directory, replacing hash references with the final content hashes.
*/
// Builds the write_bundle request descriptor. The id encodes the bundle id,
// content hash, name hash, and final name so a rewrite only happens when one
// of those changes.
function createWriteBundleRequest(input) {
  let {bundle, info, hashRefToNameHash} = input;
  let bundleName = (0, _nullthrows().default)(bundle.name);
  let nameHash = (0, _nullthrows().default)(hashRefToNameHash.get(bundle.hashReference));
  return {
    id: `${bundle.id}:${info.hash}:${nameHash}:${bundleName}`,
    type: _RequestTracker.requestTypes.write_bundle_request,
    run,
    input
  };
}
/**
 * Writes a packaged bundle (and its source map, when present in the cache)
 * from the cache to the target dist directory, registering file-delete
 * invalidations and returning {filePath, type, stats} for the bundle.
 */
async function run({
  input,
  options,
  api
}) {
  var _info$time;
  let {
    bundleGraph,
    bundle,
    info,
    hashRefToNameHash
  } = input;
  let {
    inputFS,
    outputFS
  } = options;
  let name = (0, _nullthrows().default)(bundle.name);
  let thisHashReference = bundle.hashReference;
  // The packager may have changed the bundle's type; swap the extension.
  if (info.type !== bundle.type) {
    name = name.slice(0, -_path().default.extname(name).length) + '.' + info.type;
  }
  // Substitute this bundle's own hash placeholder in its file name.
  if (name.includes(thisHashReference)) {
    let thisNameHash = (0, _nullthrows().default)(hashRefToNameHash.get(thisHashReference));
    name = name.replace(thisHashReference, thisNameHash);
  }
  let filePath = (0, _projectPath.joinProjectPath)(bundle.target.distDir, name);
  // Watch the bundle and source map for deletion.
  // Also watch the dist dir because invalidateOnFileDelete does not currently
  // invalidate when a parent directory is deleted.
  // TODO: do we want to also watch for file edits?
  api.invalidateOnFileDelete(bundle.target.distDir);
  api.invalidateOnFileDelete(filePath);
  let cacheKeys = info.cacheKeys;
  let mapKey = cacheKeys.map;
  let fullPath = (0, _projectPath.fromProjectPath)(options.projectRoot, filePath);
  if (mapKey && bundle.env.sourceMap && !bundle.env.sourceMap.inline) {
    api.invalidateOnFileDelete((0, _projectPath.toProjectPath)(options.projectRoot, fullPath + '.map'));
  }
  let dir = _path().default.dirname(fullPath);
  await outputFS.mkdirp(dir); // ? Got rid of dist exists, is this an expensive operation
  // Use the file mode from the entry asset as the file mode for the bundle.
  // Don't do this for browser builds, as the executable bit in particular is unnecessary.
  let publicBundle = _Bundle.NamedBundle.get(bundle, bundleGraph, options);
  let mainEntry = publicBundle.getMainEntry();
  let writeOptions = publicBundle.env.isBrowser() || !mainEntry ? undefined : {
    mode: (await inputFS.stat(mainEntry.filePath)).mode
  };
  // Large blobs are streamed straight from the cache; small ones are read
  // fully and wrapped in a stream.
  let contentStream;
  if (info.isLargeBlob) {
    contentStream = options.cache.getStream(cacheKeys.content);
  } else {
    contentStream = (0, _utils().blobToStream)(await options.cache.getBlob(cacheKeys.content));
  }
  // Tally bytes flowing out of the cache for stats.size (measured before
  // hash-reference replacement inside writeFiles).
  let size = 0;
  contentStream = contentStream.pipe(new (_utils().TapStream)(buf => {
    size += buf.length;
  }));
  let configResult = (0, _nullthrows().default)(await api.runRequest((0, _ParcelConfigRequest.default)()));
  let config = (0, _ParcelConfigRequest.getCachedParcelConfig)(configResult, options);
  let {
    devDeps,
    invalidDevDeps
  } = await (0, _DevDepRequest.getDevDepRequests)(api);
  (0, _DevDepRequest.invalidateDevDeps)(invalidDevDeps, options, config);
  await writeFiles(contentStream, info, hashRefToNameHash, options, config, outputFS, filePath, writeOptions, devDeps, api);
  if (mapKey && bundle.env.sourceMap && !bundle.env.sourceMap.inline && (await options.cache.has(mapKey))) {
    await writeFiles((0, _utils().blobToStream)(await options.cache.getBlob(mapKey)), info, hashRefToNameHash, options, config, outputFS, (0, _projectPath.toProjectPathUnsafe)((0, _projectPath.fromProjectPathRelative)(filePath) + '.map'), writeOptions, devDeps, api);
  }
  let res = {
    filePath,
    type: info.type,
    stats: {
      size,
      time: (_info$time = info.time) !== null && _info$time !== void 0 ? _info$time : 0
    }
  };
  api.storeResult(res);
  return res;
}
/**
 * Streams the bundle contents (with hash references replaced, when present)
 * through every configured compressor, writing the outputs in parallel.
 */
async function writeFiles(inputStream, info, hashRefToNameHash, options, config, outputFS, filePath, writeOptions, devDeps, api) {
  let compressors = await config.getCompressors((0, _projectPath.fromProjectPathRelative)(filePath));
  let fullPath = (0, _projectPath.fromProjectPath)(options.projectRoot, filePath);
  // Only pay for the replacement transform when the bundle has hash refs.
  let stream = info.hashReferences.length ? inputStream.pipe(replaceStream(hashRefToNameHash)) : inputStream;
  let promises = [];
  for (let compressor of compressors) {
    // Each compressor gets its own cloned stream so reads don't compete.
    promises.push(runCompressor(compressor, cloneStream(stream), options, outputFS, fullPath, writeOptions, devDeps, api));
  }
  await Promise.all(promises);
}
/**
 * Runs a single compressor plugin over the bundle stream and writes its
 * output to disk, appending the extension the plugin reports (e.g. ".gz").
 * When the plugin returns null, nothing is written here for that compressor.
 */
async function runCompressor(compressor, stream, options, outputFS, filePath, writeOptions, devDeps, api) {
  let measurement;
  try {
    measurement = _profiler().tracer.createMeasurement(compressor.name, 'compress', _path().default.relative(options.projectRoot, filePath));
    let res = await compressor.plugin.compress({
      stream,
      options: new _PluginOptions.default(options),
      logger: new (_logger().PluginLogger)({
        origin: compressor.name
      }),
      tracer: new (_profiler().PluginTracer)({
        origin: compressor.name,
        category: 'compress'
      })
    });
    if (res != null) {
      // stream.pipeline handles backpressure and reports the first error.
      await new Promise((resolve, reject) => (0, _stream().pipeline)(res.stream, outputFS.createWriteStream(filePath + (res.type != null ? '.' + res.type : ''), writeOptions), err => {
        if (err) reject(err);else resolve();
      }));
    }
  } catch (err) {
    // Surface plugin failures as diagnostics attributed to this compressor.
    throw new (_diagnostic().default)({
      diagnostic: (0, _diagnostic().errorToDiagnostic)(err, {
        origin: compressor.name
      })
    });
  } finally {
    measurement && measurement.end();
    // Add dev deps for compressor plugins AFTER running them, to account for lazy require().
    let devDepRequest = await (0, _DevDepRequest.createDevDependency)({
      specifier: compressor.name,
      resolveFrom: compressor.resolveFrom
    }, devDeps, options);
    await (0, _DevDepRequest.runDevDepRequest)(api, devDepRequest);
  }
}
/**
 * Transform stream that rewrites packager hash-reference placeholders to
 * their final name hashes. A rolling tail of up to BOUNDARY_LENGTH bytes is
 * held back between chunks so a placeholder split across two chunks is
 * still matched.
 */
function replaceStream(hashRefToNameHash) {
  let boundaryStr = Buffer.alloc(0);
  // Reusable output buffer; regrown only when a chunk needs more space.
  let replaced = Buffer.alloc(0);
  return new (_stream().Transform)({
    transform(chunk, encoding, cb) {
      // Prepend the held-back tail from the previous chunk.
      // (boundaryStr views into `replaced`; Buffer.concat copies it before
      // `replaced` is overwritten below.)
      let str = Buffer.concat([boundaryStr, Buffer.from(chunk)]);
      let lastMatchI = 0;
      if (replaced.length < str.byteLength) {
        replaced = Buffer.alloc(str.byteLength);
      }
      let replacedLength = 0;
      while (lastMatchI < str.byteLength) {
        let matchI = str.indexOf(_constants.HASH_REF_PREFIX, lastMatchI);
        if (matchI === -1) {
          // No further placeholder: copy the remainder through unchanged.
          replaced.set(str.subarray(lastMatchI, str.byteLength), replacedLength);
          replacedLength += str.byteLength - lastMatchI;
          break;
        } else {
          var _hashRefToNameHash$ge;
          // A placeholder is prefix + fixed-length hash; unknown references
          // are passed through unchanged.
          let match = str.subarray(matchI, matchI + HASH_REF_PREFIX_LEN + _constants.HASH_REF_HASH_LEN).toString();
          let replacement = Buffer.from((_hashRefToNameHash$ge = hashRefToNameHash.get(match)) !== null && _hashRefToNameHash$ge !== void 0 ? _hashRefToNameHash$ge : match);
          replaced.set(str.subarray(lastMatchI, matchI), replacedLength);
          replacedLength += matchI - lastMatchI;
          replaced.set(replacement, replacedLength);
          replacedLength += replacement.byteLength;
          lastMatchI = matchI + HASH_REF_PREFIX_LEN + _constants.HASH_REF_HASH_LEN;
        }
      }
      // Hold back the last BOUNDARY_LENGTH bytes in case a placeholder
      // straddles this chunk boundary; emit everything before them.
      boundaryStr = replaced.subarray(replacedLength - BOUNDARY_LENGTH, replacedLength);
      let strUpToBoundary = replaced.subarray(0, replacedLength - BOUNDARY_LENGTH);
      cb(null, strUpToBoundary);
    },
    flush(cb) {
      // Input ended: flush the held-back tail.
      cb(null, boundaryStr);
    }
  });
}
// Creates an independent Readable that mirrors `readable`: data, end, and
// error events are forwarded so several consumers can each read a copy.
function cloneStream(readable) {
  const copy = new (_stream().Readable)();
  // $FlowFixMe
  copy._read = () => {}; // no-op; data is pushed from the source's events
  const forwarders = {
    data: chunk => copy.push(chunk),
    end: () => copy.push(null),
    error: err => copy.emit('error', err)
  };
  for (const event of Object.keys(forwarders)) {
    readable.on(event, forwarders[event]);
  }
  return copy;
}

View File

@@ -0,0 +1,154 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = createWriteBundlesRequest;
var _RequestTracker = require("../RequestTracker");
var _constants = require("../constants");
var _projectPath = require("../projectPath");
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
var _PackageRequest = require("./PackageRequest");
var _WriteBundleRequest = _interopRequireDefault(require("./WriteBundleRequest"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Packages, optimizes, and writes all bundles to the dist directory.
*/
function createWriteBundlesRequest(input) {
  return {
    type: _RequestTracker.requestTypes.write_bundles_request,
    // Keyed by the bundle graph hash so the request re-runs when it changes.
    id: 'write_bundles:' + input.bundleGraph.getBundleGraphHash(),
    run,
    input
  };
}
/**
 * Packages every non-placeholder bundle (on workers or the main thread),
 * assigns name hashes, and schedules a WriteBundleRequest per bundle.
 * Returns a Map from bundle id to its written-file info.
 */
async function run({
  input,
  api,
  farm,
  options
}) {
  let {
    bundleGraph,
    optionsRef
  } = input;
  // Share the bundle graph with worker threads; released in the finally block.
  let {
    ref,
    dispose
  } = await farm.createSharedReference(bundleGraph);
  api.invalidateOnOptionChange('shouldContentHash');
  let res = new Map();
  let bundleInfoMap = {};
  let writeEarlyPromises = {};
  let hashRefToNameHash = new Map();
  let bundles = bundleGraph.getBundles().filter(bundle => {
    // Do not package and write placeholder bundles to disk. We just
    // need to update the name so other bundles can reference it.
    if (bundle.isPlaceholder) {
      let hash = bundle.id.slice(-8);
      hashRefToNameHash.set(bundle.hashReference, hash);
      let name = (0, _nullthrows().default)(bundle.name).replace(bundle.hashReference, hash);
      res.set(bundle.id, {
        filePath: (0, _projectPath.joinProjectPath)(bundle.target.distDir, name),
        type: bundle.type,
        // FIXME: this is wrong if the packager changes the type...
        stats: {
          time: 0,
          size: 0
        }
      });
      return false;
    }
    return true;
  });
  // Package on the main thread if there is only one bundle to package.
  // This avoids the cost of serializing the bundle graph for single file change builds.
  let useMainThread = bundles.length === 1 || bundles.filter(b => !api.canSkipSubrequest(bundleGraph.getHash(b))).length === 1;
  try {
    await Promise.all(bundles.map(async bundle => {
      let request = (0, _PackageRequest.createPackageRequest)({
        bundle,
        bundleGraph,
        bundleGraphReference: ref,
        optionsRef,
        useMainThread
      });
      let info = await api.runRequest(request);
      if (!useMainThread) {
        // Force a refresh of the cache to avoid a race condition
        // between threaded reads and writes that can result in an LMDB cache miss:
        // 1. The main thread has read some value from cache, necessitating a read transaction.
        // 2. Concurrently, Thread A finishes a packaging request.
        // 3. Subsequently, the main thread is tasked with this request, but fails because the read transaction is stale.
        // This only occurs if the reading thread has a transaction that was created before the writing thread committed,
        // and the transaction is still live when the reading thread attempts to get the written value.
        // See https://github.com/parcel-bundler/parcel/issues/9121
        options.cache.refresh();
      }
      bundleInfoMap[bundle.id] = info;
      // Bundles with no hash references can be named and written immediately;
      // the rest wait for assignComplexNameHashes below.
      if (!info.hashReferences.length) {
        hashRefToNameHash.set(bundle.hashReference, options.shouldContentHash ? info.hash.slice(-8) : bundle.id.slice(-8));
        let writeBundleRequest = (0, _WriteBundleRequest.default)({
          bundle,
          info,
          hashRefToNameHash,
          bundleGraph
        });
        let promise = api.runRequest(writeBundleRequest);
        // If the promise rejects before we await it (below), we don't want to crash the build.
        promise.catch(() => {});
        writeEarlyPromises[bundle.id] = promise;
      }
    }));
    assignComplexNameHashes(hashRefToNameHash, bundles, bundleInfoMap, options);
    // Await early writes and issue write requests for the remaining bundles.
    await Promise.all(bundles.map(bundle => {
      var _writeEarlyPromises$b;
      let promise = (_writeEarlyPromises$b = writeEarlyPromises[bundle.id]) !== null && _writeEarlyPromises$b !== void 0 ? _writeEarlyPromises$b : api.runRequest((0, _WriteBundleRequest.default)({
        bundle,
        info: bundleInfoMap[bundle.id],
        hashRefToNameHash,
        bundleGraph
      }));
      return promise.then(r => res.set(bundle.id, r));
    }));
    api.storeResult(res);
    return res;
  } finally {
    // Always release the shared bundle-graph reference.
    await dispose();
  }
}
/**
 * Assigns name hashes to bundles whose names depend on other bundles (i.e.
 * bundles with unresolved hash references), hashing the combined content
 * hashes of every bundle reachable through those references.
 */
function assignComplexNameHashes(hashRefToNameHash, bundles, bundleInfoMap, options) {
  for (let bundle of bundles) {
    // Bundles without hash references were already assigned in run()'s loop.
    if (hashRefToNameHash.get(bundle.hashReference) != null) {
      continue;
    }
    hashRefToNameHash.set(bundle.hashReference, options.shouldContentHash ? (0, _rust().hashString)([...getBundlesIncludedInHash(bundle.id, bundleInfoMap)].map(bundleId => bundleInfoMap[bundleId].hash).join(':')).slice(-8) : bundle.id.slice(-8));
  }
}
// Collects, depth-first, the ids of all bundles transitively referenced by
// `bundleId`'s hash references (including itself). Insertion order of the
// returned Set matches the pre-order traversal.
function getBundlesIncludedInHash(bundleId, bundleInfoMap, included = new Set()) {
  included.add(bundleId);
  for (let ref of bundleInfoMap[bundleId].hashReferences) {
    let referencedBundleId = getIdFromHashRef(ref);
    if (included.has(referencedBundleId)) {
      continue;
    }
    getBundlesIncludedInHash(referencedBundleId, bundleInfoMap, included);
  }
  return included;
}
// Strips the placeholder prefix from a hash reference, yielding the bundle id.
function getIdFromHashRef(hashRef) {
  const prefixLength = _constants.HASH_REF_PREFIX.length;
  return hashRef.slice(prefixLength);
}

View File

@@ -0,0 +1,221 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = resolveOptions;
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
function _fs() {
const data = require("@parcel/fs");
_fs = function () {
return data;
};
return data;
}
function _cache() {
const data = require("@parcel/cache");
_cache = function () {
return data;
};
return data;
}
function _packageManager() {
const data = require("@parcel/package-manager");
_packageManager = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _loadDotEnv = _interopRequireDefault(require("./loadDotEnv"));
var _projectPath = require("./projectPath");
var _ParcelConfigRequest = require("./requests/ParcelConfigRequest");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Default cache directory name
const DEFAULT_CACHE_DIRNAME = '.parcel-cache';
// Lockfile names (plus '.git'/'.hg' at the call site) used when searching
// upward from the entries to locate the project root.
const LOCK_FILE_NAMES = ['yarn.lock', 'package-lock.json', 'pnpm-lock.yaml'];
// Generate a unique instanceId, will change on every run of parcel
function generateInstanceId(entries) {
  // Hash of entries + wall clock + random salt: intentionally unique per run.
  return (0, _rust().hashString)(`${entries.join(',')}-${Date.now()}-${Math.round(Math.random() * 100)}`);
}
// Compiles an array of globs to regex - used for lazy include/excludes
function compileGlobs(globs) {
  // Precompile once so repeated lazy include/exclude checks reuse the regexes.
  return globs.map(glob => (0, _utils().globToRegex)(glob));
}
async function resolveOptions(initialOptions) {
var _initialOptions$cache, _initialOptions$mode, _initialOptions$defau, _initialOptions$defau2, _initialOptions$defau3, _initialOptions$defau4, _initialOptions$defau5, _initialOptions$defau6, _initialOptions$shoul, _initialOptions$lazyI, _initialOptions$lazyE, _initialOptions$shoul2, _initialOptions$env, _initialOptions$shoul3, _initialOptions$shoul4, _initialOptions$hmrOp, _initialOptions$shoul5, _initialOptions$shoul6, _initialOptions$shoul7, _initialOptions$shoul8, _initialOptions$logLe, _initialOptions$addit, _initialOptions$addit2, _initialOptions$defau7, _initialOptions$defau8, _initialOptions$defau9, _initialOptions$defau10, _initialOptions$defau11, _initialOptions$defau12;
let inputFS = initialOptions.inputFS || new (_fs().NodeFS)();
let outputFS = initialOptions.outputFS || new (_fs().NodeFS)();
let inputCwd = inputFS.cwd();
let outputCwd = outputFS.cwd();
let entries;
if (initialOptions.entries == null || initialOptions.entries === '') {
entries = [];
} else if (Array.isArray(initialOptions.entries)) {
entries = initialOptions.entries.map(entry => _path().default.resolve(inputCwd, entry));
} else {
entries = [_path().default.resolve(inputCwd, initialOptions.entries)];
}
let shouldMakeEntryReferFolder = false;
if (entries.length === 1 && !(0, _utils().isGlob)(entries[0])) {
let [entry] = entries;
try {
shouldMakeEntryReferFolder = (await inputFS.stat(entry)).isDirectory();
} catch {
// ignore failing stat call
}
}
// getRootDir treats the input as files, so getRootDir(["/home/user/myproject"]) returns "/home/user".
// Instead we need to make the the entry refer to some file inside the specified folders if entries refers to the directory.
let entryRoot = (0, _utils().getRootDir)(shouldMakeEntryReferFolder ? [_path().default.join(entries[0], 'index')] : entries);
let projectRootFile = (await (0, _utils().resolveConfig)(inputFS, _path().default.join(entryRoot, 'index'), [...LOCK_FILE_NAMES, '.git', '.hg'], _path().default.parse(entryRoot).root)) || _path().default.join(inputCwd, 'index'); // ? Should this just be rootDir
let projectRoot = _path().default.dirname(projectRootFile);
let packageManager = initialOptions.packageManager || new (_packageManager().NodePackageManager)(inputFS, projectRoot);
let cacheDir =
// If a cacheDir is provided, resolve it relative to cwd. Otherwise,
// use a default directory resolved relative to the project root.
initialOptions.cacheDir != null ? _path().default.resolve(outputCwd, initialOptions.cacheDir) : _path().default.resolve(projectRoot, DEFAULT_CACHE_DIRNAME);
// Make the root watch directory configurable. This is useful in some cases
// where symlinked dependencies outside the project root need to trigger HMR
// updates. Default to the project root if not provided.
let watchDir = initialOptions.watchDir != null ? _path().default.resolve(initialOptions.watchDir) : projectRoot;
let cache = (_initialOptions$cache = initialOptions.cache) !== null && _initialOptions$cache !== void 0 ? _initialOptions$cache : outputFS instanceof _fs().NodeFS ? new (_cache().LMDBCache)(cacheDir) : new (_cache().FSCache)(outputFS, cacheDir);
let mode = (_initialOptions$mode = initialOptions.mode) !== null && _initialOptions$mode !== void 0 ? _initialOptions$mode : 'development';
let shouldOptimize = (_initialOptions$defau = initialOptions === null || initialOptions === void 0 || (_initialOptions$defau2 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau2 === void 0 ? void 0 : _initialOptions$defau2.shouldOptimize) !== null && _initialOptions$defau !== void 0 ? _initialOptions$defau : mode === 'production';
let publicUrl = (_initialOptions$defau3 = initialOptions === null || initialOptions === void 0 || (_initialOptions$defau4 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau4 === void 0 ? void 0 : _initialOptions$defau4.publicUrl) !== null && _initialOptions$defau3 !== void 0 ? _initialOptions$defau3 : '/';
let distDir = (initialOptions === null || initialOptions === void 0 || (_initialOptions$defau5 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau5 === void 0 ? void 0 : _initialOptions$defau5.distDir) != null ? _path().default.resolve(inputCwd, initialOptions === null || initialOptions === void 0 || (_initialOptions$defau6 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau6 === void 0 ? void 0 : _initialOptions$defau6.distDir) : undefined;
let shouldBuildLazily = (_initialOptions$shoul = initialOptions.shouldBuildLazily) !== null && _initialOptions$shoul !== void 0 ? _initialOptions$shoul : false;
let lazyIncludes = compileGlobs((_initialOptions$lazyI = initialOptions.lazyIncludes) !== null && _initialOptions$lazyI !== void 0 ? _initialOptions$lazyI : []);
if (lazyIncludes.length > 0 && !shouldBuildLazily) {
throw new Error('Lazy includes can only be provided when lazy building is enabled');
}
let lazyExcludes = compileGlobs((_initialOptions$lazyE = initialOptions.lazyExcludes) !== null && _initialOptions$lazyE !== void 0 ? _initialOptions$lazyE : []);
if (lazyExcludes.length > 0 && !shouldBuildLazily) {
throw new Error('Lazy excludes can only be provided when lazy building is enabled');
}
let shouldContentHash = (_initialOptions$shoul2 = initialOptions.shouldContentHash) !== null && _initialOptions$shoul2 !== void 0 ? _initialOptions$shoul2 : initialOptions.mode === 'production';
if (shouldBuildLazily && shouldContentHash) {
throw new Error('Lazy bundling does not work with content hashing');
}
let env = {
...(await (0, _loadDotEnv.default)((_initialOptions$env = initialOptions.env) !== null && _initialOptions$env !== void 0 ? _initialOptions$env : {}, inputFS, _path().default.join(projectRoot, 'index'), projectRoot)),
...process.env,
...initialOptions.env
};
let port = determinePort(initialOptions.serveOptions, env.PORT);
return {
config: getRelativeConfigSpecifier(inputFS, projectRoot, initialOptions.config),
defaultConfig: getRelativeConfigSpecifier(inputFS, projectRoot, initialOptions.defaultConfig),
shouldPatchConsole: (_initialOptions$shoul3 = initialOptions.shouldPatchConsole) !== null && _initialOptions$shoul3 !== void 0 ? _initialOptions$shoul3 : false,
env,
mode,
shouldAutoInstall: (_initialOptions$shoul4 = initialOptions.shouldAutoInstall) !== null && _initialOptions$shoul4 !== void 0 ? _initialOptions$shoul4 : false,
hmrOptions: (_initialOptions$hmrOp = initialOptions.hmrOptions) !== null && _initialOptions$hmrOp !== void 0 ? _initialOptions$hmrOp : null,
shouldBuildLazily,
lazyIncludes,
lazyExcludes,
unstableFileInvalidations: initialOptions.unstableFileInvalidations,
shouldBundleIncrementally: (_initialOptions$shoul5 = initialOptions.shouldBundleIncrementally) !== null && _initialOptions$shoul5 !== void 0 ? _initialOptions$shoul5 : true,
shouldContentHash,
serveOptions: initialOptions.serveOptions ? {
...initialOptions.serveOptions,
distDir: distDir !== null && distDir !== void 0 ? distDir : _path().default.join(outputCwd, 'dist'),
port
} : false,
shouldDisableCache: (_initialOptions$shoul6 = initialOptions.shouldDisableCache) !== null && _initialOptions$shoul6 !== void 0 ? _initialOptions$shoul6 : false,
shouldProfile: (_initialOptions$shoul7 = initialOptions.shouldProfile) !== null && _initialOptions$shoul7 !== void 0 ? _initialOptions$shoul7 : false,
shouldTrace: (_initialOptions$shoul8 = initialOptions.shouldTrace) !== null && _initialOptions$shoul8 !== void 0 ? _initialOptions$shoul8 : false,
cacheDir,
watchDir,
entries: entries.map(e => (0, _projectPath.toProjectPath)(projectRoot, e)),
targets: initialOptions.targets,
logLevel: (_initialOptions$logLe = initialOptions.logLevel) !== null && _initialOptions$logLe !== void 0 ? _initialOptions$logLe : 'info',
projectRoot,
inputFS,
outputFS,
cache,
packageManager,
additionalReporters: (_initialOptions$addit = (_initialOptions$addit2 = initialOptions.additionalReporters) === null || _initialOptions$addit2 === void 0 ? void 0 : _initialOptions$addit2.map(({
packageName,
resolveFrom
}) => ({
packageName,
resolveFrom: (0, _projectPath.toProjectPath)(projectRoot, resolveFrom)
}))) !== null && _initialOptions$addit !== void 0 ? _initialOptions$addit : [],
instanceId: generateInstanceId(entries),
detailedReport: initialOptions.detailedReport,
defaultTargetOptions: {
shouldOptimize,
shouldScopeHoist: initialOptions === null || initialOptions === void 0 || (_initialOptions$defau7 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau7 === void 0 ? void 0 : _initialOptions$defau7.shouldScopeHoist,
sourceMaps: (_initialOptions$defau8 = initialOptions === null || initialOptions === void 0 || (_initialOptions$defau9 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau9 === void 0 ? void 0 : _initialOptions$defau9.sourceMaps) !== null && _initialOptions$defau8 !== void 0 ? _initialOptions$defau8 : true,
publicUrl,
...(distDir != null ? {
distDir: (0, _projectPath.toProjectPath)(projectRoot, distDir)
} : {
/*::...null*/
}),
engines: initialOptions === null || initialOptions === void 0 || (_initialOptions$defau10 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau10 === void 0 ? void 0 : _initialOptions$defau10.engines,
outputFormat: initialOptions === null || initialOptions === void 0 || (_initialOptions$defau11 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau11 === void 0 ? void 0 : _initialOptions$defau11.outputFormat,
isLibrary: initialOptions === null || initialOptions === void 0 || (_initialOptions$defau12 = initialOptions.defaultTargetOptions) === null || _initialOptions$defau12 === void 0 ? void 0 : _initialOptions$defau12.isLibrary
}
};
}
// Normalize a config specifier for storage in the cache. Relative and
// package specifiers pass through untouched; absolute paths are rewritten
// relative to the project's resolve-from location when they live inside it.
function getRelativeConfigSpecifier(fs, projectRoot, specifier) {
  if (specifier == null) {
    return undefined;
  }
  if (!_path().default.isAbsolute(specifier)) {
    return specifier;
  }
  const resolveFrom = (0, _ParcelConfigRequest.getResolveFrom)(fs, projectRoot);
  const relative = (0, _utils().relativePath)(_path().default.dirname(resolveFrom), specifier);
  // If the config is outside the project root, keep the absolute path so it
  // still works if the project root moves. Otherwise use a relative path so
  // that the cache is portable.
  if (relative.startsWith('..')) {
    return specifier;
  }
  return relative;
}
// Decide which port the dev server should listen on.
// Precedence: an explicit CLI port (any value differing from the default),
// then a valid integer PORT from the environment, then the default.
function determinePort(initialServerOptions, portInEnv, defaultPort = 1234) {
  // Convert the env value to an integer port, or undefined when invalid.
  const parsePort = port => {
    const parsed = Number(port);
    return Number.isInteger(parsed) ? parsed : undefined;
  };
  const envPort = typeof portInEnv === 'undefined' ? undefined : parsePort(portInEnv);
  const fallback = envPort !== null && envPort !== void 0 ? envPort : defaultPort;
  if (!initialServerOptions) {
    return fallback;
  }
  // When initialServerOptions.port differs from defaultPort it was provided
  // explicitly (e.g. via --port on the CLI) and always wins.
  if (initialServerOptions.port !== defaultPort) {
    return initialServerOptions.port;
  }
  // Otherwise the port was not set explicitly, so .env (if valid) wins.
  return fallback;
}

View File

@@ -0,0 +1,217 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.cacheSerializedObject = cacheSerializedObject;
exports.deserialize = deserialize;
Object.defineProperty(exports, "deserializeRaw", {
enumerable: true,
get: function () {
return _serializerCore.deserializeRaw;
}
});
exports.deserializeToCache = deserializeToCache;
exports.prepareForSerialization = prepareForSerialization;
exports.registerSerializableClass = registerSerializableClass;
exports.removeSerializedObjectFromCache = removeSerializedObjectFromCache;
exports.restoreDeserializedObject = restoreDeserializedObject;
exports.serialize = serialize;
Object.defineProperty(exports, "serializeRaw", {
enumerable: true,
get: function () {
return _serializerCore.serializeRaw;
}
});
exports.unregisterSerializableClass = unregisterSerializableClass;
var _buildCache = require("./buildCache");
var _serializerCore = require("./serializerCore");
// Bidirectional registry of serializable classes:
// stable name -> constructor, and constructor -> stable name.
const nameToCtor = new Map();
const ctorToName = new Map();
// Register a class so its instances survive a serialize/deserialize round
// trip. The name must be stable across processes; double registration of
// the same constructor is an error.
function registerSerializableClass(name, ctor) {
  if (ctorToName.has(ctor)) {
    throw new Error('Class already registered with serializer');
  }
  ctorToName.set(ctor, name);
  nameToCtor.set(name, ctor);
}
// Remove a class registration, but only when the supplied (name, ctor)
// pair matches what is currently registered in each direction.
function unregisterSerializableClass(name, ctor) {
  const registeredCtor = nameToCtor.get(name);
  if (registeredCtor === ctor) {
    nameToCtor.delete(name);
  }
  const registeredName = ctorToName.get(ctor);
  if (registeredName === name) {
    ctorToName.delete(ctor);
  }
}
// Produce a shallow clone of `object`: arrays, Maps and Sets get a new
// container holding the same entries; plain objects keep their prototype
// and property descriptors. Primitives and null are returned unchanged.
function shallowCopy(object) {
  if (!object || typeof object !== 'object') {
    return object;
  }
  if (Array.isArray(object)) {
    return object.slice();
  }
  if (object instanceof Map) {
    return new Map(object.entries());
  }
  if (object instanceof Set) {
    return new Set(object.values());
  }
  return Object.create(Object.getPrototypeOf(object), Object.getOwnPropertyDescriptors(object));
}
// True when `object` is a typed-array/Buffer-like view, i.e. it exposes a
// `.buffer` backed by an ArrayBuffer or SharedArrayBuffer.
function isBuffer(object) {
  const backing = object.buffer;
  if (backing instanceof ArrayBuffer) {
    return true;
  }
  return typeof SharedArrayBuffer !== 'undefined' && backing instanceof SharedArrayBuffer;
}
// Whether mapObject should descend into `value`: only non-null objects that
// are not explicitly marked `$$raw`. Mirrors the short-circuit semantics of
// `a && b && c`, so falsy inputs are returned as themselves.
function shouldContinueMapping(value) {
  if (!value) {
    return value;
  }
  if (typeof value !== 'object') {
    return false;
  }
  return value.$$raw !== true;
}
// Recursively map `fn` over every nested value in `object`, returning a
// structure with the results substituted. Containers are copied only when a
// child actually changed (copy-on-write), cycles are broken via a seen-cache,
// and `fn` is memoized so equal inputs map to the same output by reference.
// When `preOrder` is true, `fn` runs before descending into a value and
// traversal stops at values whose mapped form is marked `$$raw`.
function mapObject(object, fn, preOrder = false) {
  let cache = new Map();
  let memo = new Map();
  // Memoize the passed function to ensure it always returns the exact same
  // output by reference for the same input. This is important to maintain
  // reference integrity when deserializing rather than cloning.
  let memoizedFn = val => {
    let res = memo.get(val);
    if (res == null) {
      res = fn(val);
      memo.set(val, res);
    }
    return res;
  };
  let walk = (object, shouldCopy = false) => {
    // Check the cache first, both for performance and cycle detection.
    if (cache.has(object)) {
      return cache.get(object);
    }
    let result = object;
    cache.set(object, result);
    // Map one child entry: apply fn (pre- or post-order), recurse, and write
    // any changed value back into `result`, copying the container first when
    // required.
    let processKey = (key, value) => {
      let newValue = value;
      if (preOrder && value && typeof value === 'object') {
        newValue = memoizedFn(value);
      }
      // Recursively walk the children
      if (preOrder ? shouldContinueMapping(newValue) : newValue && typeof newValue === 'object' && shouldContinueMapping(object)) {
        newValue = walk(newValue, newValue === value);
      }
      if (!preOrder && newValue && typeof newValue === 'object') {
        newValue = memoizedFn(newValue);
      }
      if (newValue !== value) {
        // Copy on write. We only need to do this when serializing, not deserializing.
        if (object === result && preOrder && shouldCopy) {
          result = shallowCopy(object);
          cache.set(object, result);
        }
        // Replace the key with the new value
        if (result instanceof Map) {
          result.set(key, newValue);
        } else if (result instanceof Set) {
          let _result = result; // For Flow
          // TODO: do we care about iteration order??
          _result.delete(value);
          _result.add(newValue);
        } else {
          result[key] = newValue;
        }
      }
    };
    // Iterate in various ways depending on type.
    if (Array.isArray(object)) {
      for (let i = 0; i < object.length; i++) {
        processKey(i, object[i]);
      }
    } else if (object instanceof Map || object instanceof Set) {
      for (let [key, val] of object.entries()) {
        processKey(key, val);
      }
    } else if (!isBuffer(object)) {
      // Plain objects are walked key-by-key; typed arrays / Buffers are
      // treated as leaves and never traversed.
      for (let key in object) {
        processKey(key, object[key]);
      }
    }
    return result;
  };
  let mapped = memoizedFn(object);
  if (preOrder ? shouldContinueMapping(mapped) : mapped && typeof mapped === 'object' && shouldContinueMapping(object)) {
    return walk(mapped, mapped === object);
  }
  return mapped;
}
// Walk `object` pre-order and replace every instance of a registered class
// with a plain `{$$type, $$raw, value}` wrapper so the raw serializer can
// encode it. Objects already marked `$$raw` are passed through untouched.
function prepareForSerialization(object) {
  if (object !== null && object !== void 0 && object.$$raw) {
    return object;
  }
  return mapObject(object, value => {
    // Add a $$type property with the name of this class, if any is registered.
    if (value && typeof value === 'object' && typeof value.constructor === 'function') {
      let type = ctorToName.get(value.constructor);
      if (type != null) {
        let serialized = value;
        let raw = false;
        if (value && typeof value.serialize === 'function') {
          var _ref;
          // If the object has a serialize method, call it
          serialized = value.serialize();
          // An explicit $$raw flag on the serialized form wins; otherwise
          // custom serialize() output defaults to raw (not walked further).
          raw = (_ref = serialized && serialized.$$raw) !== null && _ref !== void 0 ? _ref : true;
          if (serialized) {
            delete serialized.$$raw;
          }
        }
        return {
          $$type: type,
          $$raw: raw,
          value: {
            ...serialized
          }
        };
      }
    }
    return value;
  }, true);
}
// Inverse of prepareForSerialization: revive `$$type`-tagged wrappers back
// into instances of their registered classes.
function restoreDeserializedObject(object) {
  return mapObject(object, value => {
    if (!value || !value.$$type) {
      return value;
    }
    const ctor = nameToCtor.get(value.$$type);
    if (ctor == null) {
      throw new Error(`Expected constructor ${value.$$type} to be registered with serializer to deserialize`);
    }
    // Prefer a custom static deserialize hook when the class provides one.
    if (typeof ctor.deserialize === 'function') {
      return ctor.deserialize(value.value);
    }
    // Otherwise restore the prototype chain on the raw payload in place.
    const restored = value.value;
    Object.setPrototypeOf(restored, ctor.prototype);
    return restored;
  });
}
// Build-scoped cache mapping already-serialized objects to their encoded Buffers.
const serializeCache = (0, _buildCache.createBuildCache)();
// Serialize `object` to a Buffer, reusing a previously cached encoding for
// the same object when one exists.
function serialize(object) {
  const cached = serializeCache.get(object);
  if (cached) {
    return cached;
  }
  return (0, _serializerCore.serializeRaw)(prepareForSerialization(object));
}
// Decode a Buffer produced by serialize() and revive registered class instances.
function deserialize(buffer) {
  return restoreDeserializedObject((0, _serializerCore.deserializeRaw)(buffer));
}
// Associate `object` with its serialized form so later serialize() calls can
// skip re-encoding. When no buffer is supplied, one is computed now.
function cacheSerializedObject(object, buffer) {
  const encoded = buffer ? buffer : serialize(object);
  serializeCache.set(object, encoded);
}
// Deserialize `buffer` and remember it as the canonical serialized form of
// the resulting object.
function deserializeToCache(buffer) {
  const object = deserialize(buffer);
  serializeCache.set(object, buffer);
  return object;
}
// Drop any cached serialized form for `object`.
function removeSerializedObjectFromCache(object) {
  serializeCache.delete(object);
}

View File

@@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.serializeRaw = exports.deserializeRaw = void 0;
// Lazily require node's "buffer" module, memoizing the result after first use.
function _buffer() {
  const data = require("buffer");
  _buffer = function () {
    return data;
  };
  return data;
}
// Lazily require msgpackr (with ESM namespace interop), memoizing the result.
function msgpackr() {
  const data = _interopRequireWildcard(require("msgpackr"));
  msgpackr = function () {
    return data;
  };
  return data;
}
// Babel helper: WeakMap caches backing _interopRequireWildcard.
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
// Babel helper: expose a CJS module as an ES-module-like namespace object.
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
// Shared msgpack encoder instance used for both encode and decode.
let encoder = new (msgpackr().Encoder)({
  structuredClone: true
});
// Encode any serializable value into a Buffer via msgpack.
let serializeRaw = v => _buffer().Buffer.from(encoder.encode(v));
exports.serializeRaw = serializeRaw;
// Decode a msgpack Buffer back into a value.
let deserializeRaw = v => encoder.decode(v);
exports.deserializeRaw = deserializeRaw;

View File

@@ -0,0 +1,16 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.serializeRaw = exports.deserializeRaw = void 0;
// Lazily require node's "v8" module, memoizing the result after first use.
function _v() {
  const data = _interopRequireDefault(require("v8"));
  _v = function () {
    return data;
  };
  return data;
}
// Babel helper: normalize CJS/ESM default-export interop.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Raw (de)serialization backed by v8's structured serializer.
let serializeRaw = exports.serializeRaw = _v().default.serialize;
let deserializeRaw = exports.deserializeRaw = _v().default.deserialize;

View File

@@ -0,0 +1,55 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = summarizeRequest;
// Lazily require node's "path" module, memoizing the result after first use.
function _path() {
  const data = _interopRequireDefault(require("path"));
  _path = function () {
    return data;
  };
  return data;
}
// Babel helper: normalize CJS/ESM default-export interop.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Platform-specific path fragment used to classify files as third-party code.
const NODE_MODULES = `${_path().default.sep}node_modules${_path().default.sep}`;
const BUFFER_LIMIT = 5000000; // 5mb — files larger than this are streamed rather than buffered
// Summarize an asset request: load its content (from the inline code or from
// disk) and classify whether it is first-party source (outside node_modules).
async function summarizeRequest(fs, req) {
  const {
    content,
    size
  } = await summarizeDiskRequest(fs, req);
  return {
    content,
    size,
    isSource: isFilePathSource(fs, req.filePath)
  };
}
// A file counts as "source" when it does not live under node_modules.
// (`fs` is accepted for interface compatibility but not used.)
function isFilePathSource(fs, filePath) {
  return filePath.indexOf(NODE_MODULES) === -1;
}
// Load request content either from the inline `req.code` or from disk.
// Files larger than BUFFER_LIMIT are returned as a stream so hashing can be
// done incrementally; smaller files are read fully into memory.
async function summarizeDiskRequest(fs, req) {
  const code = req.code;
  if (code != null) {
    return {
      content: code,
      size: Buffer.byteLength(code)
    };
  }
  // Stat first: in the common (small) case it's faster to read the whole file
  // and hash it in one shot than to pay stream overhead.
  const size = (await fs.stat(req.filePath)).size;
  const content = size > BUFFER_LIMIT ? fs.createReadStream(req.filePath) : await fs.readFile(req.filePath);
  return {
    content,
    size
  };
}

View File

@@ -0,0 +1,35 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.SpecifierType = exports.Priority = exports.ExportsCondition = exports.BundleBehaviorNames = exports.BundleBehavior = void 0;
// Numeric codes for how a dependency specifier was written.
const SpecifierType = exports.SpecifierType = {
  esm: 0,
  commonjs: 1,
  url: 2,
  custom: 3
};
// Numeric codes for when a dependency is loaded relative to its parent.
const Priority = exports.Priority = {
  sync: 0,
  parallel: 1,
  lazy: 2
};
// Bit flags for package.json "exports" conditions.
// Must match package_json.rs in node-resolver-rs.
const ExportsCondition = exports.ExportsCondition = {
  import: 1 << 0,
  require: 1 << 1,
  module: 1 << 2,
  style: 1 << 12,
  sass: 1 << 13,
  less: 1 << 14,
  stylus: 1 << 15
};
// Numeric codes for bundle behavior.
const BundleBehavior = exports.BundleBehavior = {
  inline: 0,
  isolated: 1
};
// Reverse lookup: numeric BundleBehavior code -> name.
const BundleBehaviorNames = exports.BundleBehaviorNames = Object.keys(BundleBehavior);
// Asset group nodes are essentially used as placeholders for the results of an asset request

View File

@@ -0,0 +1,160 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.BuildAbortError = void 0;
exports.assertSignalNotAborted = assertSignalNotAborted;
exports.createInvalidations = createInvalidations;
exports.fromInternalSourceLocation = fromInternalSourceLocation;
exports.getBundleGroupId = getBundleGroupId;
exports.getPublicId = getPublicId;
exports.hashFromOption = hashFromOption;
exports.invalidateOnFileCreateToInternal = invalidateOnFileCreateToInternal;
exports.optionsProxy = optionsProxy;
exports.toInternalSourceLocation = toInternalSourceLocation;
exports.toInternalSymbols = toInternalSymbols;
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _baseX() {
const data = _interopRequireDefault(require("base-x"));
_baseX = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _projectPath = require("./projectPath");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// base62 alphabet used to build short public ids (see getPublicId below).
const base62 = (0, _baseX().default)('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ');
// Stable identifier for a bundle group, combining its target name and
// entry asset id.
function getBundleGroupId(bundleGroup) {
  const {
    target,
    entryAssetId
  } = bundleGroup;
  return `bundle_group:${target.name}${entryAssetId}`;
}
// Throw when the (optional) AbortSignal attached to a build has fired.
function assertSignalNotAborted(signal) {
  if (signal?.aborted) {
    throw new BuildAbortError();
  }
}
// Error used to unwind a build that was cancelled via its AbortSignal.
class BuildAbortError extends Error {
  name = 'BuildAbortError';
}
exports.BuildAbortError = BuildAbortError;
// Shorten a hex id to the smallest base62 prefix (at least 5 characters)
// that does not collide according to the `alreadyExists` predicate.
function getPublicId(id, alreadyExists) {
  const encoded = base62.encode(Buffer.from(id, 'hex'));
  for (let length = 5; length <= encoded.length; length++) {
    const candidate = encoded.slice(0, length);
    if (!alreadyExists(candidate)) {
      return candidate;
    }
  }
  throw new Error('Original id was not unique');
}
// These options don't affect compilation, so changing them should NOT cause
// cache invalidations (see optionsProxy below).
const ignoreOptions = new Set(['env',
// handled by separate invalidateOnEnvChange
'inputFS', 'outputFS', 'workerFarm', 'packageManager', 'detailedReport', 'shouldDisableCache', 'cacheDir', 'shouldAutoInstall', 'logLevel', 'shouldProfile', 'shouldTrace', 'shouldPatchConsole', 'projectRoot', 'additionalReporters']);
// Wrap the resolved options in a Proxy that records which options a plugin
// reads (for cache invalidation) and, when `addDevDependency` is supplied,
// substitutes a package manager that tracks dev dependencies.
function optionsProxy(options, invalidateOnOptionChange, addDevDependency) {
  const packageManager = addDevDependency ? proxyPackageManager(options.projectRoot, options.packageManager, addDevDependency) : options.packageManager;
  const handler = {
    get(target, prop) {
      if (prop === 'packageManager') {
        return packageManager;
      }
      // Reading any non-ignored option ties the consumer's cache entry to it.
      if (!ignoreOptions.has(prop)) {
        invalidateOnOptionChange(prop);
      }
      return target[prop];
    }
  };
  return new Proxy(options, handler);
}
// Wrap a package manager so every `require` call also records the required
// specifier as a dev dependency of the requesting plugin.
function proxyPackageManager(projectRoot, packageManager, addDevDependency) {
  const trackingRequire = (id, from, opts) => {
    addDevDependency({
      specifier: id,
      resolveFrom: (0, _projectPath.toProjectPath)(projectRoot, from),
      range: opts === null || opts === void 0 ? void 0 : opts.range
    });
    return packageManager.require(id, from, opts);
  };
  return new Proxy(packageManager, {
    get(target, prop) {
      // $FlowFixMe
      return prop === 'require' ? trackingRequire : target[prop];
    }
  });
}
// Produce a stable string for hashing an option value: non-null objects are
// hashed structurally, everything else is stringified.
function hashFromOption(value) {
  const isObject = value != null && typeof value === 'object';
  return isObject ? (0, _utils().hashObject)(value) : String(value);
}
// Convert a public invalidate-on-file-create descriptor into its internal
// project-relative form. Exactly one of `glob`, `filePath`, or the pair
// (`fileName`, `aboveFilePath`) is expected to be present.
function invalidateOnFileCreateToInternal(projectRoot, invalidation) {
  if (invalidation.glob != null) {
    return {
      glob: (0, _projectPath.toProjectPath)(projectRoot, invalidation.glob)
    };
  }
  if (invalidation.filePath != null) {
    return {
      filePath: (0, _projectPath.toProjectPath)(projectRoot, invalidation.filePath)
    };
  }
  (0, _assert().default)(invalidation.aboveFilePath != null && invalidation.fileName != null);
  return {
    fileName: invalidation.fileName,
    aboveFilePath: (0, _projectPath.toProjectPath)(projectRoot, invalidation.aboveFilePath)
  };
}
// Fresh, empty invalidation record for tracking what should trigger a rebuild.
function createInvalidations() {
  const invalidations = {
    invalidateOnBuild: false,
    invalidateOnStartup: false,
    invalidateOnOptionChange: new Set(),
    invalidateOnEnvChange: new Set(),
    invalidateOnFileChange: new Set(),
    invalidateOnFileCreate: []
  };
  return invalidations;
}
// Map an internal source location's project-relative path back to an
// absolute path. Nullish locations pass through unchanged.
function fromInternalSourceLocation(projectRoot, loc) {
  if (!loc) {
    return loc;
  }
  const {
    filePath,
    start,
    end
  } = loc;
  return {
    filePath: (0, _projectPath.fromProjectPath)(projectRoot, filePath),
    start,
    end
  };
}
// Map a public source location's absolute path to its internal
// project-relative form. Nullish locations pass through unchanged.
function toInternalSourceLocation(projectRoot, loc) {
  if (!loc) {
    return loc;
  }
  const {
    filePath,
    start,
    end
  } = loc;
  return {
    filePath: (0, _projectPath.toProjectPath)(projectRoot, filePath),
    start,
    end
  };
}
// Convert a public symbol map into internal form by translating each
// symbol's source location to a project-relative path. Nullish maps pass
// through unchanged.
function toInternalSymbols(projectRoot, symbols) {
  if (!symbols) {
    return symbols;
  }
  const internal = new Map();
  for (const [name, {
    loc,
    ...rest
  }] of symbols) {
    internal.set(name, {
      ...rest,
      loc: toInternalSourceLocation(projectRoot, loc)
    });
  }
  return internal;
}

View File

@@ -0,0 +1,162 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.childInit = childInit;
exports.clearConfigCache = clearConfigCache;
exports.invalidateRequireCache = invalidateRequireCache;
exports.runPackage = runPackage;
exports.runTransform = runTransform;
exports.runValidate = runValidate;
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _assert() {
const data = _interopRequireDefault(require("assert"));
_assert = function () {
return data;
};
return data;
}
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
var _BundleGraph = _interopRequireDefault(require("./BundleGraph"));
var _Transformation = _interopRequireDefault(require("./Transformation"));
var _ReporterRunner = require("./ReporterRunner");
var _PackagerRunner = _interopRequireDefault(require("./PackagerRunner"));
var _Validation = _interopRequireDefault(require("./Validation"));
var _ParcelConfig = _interopRequireDefault(require("./ParcelConfig"));
var _registerCoreWithSerializer = require("./registerCoreWithSerializer");
var _buildCache = require("./buildCache");
function _sourceMap() {
const data = require("@parcel/source-map");
_sourceMap = function () {
return data;
};
return data;
}
function _rust() {
const data = require("@parcel/rust");
_rust = function () {
return data;
};
return data;
}
function _workers() {
const data = _interopRequireDefault(require("@parcel/workers"));
_workers = function () {
return data;
};
return data;
}
require("@parcel/cache");
require("@parcel/package-manager");
require("@parcel/fs");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// register with serializer
// $FlowFixMe
if (process.env.PARCEL_BUILD_REPL && process.browser) {
/* eslint-disable import/no-extraneous-dependencies, monorepo/no-internal-import */
require('@parcel/repl/src/parcel/BrowserPackageManager.js');
// $FlowFixMe
require('@parcel/repl/src/parcel/ExtendedMemoryFS.js');
/* eslint-enable import/no-extraneous-dependencies, monorepo/no-internal-import */
}
// Register all core types with the serializer so they can cross the
// worker IPC boundary.
(0, _registerCoreWithSerializer.registerCoreWithSerializer)();
// Remove the workerApi type from the TransformationOpts and ValidationOpts types:
// https://github.com/facebook/flow/issues/2835
// TODO: this should eventually be replaced by an in memory cache layer
// Memoized ParcelConfig instances keyed by config cache path (see loadConfig).
let parcelConfigCache = new Map();
// Fetch the shared resolved-options object registered under `ref` on this
// worker; throws when the reference is missing.
function loadOptions(ref, workerApi) {
  const shared = workerApi.getSharedReference(ref);
  return (0, _nullthrows().default)(shared);
}
// Load the processed ParcelConfig stored in the cache at `cachePath`,
// reusing a memoized instance when it was built for the same options object.
async function loadConfig(cachePath, options) {
  const cached = parcelConfigCache.get(cachePath);
  if (cached && cached.options === options) {
    return cached;
  }
  const processedConfig = (0, _nullthrows().default)(await options.cache.get(cachePath));
  const config = new _ParcelConfig.default(processedConfig, options);
  parcelConfigCache.set(cachePath, config);
  return config;
}
// Reset memoized configuration state on this worker.
function clearConfigCache() {
  // Clears @parcel/utils' memoized loadConfig and the build caches. The local
  // parcelConfigCache is left alone: its entries are already validated against
  // the options object identity in loadConfig.
  _utils().loadConfig.clear();
  (0, _buildCache.clearBuildCaches)();
}
// Worker entry point: run a single transformation with the shared options
// and the ParcelConfig loaded from the cache.
async function runTransform(workerApi, opts) {
  const {
    optionsRef,
    configCachePath,
    ...rest
  } = opts;
  const options = loadOptions(optionsRef, workerApi);
  const config = await loadConfig(configCachePath, options);
  const transformation = new _Transformation.default({
    workerApi,
    options,
    config,
    ...rest
  });
  return transformation.run();
}
// Worker entry point: run a single validation, reporting progress back to
// the main thread through the worker bridge.
async function runValidate(workerApi, opts) {
  const {
    optionsRef,
    configCachePath,
    ...rest
  } = opts;
  const options = loadOptions(optionsRef, workerApi);
  const config = await loadConfig(configCachePath, options);
  const validation = new _Validation.default({
    workerApi,
    report: _ReporterRunner.reportWorker.bind(null, workerApi),
    options,
    config,
    ...rest
  });
  return validation.run();
}
// Worker entry point: package a single bundle using the shared bundle graph.
async function runPackage(workerApi, {
  bundle,
  bundleGraphReference,
  configCachePath,
  optionsRef,
  previousDevDeps,
  invalidDevDeps,
  previousInvalidations
}) {
  const bundleGraph = workerApi.getSharedReference(bundleGraphReference);
  (0, _assert().default)(bundleGraph instanceof _BundleGraph.default);
  const options = loadOptions(optionsRef, workerApi);
  const parcelConfig = await loadConfig(configCachePath, options);
  // When running off-thread, reports must be forwarded through the worker
  // bridge; on the main thread they go straight to the reporter.
  const report = _workers().default.isWorker() ? _ReporterRunner.reportWorker.bind(null, workerApi) : _ReporterRunner.report;
  const runner = new _PackagerRunner.default({
    config: parcelConfig,
    options,
    report,
    previousDevDeps,
    previousInvalidations
  });
  return runner.run(bundleGraph, bundle, invalidDevDeps);
}
// Per-worker async initialization: wait for the source-map bindings and the
// (optional) Rust native init to complete before handling work.
async function childInit() {
  await _sourceMap().init;
  const rustInit = _rust().init;
  if (rustInit != null) {
    await rustInit();
  }
}
// Matches the deepest node_modules package segment (scoped or unscoped) in a path.
const PKG_RE = /node_modules[/\\]((?:@[^/\\]+[/\\][^/\\]+)|[^/\\]+)(?!.*[/\\]node_modules[/\\])/;
// Stub in the production build: require-cache invalidation is only wired up
// for the test build.
function invalidateRequireCache() {
  throw new Error('invalidateRequireCache is only for tests');
}

Some files were not shown because too many files have changed in this diff Show More