larry babby and threejs for glsl

This commit is contained in:
Sam
2024-06-24 21:24:00 +12:00
parent 87d5dc634d
commit 907ebae4c0
6474 changed files with 1279596 additions and 8 deletions

View File

@@ -0,0 +1,20 @@
// Shared config for both import-restriction rules below: errors when
// '@parcel/workers' is imported from within this package.
const RESTRICTED_CONFIG = [
  'error',
  {
    paths: [
      {
        name: '@parcel/workers',
        message:
          'Do not import workers inside utils. Instead, create a separate package.',
      },
    ],
  },
];

module.exports = {
  extends: '@parcel/eslint-config',
  rules: {
    // Apply the restriction to both ES module imports and CommonJS requires.
    'no-restricted-imports': RESTRICTED_CONFIG,
    'no-restricted-modules': RESTRICTED_CONFIG,
  },
};

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,71 @@
{
"name": "@parcel/utils",
"version": "2.12.0",
"description": "Blazing fast, zero configuration web application bundler",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"main": "lib/index.js",
"source": "src/index.js",
"engines": {
"node": ">= 12.0.0"
},
"targets": {
"main": {
"includeNodeModules": {
"@parcel/codeframe": false,
"@parcel/diagnostic": false,
"@parcel/rust": false,
"@parcel/logger": false,
"@parcel/markdown-ansi": false,
"@parcel/source-map": false,
"chalk": false
}
}
},
"dependencies": {
"@parcel/codeframe": "2.12.0",
"@parcel/diagnostic": "2.12.0",
"@parcel/logger": "2.12.0",
"@parcel/markdown-ansi": "2.12.0",
"@parcel/rust": "2.12.0",
"@parcel/source-map": "^2.1.1",
"chalk": "^4.1.0",
"nullthrows": "^1.1.1"
},
"devDependencies": {
"@iarna/toml": "^2.2.0",
"ansi-html-community": "0.0.8",
"clone": "^2.1.1",
"fast-glob": "^3.2.12",
"fastest-levenshtein": "^1.0.16",
"is-glob": "^4.0.0",
"is-url": "^1.2.2",
"json5": "^2.2.0",
"lru-cache": "^6.0.0",
"micromatch": "^4.0.4",
"node-forge": "^1.2.1",
"nullthrows": "^1.1.1",
"open": "^7.0.3",
"random-int": "^1.0.0",
"snarkdown": "^2.0.0",
"strip-ansi": "^6.0.0",
"terminal-link": "^2.1.1"
},
"browser": {
"./src/generateCertificate.js": false,
"./src/http-server.js": false,
"./src/openInBrowser.js": false,
"@parcel/markdown-ansi": false
},
"gitHead": "2059029ee91e5f03a273b0954d3e629d7375f986"
}

View File

@@ -0,0 +1,47 @@
// @flow strict-local
export class DefaultMap<K, V> extends Map<K, V> {
_getDefault: K => V;
constructor(getDefault: K => V, entries?: Iterable<[K, V]>) {
super(entries);
this._getDefault = getDefault;
}
get(key: K): V {
let ret;
if (this.has(key)) {
ret = super.get(key);
} else {
ret = this._getDefault(key);
this.set(key, ret);
}
// $FlowFixMe
return ret;
}
}
// Duplicated from DefaultMap implementation for Flow
// Roughly mirrors https://github.com/facebook/flow/blob/2eb5a78d92c167117ba9caae070afd2b9f598599/lib/core.js#L617
export class DefaultWeakMap<K: interface {}, V> extends WeakMap<K, V> {
_getDefault: K => V;
constructor(getDefault: K => V, entries?: Iterable<[K, V]>) {
super(entries);
this._getDefault = getDefault;
}
get(key: K): V {
let ret;
if (this.has(key)) {
ret = super.get(key);
} else {
ret = this._getDefault(key);
this.set(key, ret);
}
// $FlowFixMe
return ret;
}
}

View File

@@ -0,0 +1,23 @@
// @flow strict-local
import invariant from 'assert';
export type Deferred<T> = {|
resolve(T): void,
reject(mixed): void,
|};
export function makeDeferredWithPromise<T>(): {|
deferred: Deferred<T>,
promise: Promise<T>,
|} {
let deferred: ?Deferred<T>;
let promise = new Promise<T>((resolve, reject) => {
deferred = {resolve, reject};
});
// Promise constructor callback executes synchronously, so this is defined
invariant(deferred != null);
return {deferred, promise};
}

View File

@@ -0,0 +1,131 @@
// @flow strict-local
import {makeDeferredWithPromise, type Deferred} from './Deferred';
type PromiseQueueOpts = {|maxConcurrent: number|};
/**
 * Runs async tasks with a concurrency limit, resolving with their results in
 * the order the tasks were added. Errors are deferred: the first error is
 * remembered and run()'s promise rejects only after all in-flight tasks have
 * settled.
 */
export default class PromiseQueue<T> {
  // Settles the promise returned by run() once the queue drains.
  _deferred: ?Deferred<Array<T>>;
  _maxConcurrent: number;
  _numRunning: number = 0;
  _queue: Array<() => Promise<void>> = [];
  _runPromise: ?Promise<Array<T>> = null;
  // Only the first error encountered is kept (see _runFn).
  _error: mixed;
  // Total tasks ever added; used to index _results in add() order.
  _count: number = 0;
  _results: Array<T> = [];
  _addSubscriptions: Set<() => void> = new Set();

  constructor(opts: PromiseQueueOpts = {maxConcurrent: Infinity}) {
    if (opts.maxConcurrent <= 0) {
      throw new TypeError('maxConcurrent must be a positive, non-zero value');
    }
    this._maxConcurrent = opts.maxConcurrent;
  }

  // Number of tasks queued but not yet started.
  getNumWaiting(): number {
    return this._queue.length;
  }

  /**
   * Enqueues `fn`. The returned promise settles with that task's own result.
   * If a run is already in progress and there is spare concurrency, the task
   * starts immediately; otherwise it waits for run() or a free slot.
   */
  add(fn: () => Promise<T>): Promise<T> {
    return new Promise((resolve, reject) => {
      let i = this._count++;
      let wrapped = () =>
        fn().then(
          result => {
            // Store by add() order so run() resolves with ordered results.
            this._results[i] = result;
            resolve(result);
          },
          err => {
            reject(err);
            // Rethrow so _runFn also records the error for run()'s promise.
            throw err;
          },
        );
      this._queue.push(wrapped);
      for (const addFn of this._addSubscriptions) {
        addFn();
      }
      // _numRunning > 0 means run() already started; pick this task up now
      // if we are below the concurrency limit.
      if (this._numRunning > 0 && this._numRunning < this._maxConcurrent) {
        this._next();
      }
    });
  }

  // Registers a callback invoked whenever a task is added; returns an
  // unsubscribe function.
  subscribeToAdd(fn: () => void): () => void {
    this._addSubscriptions.add(fn);
    return () => {
      this._addSubscriptions.delete(fn);
    };
  }

  /**
   * Starts draining the queue. Resolves with all results (in add() order)
   * once the queue empties, or rejects with the first recorded error.
   * Calling run() again while running returns the same promise.
   */
  run(): Promise<Array<T>> {
    if (this._runPromise != null) {
      return this._runPromise;
    }
    if (this._queue.length === 0) {
      return Promise.resolve([]);
    }
    let {deferred, promise} = makeDeferredWithPromise();
    this._deferred = deferred;
    this._runPromise = promise;
    // Kick off up to maxConcurrent tasks; each continues the chain in _next.
    while (this._queue.length && this._numRunning < this._maxConcurrent) {
      this._next();
    }
    return promise;
  }

  // Runs the next queued task, then either continues with the following one
  // or finishes when the queue is empty and nothing else is running.
  async _next(): Promise<void> {
    let fn = this._queue.shift();
    await this._runFn(fn);
    if (this._queue.length) {
      this._next();
    } else if (this._numRunning === 0) {
      this._done();
    }
  }

  // Executes one task, tracking the running count.
  async _runFn(fn: () => mixed): Promise<void> {
    this._numRunning++;
    try {
      await fn();
    } catch (e) {
      // Only store the first error that occurs.
      // We don't reject immediately so that any other concurrent
      // requests have time to complete.
      if (this._error == null) {
        this._error = e;
      }
    } finally {
      this._numRunning--;
    }
  }

  // Restores the queue to an idle, reusable state after a run completes.
  // NOTE(review): `_error` is not cleared here, so a later run could observe
  // a stale error — confirm this is intended.
  _resetState(): void {
    this._queue = [];
    this._count = 0;
    this._results = [];
    this._runPromise = null;
    this._numRunning = 0;
    this._deferred = null;
  }

  // Settles run()'s promise (reject on the first recorded error, otherwise
  // resolve with the ordered results) and resets for the next run.
  _done(): void {
    if (this._deferred != null) {
      if (this._error != null) {
        this._deferred.reject(this._error);
      } else {
        this._deferred.resolve(this._results);
      }
    }
    this._resetState();
  }
}

View File

@@ -0,0 +1,28 @@
// @flow strict-local
import {Transform} from 'stream';
/*
* "Taps" into the contents of a flowing stream, yielding chunks to the passed
* callback. Continues to pass data chunks down the stream.
*/
export default class TapStream extends Transform {
  // Callback invoked with a Buffer copy of every chunk that flows through.
  _tap: Buffer => mixed;

  constructor(tap: Buffer => mixed, options: mixed) {
    super({...options});
    this._tap = tap;
  }

  _transform(
    chunk: Buffer | string,
    encoding: string,
    callback: (err: ?Error, chunk?: Buffer | string) => mixed,
  ) {
    try {
      // Hand the tap a Buffer copy, but forward the original chunk unchanged
      // downstream.
      this._tap(Buffer.from(chunk));
      callback(null, chunk);
    } catch (err) {
      // Surface tap failures as a stream error.
      callback(err);
    }
  }
}

View File

@@ -0,0 +1,145 @@
// @flow
import path from 'path';
import type {FileSystem} from '@parcel/fs';
import {fuzzySearch} from './schema';
import {relativePath} from './path';
import {resolveConfig} from './config';
/**
 * Walks up from `dir` to the filesystem root collecting installed package
 * names from every node_modules directory, then fuzzy-matches them against
 * `moduleName`. Returns up to two suggestions.
 */
export async function findAlternativeNodeModules(
  fs: FileSystem,
  moduleName: string,
  dir: string,
): Promise<Array<string>> {
  let potentialModules: Array<string> = [];
  let root = path.parse(dir).root;
  // Scoped specifiers ('@org/…') only match scoped packages and vice versa.
  let isOrganisationModule = moduleName.startsWith('@');
  while (dir !== root) {
    // Skip node_modules directories
    if (path.basename(dir) === 'node_modules') {
      dir = path.dirname(dir);
    }
    try {
      let modulesDir = path.join(dir, 'node_modules');
      let stats = await fs.stat(modulesDir);
      if (stats.isDirectory()) {
        let dirContent = (await fs.readdir(modulesDir)).sort();
        // Filter out the modules that interest us
        let modules = dirContent.filter(i =>
          isOrganisationModule ? i.startsWith('@') : !i.startsWith('@'),
        );
        // If it's an organisation module, loop through all the modules of that organisation
        if (isOrganisationModule) {
          await Promise.all(
            modules.map(async item => {
              let orgDirPath = path.join(modulesDir, item);
              let orgDirContent = (await fs.readdir(orgDirPath)).sort();
              // Add all org packages
              potentialModules.push(...orgDirContent.map(i => `${item}/${i}`));
            }),
          );
        } else {
          potentialModules.push(...modules);
        }
      }
    } catch (err) {
      // A missing or unreadable node_modules is expected; just keep walking.
    }
    // Move up a directory
    dir = path.dirname(dir);
  }
  return fuzzySearch(potentialModules.sort(), moduleName).slice(0, 2);
}
async function findAllFilesUp({
fs,
dir,
root,
basedir,
maxlength,
collected,
leadingDotSlash = true,
includeDirectories = true,
}: {|
fs: FileSystem,
dir: string,
root: string,
basedir: string,
maxlength: number,
collected: Array<string>,
leadingDotSlash?: boolean,
includeDirectories?: boolean,
|}): Promise<mixed> {
let dirContent = (await fs.readdir(dir)).sort();
return Promise.all(
dirContent.map(async item => {
let fullPath = path.join(dir, item);
let relativeFilePath = relativePath(basedir, fullPath, leadingDotSlash);
if (relativeFilePath.length < maxlength) {
let stats = await fs.stat(fullPath);
let isDir = stats.isDirectory();
if ((isDir && includeDirectories) || stats.isFile()) {
collected.push(relativeFilePath);
}
// If it's a directory, run over each item within said directory...
if (isDir) {
return findAllFilesUp({
fs,
dir: fullPath,
root,
basedir,
maxlength,
collected,
});
}
}
}),
);
}
/**
 * Suggests up to two existing files that fuzzily match `fileSpecifier`,
 * searching within the enclosing package (or the project root when no
 * package.json is found). Extensions are stripped from candidates when the
 * specifier itself has none, unless `includeExtension` is set.
 */
export async function findAlternativeFiles(
  fs: FileSystem,
  fileSpecifier: string,
  dir: string,
  projectRoot: string,
  leadingDotSlash?: boolean = true,
  includeDirectories?: boolean = true,
  includeExtension?: boolean = false,
): Promise<Array<string>> {
  let potentialFiles: Array<string> = [];
  // Find our root, we won't recommend files above the package root as that's bad practise
  let pkg = await resolveConfig(
    fs,
    path.join(dir, 'index'),
    ['package.json'],
    projectRoot,
  );
  let pkgRoot = pkg ? path.dirname(pkg) : projectRoot;
  await findAllFilesUp({
    fs,
    dir: pkgRoot,
    root: pkgRoot,
    basedir: dir,
    // Only consider candidates slightly longer than the specifier itself.
    maxlength: fileSpecifier.length + 10,
    collected: potentialFiles,
    leadingDotSlash,
    includeDirectories,
  });
  if (path.extname(fileSpecifier) === '' && !includeExtension) {
    // The specifier has no extension, so compare candidates without theirs.
    potentialFiles = potentialFiles.map(p => {
      let ext = path.extname(p);
      return ext.length > 0 ? p.slice(0, -ext.length) : p;
    });
  }
  return fuzzySearch(potentialFiles, fileSpecifier).slice(0, 2);
}

View File

@@ -0,0 +1,7 @@
// @flow strict-local
import ansiHTML from 'ansi-html-community';
import {escapeHTML} from './escape-html';
export function ansiHtml(ansi: string): string {
return ansiHTML(escapeHTML(ansi));
}

View File

@@ -0,0 +1,27 @@
// @flow strict-local
import type {Blob} from '@parcel/types';
import {Buffer} from 'buffer';
import {bufferStream} from './';
import {Readable} from 'stream';
export function blobToBuffer(blob: Blob): Promise<Buffer> {
if (blob instanceof Readable) {
return bufferStream(blob);
} else if (blob instanceof Buffer) {
return Promise.resolve(Buffer.from(blob));
} else {
return Promise.resolve(Buffer.from(blob, 'utf8'));
}
}
export async function blobToString(blob: Blob): Promise<string> {
if (blob instanceof Readable) {
return (await bufferStream(blob)).toString();
} else if (blob instanceof Buffer) {
return blob.toString();
} else {
return blob;
}
}

View File

@@ -0,0 +1,35 @@
// @flow strict-local
// Memoized bundle URL; computed once per module instance.
let bundleURL: ?string = null;

function getBundleURLCached(): string {
  if (bundleURL == null) {
    bundleURL = _getBundleURL();
  }
  return bundleURL;
}

function _getBundleURL(): string {
  // Attempt to find the URL of the current script and use that as the base URL
  try {
    throw new Error();
  } catch (err) {
    // Take the first URL that appears in the stack trace — it belongs to the
    // currently executing script.
    let stack: string = typeof err.stack === 'string' ? err.stack : '';
    let matches = stack.match(/(https?|file|ftp):\/\/[^)\n]+/g);
    if (matches) {
      return getBaseURL(matches[0]);
    }
  }
  // Fall back to the site root when no URL can be found in the stack.
  return '/';
}
export function getBaseURL(url: ?string): string {
if (url == null) {
return '/';
}
return url.replace(/^((?:https?|file|ftp):\/\/.+)\/[^/]+$/, '$1') + '/';
}
export const getBundleURL = getBundleURLCached;

View File

@@ -0,0 +1,77 @@
// @flow strict-local
export function unique<T>(array: Array<T>): Array<T> {
return [...new Set(array)];
}
export function objectSortedEntries(obj: {
+[string]: mixed,
...
}): Array<[string, mixed]> {
return Object.entries(obj).sort(([keyA], [keyB]) => keyA.localeCompare(keyB));
}
// Like objectSortedEntries, but recursively sorts nested objects (including
// objects inside arrays), producing a canonical entry representation.
export function objectSortedEntriesDeep(object: {
  +[string]: mixed,
  ...
}): Array<[string, mixed]> {
  let sortedEntries = objectSortedEntries(object);
  // Rewrite each value in place with its recursively-sorted form.
  for (let i = 0; i < sortedEntries.length; i++) {
    sortedEntries[i][1] = sortEntry(sortedEntries[i][1]);
  }
  return sortedEntries;
}

// Recursively canonicalizes a value: arrays element-wise, objects via
// objectSortedEntriesDeep, everything else unchanged.
function sortEntry(entry: mixed) {
  if (Array.isArray(entry)) {
    return entry.map(sortEntry);
  }
  if (typeof entry === 'object' && entry != null) {
    return objectSortedEntriesDeep(entry);
  }
  return entry;
}
// NOTE(review): despite the name, this computes the SYMMETRIC difference —
// elements present in exactly one of `a` or `b` — not a \ b. Callers may
// depend on this, so it is documented here rather than changed.
export function setDifference<T>(
  a: $ReadOnlySet<T>,
  b: $ReadOnlySet<T>,
): Set<T> {
  let difference = new Set();
  // Elements of a missing from b...
  for (let e of a) {
    if (!b.has(e)) {
      difference.add(e);
    }
  }
  // ...plus elements of b missing from a.
  for (let d of b) {
    if (!a.has(d)) {
      difference.add(d);
    }
  }
  return difference;
}
export function setIntersect<T>(a: Set<T>, b: $ReadOnlySet<T>): void {
for (let entry of a) {
if (!b.has(entry)) {
a.delete(entry);
}
}
}
export function setUnion<T>(a: Iterable<T>, b: Iterable<T>): Set<T> {
return new Set([...a, ...b]);
}
export function setEqual<T>(a: $ReadOnlySet<T>, b: $ReadOnlySet<T>): boolean {
if (a.size != b.size) {
return false;
}
for (let entry of a) {
if (!b.has(entry)) {
return false;
}
}
return true;
}

View File

@@ -0,0 +1,181 @@
// @flow
import type {ConfigResult, File, FilePath} from '@parcel/types';
import type {FileSystem} from '@parcel/fs';
import ThrowableDiagnostic from '@parcel/diagnostic';
import path from 'path';
import clone from 'clone';
import json5 from 'json5';
import {parse as toml} from '@iarna/toml';
import LRU from 'lru-cache';
export type ConfigOutput = {|
config: ConfigResult,
files: Array<File>,
|};
export type ConfigOptions = {|
parse?: boolean,
parser?: string => any,
|};
const configCache = new LRU<FilePath, ConfigOutput>({max: 500});
const resolveCache = new Map();
/**
 * Finds the nearest file named in `filenames` in `filepath`'s directory or
 * any ancestor up to `projectRoot`. Returns the resolved path, or a nullish
 * value when none exists.
 */
export function resolveConfig(
  fs: FileSystem,
  filepath: FilePath,
  filenames: Array<FilePath>,
  projectRoot: FilePath,
): Promise<?FilePath> {
  // Cache the result of resolving config for this directory.
  // This is automatically invalidated at the end of the current build.
  let key = path.dirname(filepath) + filenames.join(',');
  let cached = resolveCache.get(key);
  if (cached !== undefined) {
    return Promise.resolve(cached);
  }
  let resolved = fs.findAncestorFile(
    filenames,
    path.dirname(filepath),
    projectRoot,
  );
  resolveCache.set(key, resolved);
  return Promise.resolve(resolved);
}
// Synchronous, uncached variant of resolveConfig.
export function resolveConfigSync(
  fs: FileSystem,
  filepath: FilePath,
  filenames: Array<FilePath>,
  projectRoot: FilePath,
): ?FilePath {
  return fs.findAncestorFile(filenames, path.dirname(filepath), projectRoot);
}
export async function loadConfig(
fs: FileSystem,
filepath: FilePath,
filenames: Array<FilePath>,
projectRoot: FilePath,
opts: ?ConfigOptions,
): Promise<ConfigOutput | null> {
let parse = opts?.parse ?? true;
let configFile = await resolveConfig(fs, filepath, filenames, projectRoot);
if (configFile) {
let cachedOutput = configCache.get(String(parse) + configFile);
if (cachedOutput) {
return cachedOutput;
}
try {
let extname = path.extname(configFile).slice(1);
if (extname === 'js' || extname === 'cjs') {
let output = {
// $FlowFixMe
config: clone(module.require(configFile)),
files: [{filePath: configFile}],
};
configCache.set(configFile, output);
return output;
}
return readConfig(fs, configFile, opts);
} catch (err) {
if (err.code === 'MODULE_NOT_FOUND' || err.code === 'ENOENT') {
return null;
}
throw err;
}
}
return null;
}
loadConfig.clear = () => {
configCache.reset();
resolveCache.clear();
};
/**
 * Reads and (optionally) parses a config file, caching the output per
 * (parse, path) pair. Parse errors in json/json5 content are rethrown as a
 * ThrowableDiagnostic with a code frame; a missing file yields null.
 */
export async function readConfig(
  fs: FileSystem,
  configFile: FilePath,
  opts: ?ConfigOptions,
): Promise<ConfigOutput | null> {
  let parse = opts?.parse ?? true;
  let cachedOutput = configCache.get(String(parse) + configFile);
  if (cachedOutput) {
    return cachedOutput;
  }
  try {
    let configContent = await fs.readFile(configFile, 'utf8');
    let config;
    if (parse === false) {
      // Caller asked for the raw file contents.
      config = configContent;
    } else {
      let extname = path.extname(configFile).slice(1);
      // NOTE: this inner `parse` (a parser function, scoped to this block)
      // shadows the outer boolean `parse` flag above.
      let parse = opts?.parser ?? getParser(extname);
      try {
        config = parse(configContent);
      } catch (e) {
        // Only build a code-frame diagnostic for json-ish content; other
        // formats rethrow the raw parser error.
        if (extname !== '' && extname !== 'json') {
          throw e;
        }
        let pos = {
          line: e.lineNumber,
          column: e.columnNumber,
        };
        throw new ThrowableDiagnostic({
          diagnostic: {
            message: `Failed to parse ${path.basename(configFile)}`,
            origin: '@parcel/utils',
            codeFrames: [
              {
                language: 'json5',
                filePath: configFile,
                code: configContent,
                codeHighlights: [
                  {
                    start: pos,
                    end: pos,
                    message: e.message,
                  },
                ],
              },
            ],
          },
        });
      }
    }
    let output = {
      config,
      files: [{filePath: configFile}],
    };
    configCache.set(String(parse) + configFile, output);
    return output;
  } catch (err) {
    if (err.code === 'MODULE_NOT_FOUND' || err.code === 'ENOENT') {
      return null;
    }
    throw err;
  }
}
// Picks a parser for the given file extension; json5 handles both json and
// unknown extensions.
function getParser(extname) {
  if (extname === 'toml') {
    return toml;
  }
  return json5.parse;
}

View File

@@ -0,0 +1,15 @@
// @flow strict-local
export default function countLines(
string: string,
startIndex: number = 0,
): number {
let lines = 1;
for (let i = startIndex; i < string.length; i++) {
if (string.charAt(i) === '\n') {
lines++;
}
}
return lines;
}

View File

@@ -0,0 +1,19 @@
// @flow strict-local
export default function debounce<TArgs: Array<mixed>>(
fn: (...args: TArgs) => mixed,
delay: number,
): (...args: TArgs) => void {
let timeout;
return function (...args: TArgs) {
if (timeout) {
clearTimeout(timeout);
}
timeout = setTimeout(() => {
timeout = null;
fn(...args);
}, delay);
};
}

View File

@@ -0,0 +1,34 @@
// @flow
/**
 * Builds a source location (filePath/start/end) for an import specifier.
 * The end column accounts for the specifier's length plus the characters
 * wrapping it (importWrapperLength, normally the two quotes).
 */
export default function createDependencyLocation(
  start: interface {
    line: number,
    column: number,
  },
  specifier: string,
  lineOffset: number = 0,
  columnOffset: number = 0,
  // Imports are usually wrapped in quotes
  importWrapperLength: number = 2,
): {|
  end: {|column: number, line: number|},
  filePath: string,
  start: {|column: number, line: number|},
|} {
  return {
    filePath: specifier,
    start: {
      line: start.line + lineOffset,
      column: start.column + columnOffset,
    },
    // Specifiers are treated as single-line, so end shares start's line.
    end: {
      line: start.line + lineOffset,
      column:
        start.column +
        specifier.length -
        1 +
        importWrapperLength +
        columnOffset,
    },
  };
}

View File

@@ -0,0 +1,20 @@
// @flow
// Based on _.escape https://github.com/lodash/lodash/blob/master/escape.js
const reUnescapedHtml = /[&<>"']/g;
const reHasUnescapedHtml = RegExp(reUnescapedHtml.source);
const htmlEscapes = {
'&': '&amp;',
'<': '&lt;',
'>': '&gt;',
'"': '&quot;',
"'": '&#39;',
};
export function escapeHTML(s: string): string {
if (reHasUnescapedHtml.test(s)) {
return s.replace(reUnescapedHtml, c => htmlEscapes[c]);
}
return s;
}

View File

@@ -0,0 +1,158 @@
// @flow
import type {FilePath, PackagedBundle} from '@parcel/types';
import type {FileSystem} from '@parcel/fs';
import SourceMap from '@parcel/source-map';
import nullthrows from 'nullthrows';
import path from 'path';
import {loadSourceMapUrl} from './';
export type AssetStats = {|
filePath: string,
size: number,
originalSize: number,
time: number,
|};
export type BundleStats = {|
filePath: string,
size: number,
time: number,
assets: Array<AssetStats>,
|};
export type BuildMetrics = {|
bundles: Array<BundleStats>,
|};
/**
 * Walks a bundle's contents character-by-character against its source map,
 * attributing each character's UTF-8 byte size to the source it maps to.
 * Returns a map of absolute source path -> byte count, with the empty-string
 * key collecting bytes mapped to no source. Returns null when the bundle has
 * no source map URL, and implicitly undefined for trivially small maps.
 */
async function getSourcemapSizes(
  filePath: FilePath,
  fs: FileSystem,
  projectRoot: FilePath,
): Promise<?Map<string, number>> {
  let bundleContents = await fs.readFile(filePath, 'utf-8');
  let mapUrlData = await loadSourceMapUrl(fs, filePath, bundleContents);
  if (!mapUrlData) {
    return null;
  }
  let rawMap = mapUrlData.map;
  let sourceMap = new SourceMap(projectRoot);
  sourceMap.addVLQMap(rawMap);
  let parsedMapData = sourceMap.getMap();
  if (parsedMapData.mappings.length > 2) {
    let sources = parsedMapData.sources.map(s =>
      path.normalize(path.join(projectRoot, s)),
    );
    // Walk the generated code, advancing through mappings as the current
    // line/column passes each mapping's generated position.
    let currLine = 1;
    let currColumn = 0;
    let currMappingIndex = 0;
    let currMapping = parsedMapData.mappings[currMappingIndex];
    let nextMapping = parsedMapData.mappings[currMappingIndex + 1];
    let sourceSizes = new Array(sources.length).fill(0);
    // Bytes not covered by any mapping.
    let unknownOrigin: number = 0;
    for (let i = 0; i < bundleContents.length; i++) {
      let character = bundleContents[i];
      while (
        nextMapping &&
        nextMapping.generated.line === currLine &&
        nextMapping.generated.column <= currColumn
      ) {
        currMappingIndex++;
        currMapping = parsedMapData.mappings[currMappingIndex];
        nextMapping = parsedMapData.mappings[currMappingIndex + 1];
      }
      let currentSource = currMapping.source;
      let charSize = Buffer.byteLength(character, 'utf8');
      // Attribute the byte to the mapping's source only when the mapping
      // actually covers the current position on this line.
      if (
        currentSource != null &&
        currMapping.generated.line === currLine &&
        currMapping.generated.column <= currColumn
      ) {
        sourceSizes[currentSource] += charSize;
      } else {
        unknownOrigin += charSize;
      }
      if (character === '\n') {
        currColumn = 0;
        currLine++;
      } else {
        currColumn++;
      }
    }
    let sizeMap = new Map();
    for (let i = 0; i < sourceSizes.length; i++) {
      sizeMap.set(sources[i], sourceSizes[i]);
    }
    // Empty key collects bytes of unknown origin.
    sizeMap.set('', unknownOrigin);
    return sizeMap;
  }
}
/**
 * Builds a BundleStats entry for one bundle. When source-map data is
 * available, per-asset sizes come from the map (reflecting actual bytes in
 * the output); otherwise the assets' own stats are used. Assets are sorted
 * largest-first.
 */
async function createBundleStats(
  bundle: PackagedBundle,
  fs: FileSystem,
  projectRoot: FilePath,
) {
  let filePath = bundle.filePath;
  let sourcemapSizes = await getSourcemapSizes(filePath, fs, projectRoot);
  let assets: Map<string, AssetStats> = new Map();
  bundle.traverseAssets(asset => {
    let filePath = path.normalize(asset.filePath);
    assets.set(filePath, {
      filePath,
      size: asset.stats.size,
      originalSize: asset.stats.size,
      time: asset.stats.time,
    });
  });
  let assetsReport: Array<AssetStats> = [];
  if (sourcemapSizes && sourcemapSizes.size) {
    assetsReport = Array.from(sourcemapSizes.keys()).map((filePath: string) => {
      let foundSize = sourcemapSizes.get(filePath) || 0;
      // Fall back to a synthetic entry for sources not known as assets.
      let stats = assets.get(filePath) || {
        filePath,
        size: foundSize,
        originalSize: foundSize,
        time: 0,
      };
      return {
        ...stats,
        size: foundSize,
      };
    });
  } else {
    assetsReport = Array.from(assets.values());
  }
  return {
    filePath: nullthrows(bundle.filePath),
    size: bundle.stats.size,
    time: bundle.stats.time,
    assets: assetsReport.sort((a, b) => b.size - a.size),
  };
}
export default async function generateBuildMetrics(
bundles: Array<PackagedBundle>,
fs: FileSystem,
projectRoot: FilePath,
): Promise<BuildMetrics> {
bundles.sort((a, b) => b.stats.size - a.stats.size).filter(b => !!b.filePath);
return {
bundles: (
await Promise.all(bundles.map(b => createBundleStats(b, fs, projectRoot)))
).filter(e => !!e),
};
}

View File

@@ -0,0 +1,140 @@
// @flow
import type {FileSystem} from '@parcel/fs';
import forge from 'node-forge';
import path from 'path';
import logger from '@parcel/logger';
/**
 * Returns an HTTPS key/cert pair for the dev server. Reuses a previously
 * generated pair from `cacheDir` when both files exist; otherwise generates
 * a self-signed RSA-2048 certificate (valid one year), writes it to the
 * cache, and returns it.
 * NOTE(review): the declared return type is Buffer, but freshly generated
 * values are PEM strings from node-forge — callers appear to accept both;
 * confirm.
 */
export default async function generateCertificate(
  fs: FileSystem,
  cacheDir: string,
  host: ?string,
): Promise<{|cert: Buffer, key: Buffer|}> {
  let certDirectory = cacheDir;
  const privateKeyPath = path.join(certDirectory, 'private.pem');
  const certPath = path.join(certDirectory, 'primary.crt');
  // `false` when the file is missing, otherwise its contents.
  const cachedKey =
    (await fs.exists(privateKeyPath)) && (await fs.readFile(privateKeyPath));
  const cachedCert =
    (await fs.exists(certPath)) && (await fs.readFile(certPath));
  if (cachedKey && cachedCert) {
    return {
      key: cachedKey,
      cert: cachedCert,
    };
  }
  logger.progress('Generating SSL Certificate...');
  const pki = forge.pki;
  const keys = pki.rsa.generateKeyPair(2048);
  const cert = pki.createCertificate();
  cert.publicKey = keys.publicKey;
  // The timestamp doubles as a unique-enough serial number.
  cert.serialNumber = Date.now().toString();
  cert.validity.notBefore = new Date();
  cert.validity.notAfter = new Date();
  cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 1);
  // Self-signed: the same attributes serve as both subject and issuer.
  const attrs = [
    {
      name: 'commonName',
      value: 'parceljs.org',
    },
    {
      name: 'countryName',
      value: 'US',
    },
    {
      shortName: 'ST',
      value: 'Virginia',
    },
    {
      name: 'localityName',
      value: 'Blacksburg',
    },
    {
      name: 'organizationName',
      value: 'parcelBundler',
    },
    {
      shortName: 'OU',
      value: 'Test',
    },
  ];
  // Always valid for localhost and 127.0.0.1, plus the configured host.
  let altNames = [
    {
      type: 2, // DNS
      value: 'localhost',
    },
    {
      type: 7, // IP
      ip: '127.0.0.1',
    },
  ];
  if (host) {
    altNames.push({
      type: 2, // DNS
      value: host,
    });
  }
  cert.setSubject(attrs);
  cert.setIssuer(attrs);
  cert.setExtensions([
    {
      name: 'basicConstraints',
      cA: false,
    },
    {
      name: 'keyUsage',
      keyCertSign: true,
      digitalSignature: true,
      nonRepudiation: true,
      keyEncipherment: true,
      dataEncipherment: true,
    },
    {
      name: 'extKeyUsage',
      serverAuth: true,
      clientAuth: true,
      codeSigning: true,
      emailProtection: true,
      timeStamping: true,
    },
    {
      name: 'nsCertType',
      client: true,
      server: true,
      email: true,
      objsign: true,
      sslCA: true,
      emailCA: true,
      objCA: true,
    },
    {
      name: 'subjectAltName',
      altNames,
    },
    {
      name: 'subjectKeyIdentifier',
    },
  ]);
  cert.sign(keys.privateKey, forge.md.sha256.create());
  const privPem = pki.privateKeyToPem(keys.privateKey);
  const certPem = pki.certificateToPem(cert);
  // Persist for reuse on subsequent dev-server starts.
  await fs.mkdirp(certDirectory);
  await fs.writeFile(privateKeyPath, privPem);
  await fs.writeFile(certPath, certPem);
  return {
    key: privPem,
    cert: certPem,
  };
}

View File

@@ -0,0 +1,17 @@
// @flow
import type {HTTPSOptions} from '@parcel/types';
import type {FileSystem} from '@parcel/fs';
/**
 * Reads a user-supplied HTTPS cert/key pair from disk.
 * Throws a generic Error when either file can't be read.
 */
export default async function getCertificate(
  fs: FileSystem,
  options: HTTPSOptions,
): Promise<{|cert: Buffer, key: Buffer|}> {
  try {
    let cert = await fs.readFile(options.cert);
    let key = await fs.readFile(options.key);
    return {key, cert};
  } catch (err) {
    // NOTE(review): the underlying fs error is swallowed here; consider
    // chaining it for easier debugging.
    throw new Error('Certificate and/or key not found');
  }
}

View File

@@ -0,0 +1,20 @@
// @flow strict-local
import fs from 'fs';
/**
* Creates an object that contains both source and minified (using the source as a fallback).
* e.g. builtins.min.js and builtins.js.
*/
export default function getExisting(
  minifiedPath: string,
  sourcePath: string,
): {|minified: string, source: string|} {
  let source = fs.readFileSync(sourcePath, 'utf8').trim();
  return {
    source,
    // Use the minified file when present, stripping a trailing semicolon so
    // it can be embedded inside expressions; otherwise fall back to source.
    minified: fs.existsSync(minifiedPath)
      ? fs.readFileSync(minifiedPath, 'utf8').trim().replace(/;$/, '')
      : source,
  };
}

View File

@@ -0,0 +1,23 @@
// @flow strict-local
import path from 'path';
import {normalizeSeparators} from './path';
/**
* Returns the package name and the optional subpath
*/
export default function getModuleParts(_name: string): [string, ?string] {
let name = path.normalize(_name);
let splitOn = name.indexOf(path.sep);
if (name.charAt(0) === '@') {
splitOn = name.indexOf(path.sep, splitOn + 1);
}
if (splitOn < 0) {
return [normalizeSeparators(name), undefined];
} else {
return [
normalizeSeparators(name.substring(0, splitOn)),
name.substring(splitOn + 1) || undefined,
];
}
}

View File

@@ -0,0 +1,47 @@
// @flow strict-local
import type {FilePath} from '@parcel/types';
import {isGlob} from './glob';
import path from 'path';
/**
 * Computes the deepest directory common to all given paths (after trimming
 * glob segments). Falls back to process.cwd() when the list is empty or the
 * paths have different roots.
 */
export default function getRootDir(files: Array<FilePath>): FilePath {
  let cur = null;
  for (let file of files) {
    let parsed = path.parse(file);
    // Trim the directory down to the part before any glob segment.
    parsed.dir = findGlobRoot(parsed.dir);
    if (!cur) {
      cur = parsed;
    } else if (parsed.root !== cur.root) {
      // bail out. there is no common root.
      // this can happen on windows, e.g. C:\foo\bar vs. D:\foo\bar
      return process.cwd();
    } else {
      // find the common path parts.
      let curParts = cur.dir.split(path.sep);
      let newParts = parsed.dir.split(path.sep);
      let len = Math.min(curParts.length, newParts.length);
      let i = 0;
      while (i < len && curParts[i] === newParts[i]) {
        i++;
      }
      // Keep the shared prefix, or just the root when only the root matches.
      cur.dir = i > 1 ? curParts.slice(0, i).join(path.sep) : cur.root;
    }
  }
  return cur ? cur.dir : process.cwd();
}
// Transforms a path like `packages/*/src/index.js` to the root of the glob, `packages/`
function findGlobRoot(dir: FilePath) {
let parts = dir.split(path.sep);
let last = parts.length;
for (let i = parts.length - 1; i >= 0; i--) {
if (isGlob(parts[i])) {
last = i;
}
}
return parts.slice(0, last).join(path.sep);
}

View File

@@ -0,0 +1,111 @@
// @flow
import type {FilePath, Glob} from '@parcel/types';
import type {FileSystem} from '@parcel/fs';
import _isGlob from 'is-glob';
import fastGlob, {type FastGlobOptions} from 'fast-glob';
import micromatch, {isMatch, makeRe, type Options} from 'micromatch';
import {normalizeSeparators} from './path';
// True when `p` contains glob syntax. Separators are normalized to '/',
// since the underlying matcher only understands posix separators.
export function isGlob(p: FilePath): any {
  return _isGlob(normalizeSeparators(p));
}

// micromatch.isMatch with glob separators normalized to '/'.
export function isGlobMatch(
  filePath: FilePath,
  glob: Glob | Array<Glob>,
  opts?: Options,
): any {
  glob = Array.isArray(glob)
    ? glob.map(normalizeSeparators)
    : normalizeSeparators(glob);
  return isMatch(filePath, glob, opts);
}

// Filters `values` down to those matching `glob` (separators normalized).
export function globMatch(
  values: Array<string>,
  glob: Glob | Array<Glob>,
  opts?: Options,
): Array<string> {
  glob = Array.isArray(glob)
    ? glob.map(normalizeSeparators)
    : normalizeSeparators(glob);
  return micromatch(values, glob, opts);
}

// Compiles a glob into a RegExp via micromatch.
export function globToRegex(glob: Glob, opts?: Options): RegExp {
  return makeRe(glob, opts);
}
// fast-glob.sync, routed through Parcel's FileSystem abstraction.
export function globSync(
  p: FilePath,
  fs: FileSystem,
  options?: FastGlobOptions<FilePath>,
): Array<FilePath> {
  // $FlowFixMe
  options = {
    ...options,
    // Adapt the Parcel FileSystem to the fs subset fast-glob expects.
    fs: {
      statSync: p => {
        return fs.statSync(p);
      },
      lstatSync: p => {
        // Our FileSystem interface doesn't have lstat support at the moment,
        // but this is fine for our purposes since we follow symlinks by default.
        return fs.statSync(p);
      },
      readdirSync: (p, opts) => {
        return fs.readdirSync(p, opts);
      },
    },
  };
  // $FlowFixMe
  return fastGlob.sync(normalizeSeparators(p), options);
}
// Async fast-glob, routed through Parcel's FileSystem abstraction.
export function glob(
  p: FilePath,
  fs: FileSystem,
  options: FastGlobOptions<FilePath>,
): Promise<Array<FilePath>> {
  // $FlowFixMe
  options = {
    ...options,
    // Adapt the promise-based Parcel FileSystem to the callback API
    // fast-glob expects.
    fs: {
      stat: async (p, cb) => {
        try {
          cb(null, await fs.stat(p));
        } catch (err) {
          cb(err);
        }
      },
      lstat: async (p, cb) => {
        // Our FileSystem interface doesn't have lstat support at the moment,
        // but this is fine for our purposes since we follow symlinks by default.
        try {
          cb(null, await fs.stat(p));
        } catch (err) {
          cb(err);
        }
      },
      readdir: async (p, opts, cb) => {
        // Support both readdir(path, cb) and readdir(path, opts, cb).
        if (typeof opts === 'function') {
          cb = opts;
          opts = null;
        }
        try {
          cb(null, await fs.readdir(p, opts));
        } catch (err) {
          cb(err);
        }
      },
    },
  };
  // $FlowFixMe Added in Flow 0.121.0 upgrade in #4381
  return fastGlob(normalizeSeparators(p), options);
}

View File

@@ -0,0 +1,49 @@
// @flow strict-local
import type {Readable} from 'stream';
import type {FileSystem} from '@parcel/fs';
import {objectSortedEntriesDeep} from './collection';
import {hashString, Hash} from '@parcel/rust';
// Streams a readable into a Hash, resolving with the final digest string.
export function hashStream(stream: Readable): Promise<string> {
  let hash = new Hash();
  return new Promise((resolve, reject) => {
    stream.on('error', err => {
      reject(err);
    });
    stream
      .on('data', chunk => {
        hash.writeBuffer(chunk);
      })
      .on('end', function () {
        resolve(hash.finish());
      })
      // NOTE(review): 'error' is subscribed twice (also above); the second
      // handler is redundant but harmless since reject is idempotent.
      .on('error', err => {
        reject(err);
      });
  });
}
export function hashObject(obj: {+[string]: mixed, ...}): string {
return hashString(JSON.stringify(objectSortedEntriesDeep(obj)));
}
// Memoizes hashes of large native binaries; only populated in test builds.
let testCache: {|[string]: Promise<string>|} = {
  /*:: ...null */
};

// Hashes a file's contents by streaming it. In test builds, results for
// native .node binaries are cached because they are big and slow to hash.
export function hashFile(fs: FileSystem, filePath: string): Promise<string> {
  if (process.env.PARCEL_BUILD_ENV === 'test') {
    // Development builds of these native modules are especially big and slow to hash.
    if (
      /parcel-swc\.[^\\/]+\.node$|lightningcss.[^\\/]+.node$/.test(filePath)
    ) {
      let cacheEntry = testCache[filePath];
      if (cacheEntry) return cacheEntry;
      let v = hashStream(fs.createReadStream(filePath));
      testCache[filePath] = v;
      return v;
    }
  }
  return hashStream(fs.createReadStream(filePath));
}

View File

@@ -0,0 +1,93 @@
// @flow strict-local
import type {
Server as HTTPOnlyServer,
IncomingMessage as HTTPRequest,
ServerResponse as HTTPResponse,
} from 'http';
import type {
Server as HTTPSServer,
IncomingMessage as HTTPSRequest,
ServerResponse as HTTPSResponse,
} from 'https';
import type {Socket} from 'net';
import type {FilePath, HTTPSOptions} from '@parcel/types';
import type {FileSystem} from '@parcel/fs';
import http from 'http';
import https from 'https';
import nullthrows from 'nullthrows';
import {getCertificate, generateCertificate} from './';
// Options for `createHTTPServer`. The second branch of the spread is used
// when an HTTPS server is requested: `https` may be `true` (generate a
// self-signed certificate into `cacheDir` via `outputFS`) or explicit
// `HTTPSOptions` (certificate loaded via `inputFS`).
type CreateHTTPServerOpts = {|
  listener?: (HTTPRequest | HTTPSRequest, HTTPResponse | HTTPSResponse) => void,
  host?: string,
  ...
    | {|
        https: ?(HTTPSOptions | boolean),
        inputFS: FileSystem,
        outputFS: FileSystem,
        cacheDir: FilePath,
      |}
    | {||},
|};
// Either a plain HTTP server or an HTTPS server instance.
export type HTTPServer = HTTPOnlyServer | HTTPSServer;
// Creates either an http or https server with an awaitable dispose
// that closes any connections
export async function createHTTPServer(
options: CreateHTTPServerOpts,
): Promise<{|
stop: () => Promise<void>,
server: HTTPServer,
|}> {
let server;
if (!options.https) {
server = http.createServer(options.listener);
} else if (options.https === true) {
let {cert, key} = await generateCertificate(
options.outputFS,
options.cacheDir,
options.host,
);
server = https.createServer({cert, key}, options.listener);
} else {
let {cert, key} = await getCertificate(options.inputFS, options.https);
server = https.createServer({cert, key}, options.listener);
}
// HTTPServer#close only stops accepting new connections, and does not close existing ones.
// Before closing, destroy any active connections through their sockets. Additionally, remove sockets when they close:
// https://stackoverflow.com/questions/18874689/force-close-all-connections-in-a-node-js-http-server
// https://stackoverflow.com/questions/14626636/how-do-i-shutdown-a-node-js-https-server-immediately/14636625#14636625
let sockets: Set<Socket> = new Set();
server.on('connection', (socket: Socket) => {
nullthrows(sockets).add(socket);
socket.on('close', () => {
nullthrows(sockets).delete(socket);
});
});
return {
server,
stop() {
return new Promise((resolve, reject) => {
for (let socket of nullthrows(sockets)) {
socket.destroy();
}
sockets = new Set();
server.close(err => {
if (err != null) {
reject(err);
return;
}
resolve();
});
});
},
};
}

View File

@@ -0,0 +1,88 @@
// @flow strict-local
export type * from './config';
export type * from './Deferred';
export type * from './generateBuildMetrics';
export type * from './http-server';
export type * from './path';
export type * from './prettyDiagnostic';
export type * from './schema';
export {default as countLines} from './countLines';
export {default as generateBuildMetrics} from './generateBuildMetrics';
export {default as generateCertificate} from './generateCertificate';
export {default as getCertificate} from './getCertificate';
export {default as getModuleParts} from './getModuleParts';
export {default as getRootDir} from './getRootDir';
export {default as isDirectoryInside} from './isDirectoryInside';
export {default as isURL} from './is-url';
export {default as objectHash} from './objectHash';
export {default as prettifyTime} from './prettifyTime';
export {default as prettyDiagnostic} from './prettyDiagnostic';
export {default as PromiseQueue} from './PromiseQueue';
export {default as validateSchema} from './schema';
export {default as TapStream} from './TapStream';
export {default as urlJoin} from './urlJoin';
export {default as relativeUrl} from './relativeUrl';
export {default as createDependencyLocation} from './dependency-location';
export {default as debounce} from './debounce';
export {default as throttle} from './throttle';
export {default as openInBrowser} from './openInBrowser';
// Explicit re-exports instead of export * for lazy require performance
export {findAlternativeNodeModules, findAlternativeFiles} from './alternatives';
export {blobToBuffer, blobToString} from './blob';
export {
unique,
objectSortedEntries,
objectSortedEntriesDeep,
setDifference,
setEqual,
setIntersect,
setUnion,
} from './collection';
export {
resolveConfig,
resolveConfigSync,
loadConfig,
readConfig,
} from './config';
export {DefaultMap, DefaultWeakMap} from './DefaultMap';
export {makeDeferredWithPromise} from './Deferred';
export {getProgressMessage} from './progress-message.js';
export {
isGlob,
isGlobMatch,
globMatch,
globSync,
glob,
globToRegex,
} from './glob';
export {hashStream, hashObject, hashFile} from './hash';
export {SharedBuffer} from './shared-buffer';
export {fuzzySearch} from './schema';
export {createHTTPServer} from './http-server';
export {normalizePath, normalizeSeparators, relativePath} from './path';
export {
replaceURLReferences,
replaceInlineReferences,
} from './replaceBundleReferences';
export {
measureStreamLength,
readableFromStringOrBuffer,
bufferStream,
blobToStream,
streamFromPromise,
fallbackStream,
} from './stream';
export {relativeBundlePath} from './relativeBundlePath';
export {ansiHtml} from './ansi-html';
export {escapeHTML} from './escape-html';
export {
SOURCEMAP_RE,
SOURCEMAP_EXTENSIONS,
matchSourceMappingURL,
loadSourceMapUrl,
loadSourceMap,
remapSourceLocation,
} from './sourcemap';
export {default as stripAnsi} from 'strip-ansi';

View File

@@ -0,0 +1,13 @@
// @flow
import _isURL from 'is-url';
// Matches anchor (ie: #raptors)
const ANCHOR_REGEXP = /^#/;
// Matches scheme (ie: tel:, mailto:, data:, itms-apps:)
const SCHEME_REGEXP = /^[a-z][a-z0-9\-+.]*:/i;
export default function isURL(url: string): boolean {
return _isURL(url) || ANCHOR_REGEXP.test(url) || SCHEME_REGEXP.test(url);
}

View File

@@ -0,0 +1,11 @@
// @flow strict-local
import type {FilePath} from '@parcel/types';
import path from 'path';
export default function isDirectoryInside(
child: FilePath,
parent: FilePath,
): boolean {
const relative = path.relative(parent, child);
return !relative.startsWith('..') && !path.isAbsolute(relative);
}

View File

@@ -0,0 +1,20 @@
// @flow strict-local
import crypto from 'crypto';
// $FlowFixMe
type Hashable = Object;
export default function objectHash(object: Hashable): string {
let hash = crypto.createHash('md5');
for (let key of Object.keys(object).sort()) {
let val = object[key];
if (typeof val === 'object' && val) {
hash.update(key + objectHash(val));
} else {
hash.update(key + val);
}
}
return hash.digest('hex');
}

View File

@@ -0,0 +1,64 @@
// @flow
import open from 'open';
import {execSync} from 'child_process';
import logger from '@parcel/logger';
// Chrome app name is platform dependent. we should not hard code it.
// https://github.com/react-native-community/cli/blob/e2be8a905285d9b37512fc78c9755b9635ecf805/packages/cli/src/commands/server/launchDebugger.ts#L28
function getChromeAppName(): string {
switch (process.platform) {
case 'darwin':
return 'google chrome';
case 'win32':
return 'chrome';
case 'linux':
if (commandExistsUnixSync('google-chrome')) {
return 'google-chrome';
}
if (commandExistsUnixSync('chromium-browser')) {
return 'chromium-browser';
}
return 'chromium';
default:
return 'google-chrome';
}
}
function commandExistsUnixSync(commandName: string) {
try {
const stdout = execSync(
`command -v ${commandName} 2>/dev/null` +
` && { echo >&1 '${commandName} found'; exit 0; }`,
);
return !!stdout;
} catch (error) {
return false;
}
}
function getAppName(appName: string): string {
if (['google', 'chrome'].includes(appName)) {
return getChromeAppName();
} else if (['brave', 'Brave'].includes(appName)) {
return 'Brave Browser';
} else return appName;
}
export default async function openInBrowser(url: string, browser: string) {
try {
const options =
typeof browser === 'string' && browser.length > 0
? {app: [getAppName(browser)]}
: undefined;
await open(url, options);
} catch (err) {
logger.error(
`Unexpected error while opening in browser: ${browser}`,
'@parcel/utils',
);
logger.error(err, '@parcel/utils');
}
}

View File

@@ -0,0 +1,11 @@
// @flow strict
export default function parseCSSImport(url: string): string {
if (!/^(~|\.\/|\/)/.test(url)) {
return './' + url;
} else if (!/^(~\/|\.\/|\/)/.test(url)) {
return url.substring(1);
} else {
return url;
}
}

View File

@@ -0,0 +1,48 @@
// @flow strict-local
import type {FilePath} from '@parcel/types';
import path from 'path';
const ABSOLUTE_PATH_REGEX = /^([a-zA-Z]:){0,1}[\\/]+/;
const SEPARATOR_REGEX = /[\\]+/g;
export function isAbsolute(filepath: string): boolean {
return ABSOLUTE_PATH_REGEX.test(filepath);
}
export function normalizeSeparators(filePath: FilePath): FilePath {
return filePath.replace(SEPARATOR_REGEX, '/');
}
export type PathOptions = {
noLeadingDotSlash?: boolean,
...
};
export function normalizePath(
filePath: FilePath,
leadingDotSlash: boolean = true,
): FilePath {
if (
leadingDotSlash &&
(filePath[0] !== '.' ||
(filePath[1] !== '.' && filePath[1] !== '/' && filePath[1] !== '\\')) &&
!path.isAbsolute(filePath)
) {
return normalizeSeparators('./' + filePath);
} else {
return normalizeSeparators(filePath);
}
}
export function relativePath(
from: string,
to: string,
leadingDotSlash: boolean = true,
): FilePath {
// Fast path
if (to.startsWith(from + '/')) {
return (leadingDotSlash ? './' : '') + to.slice(from.length + 1);
}
return normalizePath(path.relative(from, to), leadingDotSlash);
}

View File

@@ -0,0 +1,5 @@
// @flow strict-local
export default function prettifyTime(timeInMs: number): string {
return timeInMs < 1000 ? `${timeInMs}ms` : `${(timeInMs / 1000).toFixed(2)}s`;
}

View File

@@ -0,0 +1,140 @@
// @flow strict-local
import type {Diagnostic} from '@parcel/diagnostic';
import type {PluginOptions} from '@parcel/types';
import formatCodeFrame from '@parcel/codeframe';
import _mdAnsi from '@parcel/markdown-ansi';
import _chalk from 'chalk';
import path from 'path';
// $FlowFixMe
import _terminalLink from 'terminal-link';
/* eslint-disable import/no-extraneous-dependencies */
// $FlowFixMe
import snarkdown from 'snarkdown';
/* eslint-enable import/no-extraneous-dependencies */
// One rendered code frame: the highlighted snippet plus where it came from.
export type FormattedCodeFrame = {|
  /** "file:line:column" (or just the file path, or '' when unknown). */
  location: string,
  /** The syntax-highlighted code frame text. */
  code: string,
|};
// The fully formatted pieces of a diagnostic, ready for terminal (ANSI) or
// HTML output; see `prettyDiagnostic` below.
export type AnsiDiagnosticResult = {|
  message: string,
  stack: string,
  /** A formatted string containing all code frames, including their file locations. */
  codeframe: string,
  /** A list of code frames with highlighted code and file locations separately. */
  frames: Array<FormattedCodeFrame>,
  hints: Array<string>,
  documentation: string,
|};
/**
 * Formats a Parcel `Diagnostic` for display as either ANSI (terminal) or
 * HTML strings: message, stack, highlighted code frames, hints and a
 * documentation link.
 *
 * When a code frame carries no inline `code`, the file is read from
 * `options.inputFS`; relative code-frame paths are resolved against
 * `options.projectRoot`.
 */
export default async function prettyDiagnostic(
  diagnostic: Diagnostic,
  options?: PluginOptions,
  terminalWidth?: number,
  format: 'ansi' | 'html' = 'ansi',
): Promise<AnsiDiagnosticResult> {
  let {
    origin,
    message,
    stack,
    codeFrames,
    hints,
    skipFormatting,
    documentationURL,
  } = diagnostic;
  // Pick markdown/link/color backends matching the requested output format.
  const md = format === 'ansi' ? _mdAnsi : snarkdown;
  const terminalLink =
    format === 'ansi'
      ? _terminalLink
      : // eslint-disable-next-line no-unused-vars
        (text, url, _) => `<a href="${url}">${text}</a>`;
  const chalk =
    format === 'ansi'
      ? _chalk
      : {
          gray: {
            underline: v =>
              `<span style="color: grey; text-decoration: underline;">${v}</span>`,
          },
        };
  let result = {
    message:
      md(`**${origin ?? 'unknown'}**: `) +
      (skipFormatting ? message : md(message)),
    stack: '',
    codeframe: '',
    frames: [],
    hints: [],
    documentation: '',
  };
  if (codeFrames != null) {
    for (let codeFrame of codeFrames) {
      let filePath = codeFrame.filePath;
      if (filePath != null && options && !path.isAbsolute(filePath)) {
        filePath = path.join(options.projectRoot, filePath);
      }
      let highlights = codeFrame.codeHighlights;
      let code = codeFrame.code;
      // Fall back to reading the file when the frame has no inline code.
      if (code == null && options && filePath != null) {
        code = await options.inputFS.readFile(filePath, 'utf8');
      }
      let formattedCodeFrame = '';
      if (code != null) {
        formattedCodeFrame = formatCodeFrame(code, highlights, {
          useColor: true,
          syntaxHighlighting: true,
          language:
            // $FlowFixMe sketchy null checks do not matter here...
            codeFrame.language ||
            (filePath != null ? path.extname(filePath).substr(1) : undefined),
          terminalWidth,
        });
      }
      // Location string is "file:line:col" when we have a highlight to
      // point at, just the file otherwise, or '' with no file at all.
      let location;
      if (typeof filePath !== 'string') {
        location = '';
      } else if (highlights.length === 0) {
        location = filePath;
      } else {
        location = `${filePath}:${highlights[0].start.line}:${highlights[0].start.column}`;
      }
      result.codeframe += location ? chalk.gray.underline(location) + '\n' : '';
      result.codeframe += formattedCodeFrame;
      // Separate consecutive frames with a blank line (not after the last).
      if (codeFrame !== codeFrames[codeFrames.length - 1]) {
        result.codeframe += '\n\n';
      }
      result.frames.push({
        location,
        code: formattedCodeFrame,
      });
    }
  }
  if (stack != null) {
    result.stack = stack;
  }
  if (Array.isArray(hints) && hints.length) {
    result.hints = hints.map(h => {
      return md(h);
    });
  }
  if (documentationURL != null) {
    result.documentation = terminalLink('Learn more', documentationURL, {
      fallback: (text, url) => `${text}: ${url}`,
    });
  }
  return result;
}

View File

@@ -0,0 +1,22 @@
// @flow strict-local
import type {BuildProgressEvent} from '@parcel/types';
import path from 'path';
export function getProgressMessage(event: BuildProgressEvent): ?string {
switch (event.phase) {
case 'transforming':
return `Building ${path.basename(event.filePath)}...`;
case 'bundling':
return 'Bundling...';
case 'packaging':
return `Packaging ${event.bundle.displayName}...`;
case 'optimizing':
return `Optimizing ${event.bundle.displayName}...`;
}
return null;
}

View File

@@ -0,0 +1,16 @@
// @flow strict-local
import type {FilePath, NamedBundle} from '@parcel/types';
import path from 'path';
import {relativePath} from './path';
export function relativeBundlePath(
from: NamedBundle,
to: NamedBundle,
opts: {|leadingDotSlash: boolean|} = {leadingDotSlash: true},
): FilePath {
let fromPath = path.join(from.target.distDir, from.name);
let toPath = path.join(to.target.distDir, to.name);
return relativePath(path.dirname(fromPath), toPath, opts.leadingDotSlash);
}

View File

@@ -0,0 +1,7 @@
// @flow
import path from 'path';
import url from 'url';
export default function relativeUrl(from: string, to: string): string {
return url.format(url.parse(path.relative(from, to)));
}

View File

@@ -0,0 +1,222 @@
// @flow strict-local
import type SourceMap from '@parcel/source-map';
import type {
Async,
Blob,
Bundle,
BundleGraph,
Dependency,
NamedBundle,
} from '@parcel/types';
import {Readable} from 'stream';
import nullthrows from 'nullthrows';
import invariant from 'assert';
import URL from 'url';
import {bufferStream, relativeBundlePath, urlJoin} from './';
// Maps a dependency-id placeholder to the literal from/to strings used when
// rewriting packaged bundle contents.
type ReplacementMap = Map<
  string /* dependency id */,
  {|from: string, to: string|},
>;
/*
* Replaces references to dependency ids for URL dependencies with:
* - in the case of an unresolvable url dependency, the original specifier.
* These are external requests that Parcel did not bundle.
* - in the case of a reference to another bundle, the relative url to that
* bundle from the current bundle.
*/
export function replaceURLReferences({
bundle,
bundleGraph,
contents,
map,
getReplacement = s => s,
relative = true,
}: {|
bundle: NamedBundle,
bundleGraph: BundleGraph<NamedBundle>,
contents: string,
relative?: boolean,
map?: ?SourceMap,
getReplacement?: string => string,
|}): {|+contents: string, +map: ?SourceMap|} {
let replacements = new Map();
let urlDependencies = [];
bundle.traverse(node => {
if (node.type === 'dependency' && node.value.specifierType === 'url') {
urlDependencies.push(node.value);
}
});
for (let dependency of urlDependencies) {
if (dependency.specifierType !== 'url') {
continue;
}
let placeholder = dependency.meta?.placeholder ?? dependency.id;
invariant(typeof placeholder === 'string');
let resolved = bundleGraph.getReferencedBundle(dependency, bundle);
if (resolved == null) {
replacements.set(placeholder, {
from: placeholder,
to: getReplacement(dependency.specifier),
});
continue;
}
if (resolved.bundleBehavior === 'inline') {
// If a bundle is inline, it should be replaced with inline contents,
// not a URL.
continue;
}
replacements.set(
placeholder,
getURLReplacement({
dependency,
fromBundle: bundle,
toBundle: resolved,
relative,
getReplacement,
}),
);
}
return performReplacement(replacements, contents, map);
}
/*
* Replaces references to dependency ids for inline bundles with the packaged
* contents of that bundle.
*/
export async function replaceInlineReferences({
  bundle,
  bundleGraph,
  contents,
  map,
  getInlineReplacement,
  getInlineBundleContents,
}: {|
  bundle: Bundle,
  bundleGraph: BundleGraph<NamedBundle>,
  contents: string,
  getInlineReplacement: (
    Dependency,
    ?'string',
    string,
  ) => {|from: string, to: string|},
  getInlineBundleContents: (
    Bundle,
    BundleGraph<NamedBundle>,
  ) => Async<{|contents: Blob|}>,
  map?: ?SourceMap,
|}): Promise<{|+contents: string, +map: ?SourceMap|}> {
  let replacements = new Map();
  // Collect all dependencies; only those resolving to an inline bundle are
  // replaced below.
  let dependencies = [];
  bundle.traverse(node => {
    if (node.type === 'dependency') {
      dependencies.push(node.value);
    }
  });
  for (let dependency of dependencies) {
    let entryBundle = bundleGraph.getReferencedBundle(dependency, bundle);
    if (entryBundle?.bundleBehavior !== 'inline') {
      continue;
    }
    // Package the inline bundle and normalize its contents to a string
    // (packager may return either a Readable stream or a string/Buffer).
    let packagedBundle = await getInlineBundleContents(
      entryBundle,
      bundleGraph,
    );
    let packagedContents = (
      packagedBundle.contents instanceof Readable
        ? await bufferStream(packagedBundle.contents)
        : packagedBundle.contents
    ).toString();
    // Only string-type inline bundles are substituted for the placeholder.
    let inlineType = nullthrows(entryBundle.getMainEntry()).meta.inlineType;
    if (inlineType == null || inlineType === 'string') {
      let placeholder = dependency.meta?.placeholder ?? dependency.id;
      invariant(typeof placeholder === 'string');
      replacements.set(
        placeholder,
        getInlineReplacement(dependency, inlineType, packagedContents),
      );
    }
  }
  return performReplacement(replacements, contents, map);
}
// Computes the {from, to} replacement for a URL dependency that resolved to
// `toBundle`: either a path relative to `fromBundle`, or an absolute URL
// under the target's public URL. The hash fragment of the original
// specifier is carried over.
export function getURLReplacement({
  dependency,
  fromBundle,
  toBundle,
  relative,
  getReplacement,
}: {|
  dependency: Dependency,
  fromBundle: NamedBundle,
  toBundle: NamedBundle,
  relative: boolean,
  getReplacement?: string => string,
|}): {|from: string, to: string|} {
  let to;
  let orig = URL.parse(dependency.specifier);
  if (relative) {
    to = URL.format({
      pathname: relativeBundlePath(fromBundle, toBundle, {
        leadingDotSlash: false,
      }),
      hash: orig.hash,
    });
    // If the resulting path includes a colon character and doesn't start with a ./ or ../
    // we need to add one so that the first part before the colon isn't parsed as a URL protocol.
    if (to.includes(':') && !to.startsWith('./') && !to.startsWith('../')) {
      to = './' + to;
    }
  } else {
    to = urlJoin(
      toBundle.target.publicUrl,
      URL.format({
        pathname: nullthrows(toBundle.name),
        hash: orig.hash,
      }),
    );
  }
  // The text being replaced is the dependency's placeholder (or its id).
  let placeholder = dependency.meta?.placeholder ?? dependency.id;
  invariant(typeof placeholder === 'string');
  return {
    from: placeholder,
    to: getReplacement ? getReplacement(to) : to,
  };
}
function performReplacement(
replacements: ReplacementMap,
contents: string,
map?: ?SourceMap,
): {|+contents: string, +map: ?SourceMap|} {
let finalContents = contents;
for (let {from, to} of replacements.values()) {
// Perform replacement
finalContents = finalContents.split(from).join(to);
}
return {
contents: finalContents,
// TODO: Update sourcemap with adjusted contents
map,
};
}

View File

@@ -0,0 +1,504 @@
// @flow strict-local
import ThrowableDiagnostic, {
generateJSONCodeHighlights,
escapeMarkdown,
encodeJSONKeyComponent,
} from '@parcel/diagnostic';
import type {Mapping} from '@mischnic/json-sourcemap';
import nullthrows from 'nullthrows';
import * as levenshtein from 'fastest-levenshtein';
// The subset of JSON schema supported by `validateSchema`. Properties
// prefixed with `__` are Parcel extensions used for nicer error messages.
export type SchemaEntity =
  | SchemaObject
  | SchemaArray
  | SchemaBoolean
  | SchemaString
  | SchemaNumber
  | SchemaEnum
  | SchemaOneOf
  | SchemaAllOf
  | SchemaNot
  | SchemaAny;
export type SchemaArray = {|
  type: 'array',
  items?: SchemaEntity,
  __type?: string,
|};
export type SchemaBoolean = {|
  type: 'boolean',
  __type?: string,
|};
export type SchemaOneOf = {|
  oneOf: Array<SchemaEntity>,
|};
export type SchemaAllOf = {|
  allOf: Array<SchemaEntity>,
|};
export type SchemaNot = {|
  not: SchemaEntity,
  // Message reported when the `not` schema matches (i.e. validation fails).
  __message: string,
|};
export type SchemaString = {|
  type: 'string',
  enum?: Array<string>,
  // Custom validator: returns an error message, or null/undefined when valid.
  __validate?: (val: string) => ?string,
  __type?: string,
|};
export type SchemaNumber = {|
  type: 'number',
  enum?: Array<number>,
  __type?: string,
|};
export type SchemaEnum = {|
  enum: Array<mixed>,
|};
export type SchemaObject = {|
  type: 'object',
  properties: {[string]: SchemaEntity, ...},
  additionalProperties?: boolean | SchemaEntity,
  required?: Array<string>,
  __forbiddenProperties?: Array<string>,
  __type?: string,
|};
export type SchemaAny = {||};
// A structured validation failure. `dataPath` is a JSON-pointer-like path
// into the validated data; `ancestors` lists schema nodes innermost-first.
export type SchemaError =
  | {|
      type: 'type',
      expectedTypes: Array<string>,
      dataType: ?'key' | 'value',
      dataPath: string,
      ancestors: Array<SchemaEntity>,
      prettyType?: string,
    |}
  | {|
      type: 'enum',
      expectedValues: Array<mixed>,
      dataType: 'key' | 'value',
      actualValue: mixed,
      dataPath: string,
      ancestors: Array<SchemaEntity>,
      prettyType?: string,
    |}
  | {|
      type: 'forbidden-prop',
      prop: string,
      expectedProps: Array<string>,
      actualProps: Array<string>,
      dataType: 'key',
      dataPath: string,
      ancestors: Array<SchemaEntity>,
      prettyType?: string,
    |}
  | {|
      type: 'missing-prop',
      prop: string,
      expectedProps: Array<string>,
      actualProps: Array<string>,
      dataType: 'key' | 'value',
      dataPath: string,
      ancestors: Array<SchemaEntity>,
      prettyType?: string,
    |}
  | {|
      type: 'other',
      actualValue: mixed,
      dataType: ?'key' | 'value',
      message?: string,
      dataPath: string,
      ancestors: Array<SchemaEntity>,
    |};
// Validates `data` against `schema`, returning a (possibly empty) list of
// structured errors rather than throwing. See `validateSchema.diagnostic`
// for the throwing variant.
function validateSchema(schema: SchemaEntity, data: mixed): Array<SchemaError> {
  // Walks schema and data in lockstep. `schemaAncestors` is innermost-first;
  // `dataPath` is a JSON-pointer-like location used in error reports.
  function walk(
    schemaAncestors,
    dataNode,
    dataPath,
  ): ?SchemaError | Array<SchemaError> {
    let [schemaNode] = schemaAncestors;
    if (schemaNode.type) {
      let type = Array.isArray(dataNode) ? 'array' : typeof dataNode;
      if (schemaNode.type !== type) {
        return {
          type: 'type',
          dataType: 'value',
          dataPath,
          expectedTypes: [schemaNode.type],
          ancestors: schemaAncestors,
          prettyType: schemaNode.__type,
        };
      } else {
        switch (schemaNode.type) {
          case 'array': {
            if (schemaNode.items) {
              let results: Array<SchemaError | Array<SchemaError>> = [];
              // $FlowFixMe type was already checked
              for (let i = 0; i < dataNode.length; i++) {
                let result = walk(
                  [schemaNode.items].concat(schemaAncestors),
                  // $FlowFixMe type was already checked
                  dataNode[i],
                  dataPath + '/' + i,
                );
                if (result) results.push(result);
              }
              if (results.length)
                return results.reduce((acc, v) => acc.concat(v), []);
            }
            break;
          }
          case 'string': {
            // $FlowFixMe type was already checked
            let value: string = dataNode;
            if (schemaNode.enum) {
              if (!schemaNode.enum.includes(value)) {
                return {
                  type: 'enum',
                  dataType: 'value',
                  dataPath,
                  expectedValues: schemaNode.enum,
                  actualValue: value,
                  ancestors: schemaAncestors,
                };
              }
            } else if (schemaNode.__validate) {
              let validationError = schemaNode.__validate(value);
              if (typeof validationError == 'string') {
                return {
                  type: 'other',
                  dataType: 'value',
                  dataPath,
                  message: validationError,
                  actualValue: value,
                  ancestors: schemaAncestors,
                };
              }
            }
            break;
          }
          case 'number': {
            // $FlowFixMe type was already checked
            let value: number = dataNode;
            if (schemaNode.enum) {
              if (!schemaNode.enum.includes(value)) {
                return {
                  type: 'enum',
                  dataType: 'value',
                  dataPath,
                  expectedValues: schemaNode.enum,
                  actualValue: value,
                  ancestors: schemaAncestors,
                };
              }
            }
            break;
          }
          case 'object': {
            let results: Array<Array<SchemaError> | SchemaError> = [];
            let invalidProps;
            // Report any properties explicitly forbidden by the schema.
            if (schemaNode.__forbiddenProperties) {
              // $FlowFixMe type was already checked
              let keys = Object.keys(dataNode);
              invalidProps = schemaNode.__forbiddenProperties.filter(val =>
                keys.includes(val),
              );
              results.push(
                ...invalidProps.map(
                  k =>
                    ({
                      type: 'forbidden-prop',
                      dataPath: dataPath + '/' + encodeJSONKeyComponent(k),
                      dataType: 'key',
                      prop: k,
                      expectedProps: Object.keys(schemaNode.properties),
                      actualProps: keys,
                      ancestors: schemaAncestors,
                    }: SchemaError),
                ),
              );
            }
            // Report any required properties the data is missing.
            if (schemaNode.required) {
              // $FlowFixMe type was already checked
              let keys = Object.keys(dataNode);
              let missingKeys = schemaNode.required.filter(
                val => !keys.includes(val),
              );
              results.push(
                ...missingKeys.map(
                  k =>
                    ({
                      type: 'missing-prop',
                      dataPath,
                      dataType: 'value',
                      prop: k,
                      expectedProps: schemaNode.required,
                      actualProps: keys,
                      ancestors: schemaAncestors,
                    }: SchemaError),
                ),
              );
            }
            // Recurse into declared properties; unknown properties are either
            // rejected, allowed, or validated against `additionalProperties`.
            if (schemaNode.properties) {
              let {additionalProperties = true} = schemaNode;
              // $FlowFixMe type was already checked
              for (let k in dataNode) {
                if (invalidProps && invalidProps.includes(k)) {
                  // Don't check type on forbidden props
                  continue;
                } else if (k in schemaNode.properties) {
                  let result = walk(
                    [schemaNode.properties[k]].concat(schemaAncestors),
                    // $FlowFixMe type was already checked
                    dataNode[k],
                    dataPath + '/' + encodeJSONKeyComponent(k),
                  );
                  if (result) results.push(result);
                } else {
                  if (typeof additionalProperties === 'boolean') {
                    if (!additionalProperties) {
                      results.push({
                        type: 'enum',
                        dataType: 'key',
                        dataPath: dataPath + '/' + encodeJSONKeyComponent(k),
                        expectedValues: Object.keys(
                          schemaNode.properties,
                        ).filter(
                          // $FlowFixMe type was already checked
                          p => !(p in dataNode),
                        ),
                        actualValue: k,
                        ancestors: schemaAncestors,
                        prettyType: schemaNode.__type,
                      });
                    }
                  } else {
                    let result = walk(
                      [additionalProperties].concat(schemaAncestors),
                      // $FlowFixMe type was already checked
                      dataNode[k],
                      dataPath + '/' + encodeJSONKeyComponent(k),
                    );
                    if (result) results.push(result);
                  }
                }
              }
            }
            if (results.length)
              return results.reduce((acc, v) => acc.concat(v), []);
            break;
          }
          case 'boolean':
            // NOOP, type was checked already
            break;
          default:
            throw new Error(`Unimplemented schema type ${type}?`);
        }
      }
    } else {
      // Schema node without a `type`: enum / oneOf / allOf / not combinators.
      // NOTE(review): `actualValue: schemaNode` below looks like it should be
      // `dataNode` (the schema node is reported as the offending value) —
      // confirm against callers before changing.
      if (schemaNode.enum && !schemaNode.enum.includes(dataNode)) {
        return {
          type: 'enum',
          dataType: 'value',
          dataPath: dataPath,
          expectedValues: schemaNode.enum,
          actualValue: schemaNode,
          ancestors: schemaAncestors,
        };
      }
      if (schemaNode.oneOf || schemaNode.allOf) {
        let list = schemaNode.oneOf || schemaNode.allOf;
        let results: Array<SchemaError | Array<SchemaError>> = [];
        for (let f of list) {
          let result = walk([f].concat(schemaAncestors), dataNode, dataPath);
          if (result) results.push(result);
        }
        // oneOf fails only when every branch failed; allOf fails when any did.
        if (
          schemaNode.oneOf
            ? results.length == schemaNode.oneOf.length
            : results.length > 0
        ) {
          // return the result with more values / longer key
          results.sort((a, b) =>
            Array.isArray(a) || Array.isArray(b)
              ? Array.isArray(a) && !Array.isArray(b)
                ? -1
                : !Array.isArray(a) && Array.isArray(b)
                ? 1
                : Array.isArray(a) && Array.isArray(b)
                ? b.length - a.length
                : 0
              : b.dataPath.length - a.dataPath.length,
          );
          return results[0];
        }
      } else if (schemaNode.not) {
        let result = walk(
          [schemaNode.not].concat(schemaAncestors),
          dataNode,
          dataPath,
        );
        // The `not` schema matching (no errors) is the failure case here.
        if (!result || result.length == 0) {
          return {
            type: 'other',
            dataPath,
            dataType: null,
            message: schemaNode.__message,
            actualValue: dataNode,
            ancestors: schemaAncestors,
          };
        }
      }
    }
    return undefined;
  }
  let result = walk([schema], data, '');
  return Array.isArray(result) ? result : result ? [result] : [];
}
export default validateSchema;
export function fuzzySearch(
expectedValues: Array<string>,
actualValue: string,
): Array<string> {
let result = expectedValues
.map(exp => [exp, levenshtein.distance(exp, actualValue)])
.filter(
// Remove if more than half of the string would need to be changed
([, d]) => d * 2 < actualValue.length,
);
result.sort(([, a], [, b]) => a - b);
return result.map(([v]) => v);
}
// Runs `validateSchema` and, when errors are found, throws a
// ThrowableDiagnostic with JSON code highlights pointing at each offending
// key/value. `data` supplies either raw `source`/`data` or a pre-parsed
// json-sourcemap `map` with pointers.
validateSchema.diagnostic = function (
  schema: SchemaEntity,
  data: {|
    ...
      | {|
          source?: ?string,
          data?: mixed,
        |}
      | {|
          source: string,
          map: {|
            data: mixed,
            pointers: {|[key: string]: Mapping|},
          |},
        |},
    filePath?: ?string,
    prependKey?: ?string,
  |},
  origin: string,
  message: string,
): void {
  // NOTE(review): `!data` is always false when `data` is an object, so this
  // guard can never throw — presumably `data.data == null` was intended;
  // confirm before changing, since fixing it would start throwing.
  if (
    'source' in data &&
    'data' in data &&
    typeof data.source !== 'string' &&
    !data
  ) {
    throw new Error(
      'At least one of data.source and data.data must be defined!',
    );
  }
  let object = data.map
    ? data.map.data
    : // $FlowFixMe we can assume it's a JSON object
      data.data ?? JSON.parse(data.source);
  let errors = validateSchema(schema, object);
  if (errors.length) {
    // Build a human-friendly message per error, using fuzzy matching to
    // suggest likely intended values/properties.
    let keys = errors.map(e => {
      let message;
      if (e.type === 'enum') {
        let {actualValue} = e;
        let expectedValues = e.expectedValues.map(String);
        let likely =
          actualValue != null
            ? fuzzySearch(expectedValues, String(actualValue))
            : [];
        if (likely.length > 0) {
          message = `Did you mean ${likely
            .map(v => JSON.stringify(v))
            .join(', ')}?`;
        } else if (expectedValues.length > 0) {
          message = `Possible values: ${expectedValues
            .map(v => JSON.stringify(v))
            .join(', ')}`;
        } else {
          message = 'Unexpected value';
        }
      } else if (e.type === 'forbidden-prop') {
        let {prop, expectedProps, actualProps} = e;
        let likely = fuzzySearch(expectedProps, prop).filter(
          v => !actualProps.includes(v),
        );
        if (likely.length > 0) {
          message = `Did you mean ${likely
            .map(v => JSON.stringify(v))
            .join(', ')}?`;
        } else {
          message = 'Unexpected property';
        }
      } else if (e.type === 'missing-prop') {
        let {prop, actualProps} = e;
        let likely = fuzzySearch(actualProps, prop);
        if (likely.length > 0) {
          // Point the highlight at the probable typo'd key instead.
          message = `Did you mean ${JSON.stringify(prop)}?`;
          e.dataPath += '/' + likely[0];
          e.dataType = 'key';
        } else {
          message = `Missing property ${prop}`;
        }
      } else if (e.type === 'type') {
        if (e.prettyType != null) {
          message = `Expected ${e.prettyType}`;
        } else {
          message = `Expected type ${e.expectedTypes.join(', ')}`;
        }
      } else {
        message = e.message;
      }
      return {key: e.dataPath, type: e.dataType, message};
    });
    // Use the provided source + map when available; otherwise re-serialize
    // the data so highlights can still be generated.
    let map, code;
    if (data.map) {
      map = data.map;
      code = data.source;
    } else {
      // $FlowFixMe we can assume that data is valid JSON
      map = data.source ?? JSON.stringify(nullthrows(data.data), 0, '\t');
      code = map;
    }
    let codeFrames = [
      {
        filePath: data.filePath ?? undefined,
        language: 'json',
        code,
        codeHighlights: generateJSONCodeHighlights(
          map,
          keys.map(({key, type, message}) => ({
            key: (data.prependKey ?? '') + key,
            type: type,
            message: message != null ? escapeMarkdown(message) : message,
          })),
        ),
      },
    ];
    throw new ThrowableDiagnostic({
      diagnostic: {
        message: message,
        origin,
        codeFrames,
      },
    });
  }
};

View File

@@ -0,0 +1,23 @@
// @flow
// Buffer constructor usable for sharing data between threads/workers:
// SharedArrayBuffer where it is available AND transferable, otherwise a
// plain ArrayBuffer fallback (browsers only).
export let SharedBuffer: Class<ArrayBuffer> | Class<SharedArrayBuffer>;
// $FlowFixMe[prop-missing]
if (process.browser) {
  SharedBuffer = ArrayBuffer;
  // Safari has removed the constructor
  if (typeof SharedArrayBuffer !== 'undefined') {
    // Probe by round-tripping through a MessageChannel port.
    let channel = new MessageChannel();
    try {
      // Firefox might throw when sending the Buffer over a MessagePort
      channel.port1.postMessage(new SharedArrayBuffer(0));
      SharedBuffer = SharedArrayBuffer;
    } catch (_) {
      // NOOP
    }
    channel.port1.close();
    channel.port2.close();
  }
} else {
  SharedBuffer = SharedArrayBuffer;
}

View File

@@ -0,0 +1,138 @@
// @flow
import type {SourceLocation} from '@parcel/types';
import type {FileSystem} from '@parcel/fs';
import SourceMap from '@parcel/source-map';
import path from 'path';
import {normalizeSeparators, isAbsolute} from './path';
// Matches a trailing `//# sourceMappingURL=...` or `/*# sourceMappingURL=... */`
// comment; capture group 1 is the URL.
export const SOURCEMAP_RE: RegExp =
  /(?:\/\*|\/\/)\s*[@#]\s*sourceMappingURL\s*=\s*([^\s*]+)(?:\s*\*\/)?\s*$/;
// Matches a base64 data URL; capture group 1 is the base64 payload.
const DATA_URL_RE = /^data:[^;]+(?:;charset=[^;]+)?;base64,(.*)/;
// File extensions of assets that may carry source maps.
export const SOURCEMAP_EXTENSIONS: Set<string> = new Set<string>([
  'css',
  'es',
  'es6',
  'js',
  'jsx',
  'mjs',
  'ts',
  'tsx',
]);
export function matchSourceMappingURL(
contents: string,
): null | RegExp$matchResult {
return contents.match(SOURCEMAP_RE);
}
// Extracts the sourceMappingURL from `contents` and loads the referenced
// map: decoded from an inline base64 data URL, or read from disk relative
// to `filename`. Returns undefined when no sourceMappingURL comment exists.
export async function loadSourceMapUrl(
  fs: FileSystem,
  filename: string,
  contents: string,
): Promise<?{|filename: string, map: any, url: string|}> {
  let match = matchSourceMappingURL(contents);
  if (match) {
    let url = match[1].trim();
    let dataURLMatch = url.match(DATA_URL_RE);
    let mapFilePath;
    if (dataURLMatch) {
      // Inline map: attribute it to the file that contained it.
      mapFilePath = filename;
    } else {
      // Strip any file:// prefix and resolve relative URLs against the
      // containing file's directory.
      mapFilePath = url.replace(/^file:\/\//, '');
      mapFilePath = isAbsolute(mapFilePath)
        ? mapFilePath
        : path.join(path.dirname(filename), mapFilePath);
    }
    return {
      url,
      filename: mapFilePath,
      map: JSON.parse(
        dataURLMatch
          ? Buffer.from(dataURLMatch[1], 'base64').toString()
          : await fs.readFile(mapFilePath, 'utf8'),
      ),
    };
  }
}
// Loads the source map referenced by `contents` (if any) into a SourceMap
// instance, resolving each `sources` entry against the map's sourceRoot
// (when relative) and the file's directory. Returns undefined when no map
// reference is found.
export async function loadSourceMap(
  filename: string,
  contents: string,
  options: {fs: FileSystem, projectRoot: string, ...},
): Promise<?SourceMap> {
  let foundMap = await loadSourceMapUrl(options.fs, filename, contents);
  if (foundMap) {
    let mapSourceRoot = path.dirname(filename);
    // A relative sourceRoot is applied on top of the file's directory;
    // root-absolute sourceRoots are ignored here.
    if (
      foundMap.map.sourceRoot &&
      !normalizeSeparators(foundMap.map.sourceRoot).startsWith('/')
    ) {
      mapSourceRoot = path.join(mapSourceRoot, foundMap.map.sourceRoot);
    }
    let sourcemapInstance = new SourceMap(options.projectRoot);
    sourcemapInstance.addVLQMap({
      ...foundMap.map,
      sources: foundMap.map.sources.map(s => {
        return path.join(mapSourceRoot, s);
      }),
    });
    return sourcemapInstance;
  }
}
/**
 * Maps a post-transform SourceLocation back to its original position using
 * `originalMap`. Returns a new SourceLocation; `loc` is not mutated.
 *
 * SourceLocation columns are 1-based while source-map columns are 0-based,
 * hence the `- 1` on lookup and the `++` after remapping.
 */
export function remapSourceLocation(
  loc: SourceLocation,
  originalMap: SourceMap,
): SourceLocation {
  let {
    filePath,
    start: {line: startLine, column: startCol},
    end: {line: endLine, column: endCol},
  } = loc;
  // Extent of the input range; used below to reconstruct a sensible end
  // position when the remapped end lands before the remapped start.
  let lineDiff = endLine - startLine;
  let colDiff = endCol - startCol;
  let start = originalMap.findClosestMapping(startLine, startCol - 1);
  let end = originalMap.findClosestMapping(endLine, endCol - 1);
  if (start?.original) {
    if (start.source) {
      filePath = start.source;
    }
    ({line: startLine, column: startCol} = start.original);
    startCol++; // source map columns are 0-based
  }
  if (end?.original) {
    ({line: endLine, column: endCol} = end.original);
    endCol++; // source map columns are 0-based
    if (endLine < startLine) {
      // End mapped before start: collapse to a zero-width range at start.
      endLine = startLine;
      endCol = startCol;
    } else if (endLine === startLine && endCol < startCol && lineDiff === 0) {
      // Same-line range whose end mapped before its start: restore the
      // original column width instead.
      endCol = startCol + colDiff;
    } else if (endLine === startLine && startCol === endCol && lineDiff === 0) {
      // Prevent 0-length ranges
      endCol = startCol + 1;
    }
  } else {
    // No mapping for the end position: collapse the range onto the start.
    endLine = startLine;
    endCol = startCol;
  }
  return {
    filePath,
    start: {
      line: startLine,
      column: startCol,
    },
    end: {
      line: endLine,
      column: endCol,
    },
  };
}

View File

@@ -0,0 +1,74 @@
// @flow strict-local
import {Readable, PassThrough} from 'stream';
import type {Blob} from '@parcel/types';
export function measureStreamLength(stream: Readable): Promise<number> {
return new Promise((resolve, reject) => {
let length = 0;
stream.on('data', chunk => {
length += chunk;
});
stream.on('end', () => resolve(length));
stream.on('error', reject);
});
}
export function readableFromStringOrBuffer(str: string | Buffer): Readable {
// https://stackoverflow.com/questions/12755997/how-to-create-streams-from-string-in-node-js
const stream = new Readable();
stream.push(str);
stream.push(null);
return stream;
}
export function bufferStream(stream: Readable): Promise<Buffer> {
return new Promise((resolve, reject) => {
let buf = Buffer.from([]);
stream.on('data', data => {
buf = Buffer.concat([buf, data]);
});
stream.on('end', () => {
resolve(buf);
});
stream.on('error', reject);
});
}
export function blobToStream(blob: Blob): Readable {
if (blob instanceof Readable) {
return blob;
}
return readableFromStringOrBuffer(blob);
}
export function streamFromPromise(promise: Promise<Blob>): Readable {
const stream = new PassThrough();
promise.then(blob => {
if (blob instanceof Readable) {
blob.pipe(stream);
} else {
stream.end(blob);
}
});
return stream;
}
/**
 * Pipes `stream` into the returned stream, but if `stream` errors with
 * ENOENT (file not found), switches to the stream produced by `fallback()`
 * instead. Any other error is re-emitted on the returned stream.
 *
 * NOTE(review): chunks `stream` emitted before the ENOENT error will have
 * already been forwarded to the result — callers appear to rely on ENOENT
 * firing before any data flows; confirm at call sites.
 */
export function fallbackStream(
  stream: Readable,
  fallback: () => Readable,
): Readable {
  const res = new PassThrough();
  // Attach the error handler before piping so a synchronous error on pipe
  // start is still routed to the fallback.
  stream.on('error', err => {
    if (err.code === 'ENOENT') {
      fallback().pipe(res);
    } else {
      res.emit('error', err);
    }
  });
  stream.pipe(res);
  return res;
}

View File

@@ -0,0 +1,15 @@
// @flow strict-local
export default function throttle<TArgs: Iterable<mixed>>(
fn: (...args: TArgs) => mixed,
delay: number,
): (...args: TArgs) => void {
let lastCalled: ?number;
return function throttled(...args: TArgs) {
if (lastCalled == null || lastCalled + delay <= Date.now()) {
fn.call(this, ...args);
lastCalled = Date.now();
}
};
}

View File

@@ -0,0 +1,19 @@
// @flow strict-local
import URL from 'url';
import path from 'path';
/**
 * Joins an asset path onto a public URL, keeping the asset's query string
 * and hash, and normalizes Windows paths
 * e.g. from \path\to\res.js to /path/to/res.js.
 */
export default function urlJoin(publicURL: string, assetPath: string): string {
  // Legacy url.parse with slashesDenoteHost=true so `//host/...` keeps its
  // host part. NOTE(review): the legacy parser appears to also convert
  // backslashes in the path to forward slashes (see the urlJoin tests) —
  // confirm before swapping in the WHATWG URL API.
  const url = URL.parse(publicURL, false, true);
  // Leading / ensures that paths with colons are not parsed as a protocol.
  let p = assetPath.startsWith('/') ? assetPath : '/' + assetPath;
  const assetUrl = URL.parse(p);
  url.pathname = path.posix.join(url.pathname, assetUrl.pathname);
  // Query string and fragment come from the asset path, not the public URL.
  url.search = assetUrl.search;
  url.hash = assetUrl.hash;
  return URL.format(url);
}

View File

@@ -0,0 +1,41 @@
// @flow strict-local
import assert from 'assert';
import {DefaultMap} from '../src/DefaultMap';
// Tests for DefaultMap: per these tests, get() materializes a value from the
// factory passed to the constructor when the key is absent, and stores it.
describe('DefaultMap', () => {
  it('constructs with entries just like Map', () => {
    let map = new DefaultMap(
      k => k,
      [
        [1, 3],
        [2, 27],
      ],
    );
    assert.equal(map.get(1), 3);
    assert.deepEqual(Array.from(map.entries()), [
      [1, 3],
      [2, 27],
    ]);
  });
  it("returns a default value based on a key if it doesn't exist", () => {
    let map = new DefaultMap(k => k);
    assert.equal(map.get(3), 3);
  });
  it("sets a default value based on a key if it doesn't exist", () => {
    let map = new DefaultMap(k => k);
    map.get(3);
    assert.deepEqual(Array.from(map.entries()), [[3, 3]]);
  });
  it('respects undefined/null if it already existed in the map', () => {
    let map = new DefaultMap<number, number | void | null>(k => k);
    map.set(3, undefined);
    // get() must not overwrite an explicitly-set undefined/null with the
    // factory default.
    assert.equal(map.get(3), undefined);
    map.set(4, null);
    assert.equal(map.get(4), null);
  });
});

View File

@@ -0,0 +1,103 @@
// @flow
import assert from 'assert';
import randomInt from 'random-int';
import PromiseQueue from '../src/PromiseQueue';
import sinon from 'sinon';
describe('PromiseQueue', () => {
  it('run() should resolve when all async functions in queue have completed', async () => {
    let queue = new PromiseQueue();
    let someBooleanToBeChanged = false;
    queue.add(() =>
      Promise.resolve().then(() => {
        someBooleanToBeChanged = true;
      }),
    );
    await queue.run();
    assert(someBooleanToBeChanged);
  });
  it('run() should reject if any of the async functions in the queue failed', async () => {
    // NOTE(review): this test would also pass if run() resolved, since the
    // catch block is simply never entered; assert.rejects would pin the
    // intended behavior.
    let error = new Error('some failure');
    try {
      let queue = new PromiseQueue();
      queue
        .add(() => Promise.reject(error))
        .catch(
          /* catch this to prevent an unhandled promise rejection*/ () => {},
        );
      await queue.run();
    } catch (e) {
      assert.equal(e, error);
    }
  });
  it('.run() should instantly resolve when the queue is empty', async () => {
    let queue = new PromiseQueue();
    await queue.run();
    // no need to assert, test will hang or throw an error if condition fails
  });
  it(".add() should resolve with the same result when the passed in function's promise resolves", async () => {
    let queue = new PromiseQueue();
    let promise = queue.add(() => Promise.resolve(42));
    await queue.run();
    let result = await promise;
    assert.equal(result, 42);
  });
  it(".add() should reject with the same error when the passed in function's promise rejects", async () => {
    let queue = new PromiseQueue();
    let error = new Error('Oh no!');
    let promise = queue.add(() => Promise.reject(error));
    await queue.run().catch(() => null);
    await promise.then(null, e => assert.equal(e, error));
  });
  it('constructor() should allow for configuration of max concurrent running functions', async () => {
    const maxConcurrent = 5;
    const queue = new PromiseQueue({maxConcurrent});
    let running = 0;
    new Array(100).fill(0).map(() =>
      queue.add(async () => {
        running++;
        // White-box check: the queue's internal counter must track ours.
        assert(queue._numRunning === running);
        assert(running <= maxConcurrent);
        // NOTE(review): Promise.resolve(...) only yields a microtask; it
        // does not sleep for the random duration the commented-out call
        // suggests was intended.
        await Promise.resolve(randomInt(1, 10)); //sleep(randomInt(1, 10));
        running--;
      }),
    );
    await queue.run();
  });
  it('.add() should notify subscribers', async () => {
    const queue = new PromiseQueue();
    const subscribedFn = sinon.spy();
    queue.subscribeToAdd(subscribedFn);
    const promise = queue.add(() => Promise.resolve());
    await queue.run();
    await promise;
    assert(subscribedFn.called);
  });
  it('.subscribeToAdd() should allow unsubscribing', async () => {
    const queue = new PromiseQueue();
    const subscribedFn = sinon.spy();
    const unsubscribe = queue.subscribeToAdd(subscribedFn);
    unsubscribe();
    const promise = queue.add(() => Promise.resolve());
    await queue.run();
    await promise;
    assert(!subscribedFn.called);
  });
});

View File

@@ -0,0 +1,52 @@
// @flow
import assert from 'assert';
import {
objectSortedEntries,
objectSortedEntriesDeep,
setDifference,
} from '../src/collection';
// Tests for the collection helpers in src/collection.
describe('objectSortedEntries', () => {
  it('returns a sorted list of key/value tuples', () => {
    assert.deepEqual(
      objectSortedEntries({foo: 'foo', baz: 'baz', bar: 'bar'}),
      [
        ['bar', 'bar'],
        ['baz', 'baz'],
        ['foo', 'foo'],
      ],
    );
  });
});
describe('objectSortedEntriesDeep', () => {
  it('returns a deeply sorted list of key/value tuples', () => {
    // Nested objects become sorted entry lists; arrays keep their order.
    assert.deepEqual(
      objectSortedEntriesDeep({
        foo: 'foo',
        baz: ['d', 'c'],
        bar: {g: 'g', b: 'b'},
      }),
      [
        [
          'bar',
          [
            ['b', 'b'],
            ['g', 'g'],
          ],
        ],
        ['baz', ['d', 'c']],
        ['foo', 'foo'],
      ],
    );
  });
});
describe('setDifference', () => {
  it('returns a setDifference of two sets of T type', () => {
    // NOTE(review): the expected value is the *symmetric* difference
    // (elements present in exactly one of the two sets), despite the name
    // suggesting a plain difference.
    assert.deepEqual(
      setDifference(new Set([1, 2, 3]), new Set([3, 4, 5])),
      new Set([1, 2, 4, 5]),
    );
  });
});

View File

@@ -0,0 +1,98 @@
// @flow strict-local
import assert from 'assert';
import {loadConfig} from '../src/config';
import {inputFS as fs} from '@parcel/test-utils';
import path from 'path';
// Integration tests for loadConfig against the fixtures in input/config.
// Each case passes (fs, configPath, fileNames, searchRoot) and inspects the
// parsed `config` property of the result.
describe('loadConfig', () => {
  it('load config with json', async () => {
    assert.deepEqual(
      (
        await loadConfig(
          fs,
          path.join(__dirname, './input/config/config.json'),
          ['config.json'],
          path.join(__dirname, './input/config/'),
        )
      )?.config,
      {
        hoge: 'fuga',
      },
    );
  });
  it('should throw error with empty string json', async () => {
    // $FlowFixMe[prop-missing]
    await assert.rejects(async () => {
      await loadConfig(
        fs,
        path.join(__dirname, './input/config/empty.json'),
        ['empty.json'],
        path.join(__dirname, './input/config/'),
      );
    });
  });
  it('should load with empty string config toml', async () => {
    // Unlike empty JSON, an empty TOML file parses to an empty object.
    assert.deepEqual(
      (
        await loadConfig(
          fs,
          path.join(__dirname, './input/config/empty.toml'),
          ['empty.toml'],
          path.join(__dirname, './input/config/'),
        )
      )?.config,
      {},
    );
  });
  it('should load with js', async () => {
    assert.deepEqual(
      (
        await loadConfig(
          fs,
          path.join(__dirname, './input/config/config.js'),
          ['config.js'],
          path.join(__dirname, './input/config/'),
        )
      )?.config,
      {
        hoge: 'fuga',
      },
    );
  });
  it('should load with cjs', async () => {
    assert.deepEqual(
      (
        await loadConfig(
          fs,
          path.join(__dirname, './input/config/config.cjs'),
          ['config.cjs'],
          path.join(__dirname, './input/config/'),
        )
      )?.config,
      {
        hoge: 'fuga',
      },
    );
  });
  it('should load without an extension as json', async () => {
    assert.deepEqual(
      (
        await loadConfig(
          fs,
          path.join(__dirname, './input/config/.testrc'),
          ['.testrc'],
          path.join(__dirname, './input/config/'),
        )
      )?.config,
      {
        hoge: 'fuga',
      },
    );
  });
});

View File

@@ -0,0 +1,3 @@
{
"hoge": "fuga"
}

View File

@@ -0,0 +1,3 @@
module.exports = {
hoge: 'fuga',
};

View File

@@ -0,0 +1,3 @@
module.exports = {
hoge: 'fuga',
};

View File

@@ -0,0 +1,3 @@
{
"hoge": "fuga"
}

View File

@@ -0,0 +1 @@
//@ sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmb28uanMiLCJiYXIuanMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O1VBQ0c7Ozs7Ozs7Ozs7Ozs7O3NCQ0RIO3NCQUNBIn0=

View File

@@ -0,0 +1,6 @@
if ((ref$ = options.map) === 'linked' || ref$ === 'debug') {
mapPath = path.basename(outputFilename) + ".map";
result.code += "\n//# sourceMappingURL=" + mapPath + "\n";
} else {
result.code += "\n//# sourceMappingURL=data:application/json;base64," + bufferFrom(result.map.toString()).toString('base64') + "\n";
}

View File

@@ -0,0 +1,2 @@
function hello(){var l="Hello",o="world";console.log(l+" "+o+"!")}hello();
//# sourceMappingURL=file://referenced-min.js.map

View File

@@ -0,0 +1,6 @@
{
"version":3,
"sources":["./referenced.js"],
"names":["hello","l","o","console","log"],
"mappings":"AAAA,SAASA,QACP,IAAIC,EAAI,QACNC,EAAI,QACNC,QAAQC,IAAIH,EAAI,IAAMC,EAAI,KAE5BF"
}

View File

@@ -0,0 +1,2 @@
function hello(){var l="Hello",o="world";console.log(l+" "+o+"!")}hello();
//# sourceMappingURL=source-root.js.map

View File

@@ -0,0 +1,7 @@
{
"version":3,
"sourceRoot": "../",
"sources":["./source.js"],
"names":["hello","l","o","console","log"],
"mappings":"AAAA,SAASA,QACP,IAAIC,EAAI,QACNC,EAAI,QACNC,QAAQC,IAAIH,EAAI,IAAMC,EAAI,KAE5BF"
}

View File

@@ -0,0 +1,33 @@
// @flow
import assert from 'assert';
import objectHash from '../src/objectHash';
// Tests for objectHash: structural hashing of plain objects.
describe('objectHash', () => {
  it('calculates the same hash for two different but deep equal objects', () => {
    const obj1 = {
      foo: {foo: 'foo', baz: ['foo', 'baz', 'bar'], bar: 'bar'},
      baz: 'baz',
      bar: 'bar',
    };
    const obj2 = {
      foo: {foo: 'foo', baz: ['foo', 'baz', 'bar'], bar: 'bar'},
      baz: 'baz',
      bar: 'bar',
    };
    assert.equal(objectHash(obj1), objectHash(obj2));
  });
  // Renamed: the previous description claimed "two deep equal objects", but
  // the fixtures differ ('ba' vs 'bar') — the point is hash inequality for
  // unequal inputs.
  it('calculates different hashes for two objects that are not deep equal', () => {
    const obj1 = {
      baz: 'baz',
      bar: 'ba',
    };
    const obj2 = {
      baz: 'baz',
      bar: 'bar',
    };
    assert.notEqual(objectHash(obj1), objectHash(obj2));
  });
});

View File

@@ -0,0 +1,17 @@
// @flow
import assert from 'assert';
import prettifyTime from '../src/prettifyTime';
// Tests for prettifyTime: millisecond counts below 1000 render as "Nms",
// everything else as seconds with two fractional digits.
describe('prettifyTime', () => {
  it('should format numbers less than 1000 as ms', () => {
    assert.equal(prettifyTime(888), '888ms');
    assert.equal(prettifyTime(50), '50ms');
    assert.equal(prettifyTime(0), '0ms');
  });
  it('should format numbers greater than 1000 as s with 2 fractional digits', () => {
    assert.equal(prettifyTime(4000), '4.00s');
    assert.equal(prettifyTime(90000), '90.00s');
    // Rounding, not truncation: 45678ms -> 45.68s.
    assert.equal(prettifyTime(45678), '45.68s');
  });
});

View File

@@ -0,0 +1,268 @@
// @flow strict-local
import type {NamedBundle, Dependency} from '@parcel/types';
import assert from 'assert';
import {getURLReplacement} from '../src/replaceBundleReferences';
describe('replace bundle references', () => {
it('Query params and named pipeline, relative', () => {
// $FlowFixMe
let fromBundle: NamedBundle = {
filePath: '/user/dist/reformat.html',
name: 'reformat.html',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: '/',
},
};
// $FlowFixMe
let toBundle: NamedBundle = {
filePath:
'/user/dist/image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
name: 'image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: '/',
},
};
// $FlowFixMe
let dependency: Dependency = {
id: '074b36596e3147e900a8ad17ceb5c90b',
specifier: 'url:./image.jpg?as=webp',
specifierType: 'esm',
};
let result = getURLReplacement({
dependency,
fromBundle,
toBundle,
relative: true,
});
assert.equal(
result.to,
'image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
);
assert.equal(result.from, '074b36596e3147e900a8ad17ceb5c90b');
});
it('Query params and named pipeline, absolute', () => {
// $FlowFixMe
let fromBundle: NamedBundle = {
filePath: '/user/dist/reformat.html',
name: 'reformat.html',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: '/',
},
};
// $FlowFixMe
let toBundle: NamedBundle = {
filePath:
'/user/dist/image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
name: 'image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: '/',
},
};
// $FlowFixMe
let dependency: Dependency = {
id: '074b36596e3147e900a8ad17ceb5c90b',
specifier: 'url:./image.jpg?as=webp',
specifierType: 'esm',
};
let result = getURLReplacement({
dependency,
fromBundle,
toBundle,
relative: false,
});
assert.equal(
result.to,
'/image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
);
assert.equal(result.from, '074b36596e3147e900a8ad17ceb5c90b');
});
it('Custom Public URL', () => {
// $FlowFixMe
let fromBundle: NamedBundle = {
filePath: '/user/dist/reformat.html',
name: 'reformat.html',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: 'https://test.com/static',
},
};
// $FlowFixMe
let toBundle: NamedBundle = {
filePath:
'/user/dist/image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
name: 'image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: 'https://test.com/static',
},
};
// $FlowFixMe
let dependency: Dependency = {
id: '074b36596e314797845a8ad17ceb5c9b',
specifier: './image.jpg',
specifierType: 'esm',
};
let result = getURLReplacement({
dependency,
fromBundle,
toBundle,
relative: false,
});
assert.equal(
result.to,
'https://test.com/static/image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
);
assert.equal(result.from, '074b36596e314797845a8ad17ceb5c9b');
});
it('Relative with folders in between', () => {
// $FlowFixMe
let fromBundle: NamedBundle = {
filePath: '/user/dist/reformat.html',
name: 'reformat.html',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: 'https://test.com/static',
},
};
// $FlowFixMe
let toBundle: NamedBundle = {
filePath:
'/user/dist/assets/image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
name: 'image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
// $FlowFixMe
target: {
distDir: '/user/dist/assets',
publicUrl: 'https://test.com/static',
},
};
// $FlowFixMe
let dependency: Dependency = {
id: '074b36596e3147e900a8ad17ceb5c90b',
specifier: 'url:./image.jpg?as=webp',
specifierType: 'esm',
};
let result = getURLReplacement({
dependency,
fromBundle,
toBundle,
relative: true,
});
assert.equal(
result.to,
'assets/image.HASH_REF_87f9d66c16c2216ccc7e5664cf089305.webp',
);
assert.equal(result.from, '074b36596e3147e900a8ad17ceb5c90b');
});
it('should work with bundle names with colons, relative', () => {
// $FlowFixMe
let fromBundle: NamedBundle = {
filePath: '/user/dist/reformat.html',
name: 'reformat.html',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: '/',
},
};
// $FlowFixMe
let toBundle: NamedBundle = {
filePath: '/user/dist/a:b:c.html',
name: 'a:b:c.html',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: '/',
},
};
// $FlowFixMe
let dependency: Dependency = {
id: '074b36596e3147e900a8ad17ceb5c90b',
specifier: './a:b:c.html',
specifierType: 'esm',
};
let result = getURLReplacement({
dependency,
fromBundle,
toBundle,
relative: true,
});
assert.equal(result.to, './a:b:c.html');
});
it('should work with bundle names with colons, absolute', () => {
// $FlowFixMe
let fromBundle: NamedBundle = {
filePath: '/user/dist/reformat.html',
name: 'reformat.html',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: '/',
},
};
// $FlowFixMe
let toBundle: NamedBundle = {
filePath: '/user/dist/a:b:c.html',
name: 'a:b:c.html',
// $FlowFixMe
target: {
distDir: '/user/dist',
publicUrl: '/',
},
};
// $FlowFixMe
let dependency: Dependency = {
id: '074b36596e3147e900a8ad17ceb5c90b',
specifier: './a:b:c.html',
specifierType: 'esm',
};
let result = getURLReplacement({
dependency,
fromBundle,
toBundle,
relative: false,
});
assert.equal(result.to, '/a:b:c.html');
});
});

View File

@@ -0,0 +1,207 @@
import assert from 'assert';
import {
matchSourceMappingURL,
loadSourceMapUrl,
loadSourceMap,
} from '../src/sourcemap';
import {NodeFS} from '@parcel/fs';
import path from 'path';
const fs = new NodeFS();
// Tests for matchSourceMappingURL / loadSourceMapUrl / loadSourceMap against
// the fixtures in input/sourcemap.
describe('loadSourceMap', () => {
  it('should not match sourceMappingURL when not at the end of the bundle', () => {
    // Code example taken from livescript.js (issue #2408 in parcel-bundler)
    // This snippet lead to JSAsset.js being mislead and incorrectly trying to
    // load (due to false-positive match) sourcemap before fix was introduced
    let code = fs.readFileSync(
      path.join(__dirname, './input/sourcemap/no-sourcemap.js'),
      'utf-8',
    );
    assert(!matchSourceMappingURL(code));
  });
  it('should match referenced-min sourceMappingURL when correctly inserted at end of the bundle', () => {
    let code = fs.readFileSync(
      path.join(__dirname, './input/sourcemap/referenced-min.js'),
      'utf-8',
    );
    assert(!!matchSourceMappingURL(code));
  });
  it('should match inline sourceMappingURL when correctly inserted at end of the bundle', () => {
    // inline source map taken from https://github.com/thlorenz/inline-source-map
    let code = fs.readFileSync(
      path.join(__dirname, './input/sourcemap/inline.js'),
      'utf-8',
    );
    assert(!!matchSourceMappingURL(code));
  });
  it('Should be able to load sourcemap data from a url reference', async () => {
    let filename = path.join(__dirname, './input/sourcemap/referenced-min.js');
    let contents = fs.readFileSync(filename, 'utf-8');
    let foundMap = await loadSourceMapUrl(fs, filename, contents);
    assert.equal(foundMap.url, 'file://referenced-min.js.map');
    assert.equal(
      foundMap.filename,
      path.join(__dirname, 'input/sourcemap/referenced-min.js.map'),
    );
    assert.deepEqual(foundMap.map, {
      version: 3,
      sources: ['./referenced.js'],
      names: ['hello', 'l', 'o', 'console', 'log'],
      mappings:
        'AAAA,SAASA,QACP,IAAIC,EAAI,QACNC,EAAI,QACNC,QAAQC,IAAIH,EAAI,IAAMC,EAAI,KAE5BF',
    });
  });
  it('Should be able to load sourcemap data from an inline url reference', async () => {
    let filename = path.join(__dirname, './input/sourcemap/inline.js');
    let contents = fs.readFileSync(filename, 'utf-8');
    let foundMap = await loadSourceMapUrl(fs, filename, contents);
    assert.equal(
      foundMap.url,
      'data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmb28uanMiLCJiYXIuanMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O1VBQ0c7Ozs7Ozs7Ozs7Ozs7O3NCQ0RIO3NCQUNBIn0=',
    );
    // Inline maps have no file of their own; the host file is reported.
    assert.equal(foundMap.filename, filename);
    assert.deepEqual(foundMap.map, {
      version: 3,
      file: '',
      sources: ['foo.js', 'bar.js'],
      names: [],
      mappings: ';;;;;;;;;UACG;;;;;;;;;;;;;;sBCDH;sBACA',
    });
  });
  it('Should be able to load a SourceMap instance from a file', async () => {
    let filename = path.join(__dirname, './input/sourcemap/referenced-min.js');
    let contents = fs.readFileSync(filename, 'utf-8');
    let map = await loadSourceMap(filename, contents, {
      fs,
      projectRoot: __dirname,
    });
    assert(!!map);
    let parsedMap = map.getMap();
    assert.deepEqual(parsedMap.sources, ['input/sourcemap/referenced.js']);
    assert.deepEqual(parsedMap.names, ['hello', 'l', 'o', 'console', 'log']);
    assert.deepEqual(parsedMap.mappings, [
      {
        generated: {line: 1, column: 0},
        original: {line: 1, column: 0},
        source: 0,
      },
      {
        generated: {line: 1, column: 9},
        original: {line: 1, column: 9},
        source: 0,
        name: 0,
      },
      {
        generated: {line: 1, column: 17},
        original: {line: 2, column: 2},
        source: 0,
      },
      {
        generated: {line: 1, column: 21},
        original: {line: 2, column: 6},
        source: 0,
        name: 1,
      },
      {
        generated: {line: 1, column: 23},
        original: {line: 2, column: 10},
        source: 0,
      },
      {
        generated: {line: 1, column: 31},
        original: {line: 3, column: 4},
        source: 0,
        name: 2,
      },
      {
        generated: {line: 1, column: 33},
        original: {line: 3, column: 8},
        source: 0,
      },
      {
        generated: {line: 1, column: 41},
        original: {line: 4, column: 2},
        source: 0,
        name: 3,
      },
      {
        generated: {line: 1, column: 49},
        original: {line: 4, column: 10},
        source: 0,
        name: 4,
      },
      {
        generated: {line: 1, column: 53},
        original: {line: 4, column: 14},
        source: 0,
        name: 1,
      },
      {
        generated: {line: 1, column: 55},
        original: {line: 4, column: 18},
        source: 0,
      },
      {
        generated: {line: 1, column: 59},
        original: {line: 4, column: 24},
        source: 0,
        name: 2,
      },
      {
        generated: {line: 1, column: 61},
        original: {line: 4, column: 28},
        source: 0,
      },
      {
        generated: {line: 1, column: 66},
        original: {line: 6, column: 0},
        source: 0,
        name: 0,
      },
    ]);
  });
  // Renamed: this test previously duplicated the next test's name
  // ("Should remap sources when using sourceRoot"), but referenced-min.js.map
  // sets no sourceRoot — it verifies resolution relative to the map location.
  it('Should resolve sources relative to the map location when no sourceRoot is set', async () => {
    let filename = path.join(__dirname, './input/sourcemap/referenced-min.js');
    let contents = fs.readFileSync(filename, 'utf-8');
    let map = await loadSourceMap(filename, contents, {
      fs,
      projectRoot: __dirname,
    });
    assert(!!map);
    let parsedMap = map.getMap();
    assert.deepEqual(parsedMap.sources, ['input/sourcemap/referenced.js']);
  });
  it('Should remap sources when using sourceRoot', async () => {
    let filename = path.join(__dirname, './input/sourcemap/source-root.js');
    let contents = fs.readFileSync(filename, 'utf-8');
    let map = await loadSourceMap(filename, contents, {
      fs,
      projectRoot: __dirname,
    });
    assert(!!map);
    let parsedMap = map.getMap();
    // sourceRoot '../' moves the source one directory up from the map.
    assert.deepEqual(parsedMap.sources, ['input/source.js']);
  });
});

View File

@@ -0,0 +1,44 @@
// @flow strict-local
import assert from 'assert';
import sinon from 'sinon';
import throttle from '../src/throttle';
// Tests for throttle: at most one underlying invocation per delay window.
describe('throttle', () => {
  it("doesn't invoke a function more than once in a given interval", () => {
    let spy = sinon.spy();
    let throttled = throttle(spy, 100);
    throttled(1);
    throttled(2);
    throttled(3);
    // Only the first call within the 100ms window goes through, with its
    // original argument.
    assert(spy.calledOnceWithExactly(1));
  });
  it('calls the underlying function again once the interval has passed', () => {
    // Fake timers make the interval boundary deterministic.
    let time = sinon.useFakeTimers();
    let spy = sinon.spy();
    let throttled = throttle(spy, 100);
    throttled(1);
    throttled(2);
    throttled(3);
    time.tick(100);
    throttled(4);
    assert.deepEqual(spy.args, [[1], [4]]);
    time.restore();
  });
  it('preserves the `this` when throttled functions are invoked', () => {
    let result;
    let throttled = throttle(function () {
      result = this.bar;
    }, 100);
    throttled.call({bar: 'baz'});
    assert(result === 'baz');
  });
});

View File

@@ -0,0 +1,37 @@
// @flow strict-local
import assert from 'assert';
import urlJoin from '../src/urlJoin';
// Tests for urlJoin: query strings are preserved, Windows separators are
// normalized, and colons in filenames are not treated as URL schemes.
describe('urlJoin', () => {
  it('Should join two paths', () => {
    let joinedUrl = urlJoin('/', './image.jpeg?test=test');
    assert.equal(joinedUrl, '/image.jpeg?test=test');
  });
  it('Should join two paths with longer publicUrl', () => {
    let joinedUrl = urlJoin('/static', './image.jpeg?test=test');
    assert.equal(joinedUrl, '/static/image.jpeg?test=test');
  });
  // Renamed from a duplicate of the previous test's description: this
  // variant exercises an asset path without a leading './'.
  it('Should join two paths with longer publicUrl and no leading ./', () => {
    let joinedUrl = urlJoin('/static', 'image.jpeg?test=test');
    assert.equal(joinedUrl, '/static/image.jpeg?test=test');
  });
  it('Should turn windows path into posix', () => {
    let joinedUrl = urlJoin('/static', '.\\image.jpeg?test=test');
    assert.equal(joinedUrl, '/static/image.jpeg?test=test');
  });
  it('should support paths with colons', () => {
    let joinedUrl = urlJoin('/static', 'a:b:c.html');
    assert.equal(joinedUrl, '/static/a:b:c.html');
    joinedUrl = urlJoin('/static', '/a:b:c.html');
    assert.equal(joinedUrl, '/static/a:b:c.html');
    joinedUrl = urlJoin('/static', './a:b:c.html');
    assert.equal(joinedUrl, '/static/a:b:c.html');
  });
});