Larry Babby and three.js for GLSL

Sam
2024-06-24 21:24:00 +12:00
parent 87d5dc634d
commit 907ebae4c0
6,474 changed files with 1,279,596 additions and 8 deletions

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,11 @@
import type {FilePath} from '@parcel/types';
import type {Cache} from './lib/types';
export type {Cache} from './lib/types';
export const FSCache: {
new (cacheDir: FilePath): Cache
};
export const LMDBCache: {
new (cacheDir: FilePath): Cache
};
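As a quick orientation to these typings, here is a minimal sketch of constructing a cache through the declared constructor shape — assuming @parcel/cache and its @parcel/core peer dependency are installed; the '.parcel-cache' directory name is illustrative, not part of the package's API.

// Hypothetical usage of the typings above: LMDBCache takes a cache
// directory and returns an object satisfying the Cache interface.
const {LMDBCache} = require('@parcel/cache');

async function main() {
  const cache = new LMDBCache('.parcel-cache'); // directory name is illustrative
  await cache.ensure();
  await cache.set('answer', {value: 42});
  console.log(await cache.has('answer')); // true
  console.log(await cache.get('answer')); // { value: 42 }
}

main().catch(console.error);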

@@ -0,0 +1,167 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.FSCache = void 0;
function _stream() {
const data = _interopRequireDefault(require("stream"));
_stream = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _util() {
const data = require("util");
_util = function () {
return data;
};
return data;
}
function _logger() {
const data = _interopRequireDefault(require("@parcel/logger"));
_logger = function () {
return data;
};
return data;
}
function _core() {
const data = require("@parcel/core");
_core = function () {
return data;
};
return data;
}
var _package = _interopRequireDefault(require("../package.json"));
var _constants = require("./constants");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classPrivateMethodInitSpec(obj, privateSet) { _checkPrivateRedeclaration(obj, privateSet); privateSet.add(obj); }
function _checkPrivateRedeclaration(obj, privateCollection) { if (privateCollection.has(obj)) { throw new TypeError("Cannot initialize the same private elements twice on an object"); } }
function _classPrivateMethodGet(receiver, privateSet, fn) { if (!privateSet.has(receiver)) { throw new TypeError("attempted to get private field on non-instance"); } return fn; } // flowlint-next-line untyped-import:off
const pipeline = (0, _util().promisify)(_stream().default.pipeline);
var _getFilePath = /*#__PURE__*/new WeakSet();
var _unlinkChunks = /*#__PURE__*/new WeakSet();
class FSCache {
constructor(fs, cacheDir) {
_classPrivateMethodInitSpec(this, _unlinkChunks);
_classPrivateMethodInitSpec(this, _getFilePath);
this.fs = fs;
this.dir = cacheDir;
}
async ensure() {
// First, create the main cache directory if necessary.
await this.fs.mkdirp(this.dir);
// In parallel, create a sub-directory for every possible two-character hex prefix (00-ff).
// This speeds up large caches on many file systems, since fewer files sit in any single directory.
let dirPromises = [];
for (let i = 0; i < 256; i++) {
dirPromises.push(this.fs.mkdirp(_path().default.join(this.dir, ('00' + i.toString(16)).slice(-2))));
}
await Promise.all(dirPromises);
}
_getCachePath(cacheId) {
return _path().default.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
}
getStream(key) {
return this.fs.createReadStream(this._getCachePath(`${key}-large`));
}
setStream(key, stream) {
return pipeline(stream, this.fs.createWriteStream(this._getCachePath(`${key}-large`)));
}
has(key) {
return this.fs.exists(this._getCachePath(key));
}
getBlob(key) {
return this.fs.readFile(this._getCachePath(key));
}
async setBlob(key, contents) {
await this.fs.writeFile(this._getCachePath(key), contents);
}
async getBuffer(key) {
try {
return await this.fs.readFile(this._getCachePath(key));
} catch (err) {
if (err.code === 'ENOENT') {
return null;
} else {
throw err;
}
}
}
hasLargeBlob(key) {
return this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0));
}
async getLargeBlob(key) {
const buffers = [];
for (let i = 0; await this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i)); i += 1) {
const file = this.fs.readFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i));
buffers.push(file);
}
return Buffer.concat(await Promise.all(buffers));
}
async setLargeBlob(key, contents, options) {
const chunks = Math.ceil(contents.length / _constants.WRITE_LIMIT_CHUNK);
const writePromises = [];
if (chunks === 1) {
// If there's one chunk, don't slice the content
writePromises.push(this.fs.writeFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0), contents, {
signal: options === null || options === void 0 ? void 0 : options.signal
}));
} else {
for (let i = 0; i < chunks; i += 1) {
writePromises.push(this.fs.writeFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i), typeof contents === 'string' ? contents.slice(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK) : contents.subarray(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK), {
signal: options === null || options === void 0 ? void 0 : options.signal
}));
}
}
// If there are already files following this chunk, they are stale leftovers and should be removed
writePromises.push(_classPrivateMethodGet(this, _unlinkChunks, _unlinkChunks2).call(this, key, chunks));
await Promise.all(writePromises);
}
async get(key) {
try {
let data = await this.fs.readFile(this._getCachePath(key));
return (0, _core().deserialize)(data);
} catch (err) {
if (err.code === 'ENOENT') {
return null;
} else {
throw err;
}
}
}
async set(key, value) {
try {
let blobPath = this._getCachePath(key);
let data = (0, _core().serialize)(value);
await this.fs.writeFile(blobPath, data);
} catch (err) {
_logger().default.error(err, '@parcel/cache');
}
}
refresh() {
// NOOP
}
}
exports.FSCache = FSCache;
function _getFilePath2(key, index) {
return _path().default.join(this.dir, `${key}-${index}`);
}
async function _unlinkChunks2(key, index) {
try {
await this.fs.unlink(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, index));
await _classPrivateMethodGet(this, _unlinkChunks, _unlinkChunks2).call(this, key, index + 1);
} catch (err) {
// If there's an error, no more chunks are left to delete
}
}
(0, _core().registerSerializableClass)(`${_package.default.version}:FSCache`, FSCache);
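To make the directory sharding above concrete, a standalone sketch of the path scheme used by ensure() and _getCachePath() — the cache directory and id here are illustrative:

// The first two hex characters of a cache id select one of the 256
// shard directories created by ensure(); the rest becomes the filename.
const path = require('path');

function getCachePath(dir, cacheId) {
  return path.join(dir, cacheId.slice(0, 2), cacheId.slice(2));
}

console.log(getCachePath('/tmp/parcel-cache', 'ab12cd34'));
// => /tmp/parcel-cache/ab/12cd34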

@@ -0,0 +1,145 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.IDBCache = void 0;
function _stream() {
const data = require("stream");
_stream = function () {
return data;
};
return data;
}
function _core() {
const data = require("@parcel/core");
_core = function () {
return data;
};
return data;
}
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
var _package = _interopRequireDefault(require("../package.json"));
function _idb() {
const data = require("idb");
_idb = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// $FlowFixMe[untyped-import]
// $FlowFixMe[untyped-import]
const STORE_NAME = 'cache';
class IDBCache {
// $FlowFixMe
constructor() {
this.store = (0, _idb().openDB)('REPL-parcel-cache', 1, {
upgrade(db) {
db.createObjectStore(STORE_NAME);
},
blocked() {},
blocking() {},
terminated() {}
});
}
ensure() {
return Promise.resolve();
}
serialize() {
return {
/*::...null*/
};
}
static deserialize() {
return new IDBCache();
}
async has(key) {
// this.store is a promise for the database returned by openDB, so it
// must be awaited before the object store can be queried for the key.
return (await (await this.store).get(STORE_NAME, key)) != null;
}
async get(key) {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return null;
}
return Promise.resolve((0, _core().deserialize)(data));
}
async set(key, value) {
await (await this.store).put(STORE_NAME, (0, _core().serialize)(value), key);
}
getStream(key) {
let dataPromise = this.store.then(s => s.get(STORE_NAME, key)).then(d => Buffer.from(d)).catch(e => e);
const stream = new (_stream().Readable)({
// $FlowFixMe(incompatible-call)
async read() {
let data = await dataPromise;
if (data instanceof Error) {
stream.emit('error', data);
} else {
stream.push(Buffer.from(data));
stream.push(null);
}
}
});
return stream;
}
async setStream(key, stream) {
let buf = await (0, _utils().bufferStream)(stream);
await (await this.store).put(STORE_NAME, buf, key);
}
async getBlob(key) {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return Promise.reject(new Error(`Key ${key} not found in cache`));
}
return Buffer.from(data.buffer);
}
async setBlob(key, contents) {
let data = contents instanceof Uint8Array ? contents : Buffer.from(contents);
await (await this.store).put(STORE_NAME, data, key);
}
// async setBlobs(
// entries: $ReadOnlyArray<[string, Buffer | string]>,
// ): Promise<void> {
// const tx = (await this.store).transaction(STORE_NAME, 'readwrite');
// await Promise.all([
// ...entries.map(([key, value]) =>
// tx.store.put(
// value instanceof Uint8Array ? value : Buffer.from(value),
// key,
// ),
// ),
// tx.done,
// ]);
// }
async getBuffer(key) {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return null;
}
return Buffer.from(data.buffer);
}
hasLargeBlob(key) {
return this.has(key);
}
getLargeBlob(key) {
return this.getBlob(key);
}
setLargeBlob(key, contents) {
return this.setBlob(key, contents);
}
refresh() {
// NOOP
}
}
exports.IDBCache = IDBCache;
(0, _core().registerSerializableClass)(`${_package.default.version}:IDBCache`, IDBCache);

@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.IDBCache = void 0;
// $FlowFixMe
class IDBCache {
constructor() {
throw new Error('IDBCache is only supported in the browser');
}
}
exports.IDBCache = IDBCache;

@@ -0,0 +1,137 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.LMDBCache = void 0;
function _stream() {
const data = _interopRequireDefault(require("stream"));
_stream = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
function _util() {
const data = require("util");
_util = function () {
return data;
};
return data;
}
function _core() {
const data = require("@parcel/core");
_core = function () {
return data;
};
return data;
}
function _fs() {
const data = require("@parcel/fs");
_fs = function () {
return data;
};
return data;
}
var _package = _interopRequireDefault(require("../package.json"));
function _lmdb() {
const data = _interopRequireDefault(require("lmdb"));
_lmdb = function () {
return data;
};
return data;
}
var _FSCache = require("./FSCache");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classPrivateMethodInitSpec(obj, privateSet) { _checkPrivateRedeclaration(obj, privateSet); privateSet.add(obj); }
function _checkPrivateRedeclaration(obj, privateCollection) { if (privateCollection.has(obj)) { throw new TypeError("Cannot initialize the same private elements twice on an object"); } }
function _classPrivateMethodGet(receiver, privateSet, fn) { if (!privateSet.has(receiver)) { throw new TypeError("attempted to get private field on non-instance"); } return fn; } // flowlint-next-line untyped-import:off
// $FlowFixMe
const pipeline = (0, _util().promisify)(_stream().default.pipeline);
var _getFilePath = /*#__PURE__*/new WeakSet();
class LMDBCache {
// $FlowFixMe
constructor(cacheDir) {
_classPrivateMethodInitSpec(this, _getFilePath);
this.fs = new (_fs().NodeFS)();
this.dir = cacheDir;
this.fsCache = new _FSCache.FSCache(this.fs, cacheDir);
this.store = _lmdb().default.open(cacheDir, {
name: 'parcel-cache',
encoding: 'binary',
compression: true
});
}
ensure() {
return Promise.resolve();
}
serialize() {
return {
dir: this.dir
};
}
static deserialize(opts) {
return new LMDBCache(opts.dir);
}
has(key) {
return Promise.resolve(this.store.get(key) != null);
}
get(key) {
let data = this.store.get(key);
if (data == null) {
return Promise.resolve(null);
}
return Promise.resolve((0, _core().deserialize)(data));
}
async set(key, value) {
await this.setBlob(key, (0, _core().serialize)(value));
}
getStream(key) {
return this.fs.createReadStream(_path().default.join(this.dir, key));
}
setStream(key, stream) {
return pipeline(stream, this.fs.createWriteStream(_path().default.join(this.dir, key)));
}
getBlob(key) {
let buffer = this.store.get(key);
return buffer != null ? Promise.resolve(buffer) : Promise.reject(new Error(`Key ${key} not found in cache`));
}
async setBlob(key, contents) {
await this.store.put(key, contents);
}
getBuffer(key) {
return Promise.resolve(this.store.get(key));
}
hasLargeBlob(key) {
return this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0));
}
// eslint-disable-next-line require-await
async getLargeBlob(key) {
return this.fsCache.getLargeBlob(key);
}
// eslint-disable-next-line require-await
async setLargeBlob(key, contents, options) {
return this.fsCache.setLargeBlob(key, contents, options);
}
refresh() {
// Reset the read transaction for the store. This guarantees that
// the next read will see the latest changes to the store.
// Useful in scenarios where reads and writes are multi-threaded.
// See https://github.com/kriszyp/lmdb-js#resetreadtxn-void
this.store.resetReadTxn();
}
}
exports.LMDBCache = LMDBCache;
function _getFilePath2(key, index) {
return _path().default.join(this.dir, `${key}-${index}`);
}
(0, _core().registerSerializableClass)(`${_package.default.version}:LMDBCache`, LMDBCache);

@@ -0,0 +1,8 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.WRITE_LIMIT_CHUNK = void 0;
// Node cannot write more than 2 GiB to a file in a single operation, so large blobs are split into chunks of this size
const WRITE_LIMIT_CHUNK = exports.WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;

@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _LMDBCache = require("./LMDBCache");
Object.keys(_LMDBCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _LMDBCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _LMDBCache[key];
}
});
});
var _FSCache = require("./FSCache");
Object.keys(_FSCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _FSCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _FSCache[key];
}
});
});
var _IDBCache = require("./IDBCache");
Object.keys(_IDBCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _IDBCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _IDBCache[key];
}
});
});

@@ -0,0 +1,25 @@
import type { Readable } from "stream";
import type { AbortSignal } from "abortcontroller-polyfill/dist/cjs-ponyfill";
export interface Cache {
ensure(): Promise<void>;
has(key: string): Promise<boolean>;
get<T>(key: string): Promise<T | null | undefined>;
set(key: string, value: unknown): Promise<void>;
getStream(key: string): Readable;
setStream(key: string, stream: Readable): Promise<void>;
getBlob(key: string): Promise<Buffer>;
setBlob(key: string, contents: Buffer | string): Promise<void>;
hasLargeBlob(key: string): Promise<boolean>;
getLargeBlob(key: string): Promise<Buffer>;
setLargeBlob(key: string, contents: Buffer | string, options?: {
signal?: AbortSignal;
}): Promise<void>;
getBuffer(key: string): Promise<Buffer | null | undefined>;
/**
* In a multi-threaded environment, where there are potentially multiple Cache
* instances writing to the cache, ensure that this instance has the latest view
* of the changes that may have been written to the cache in other threads.
*/
refresh(): void;
}
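Since the interface above is the whole contract, here is a small illustrative helper (not part of the package) that memoizes an async computation through any Cache implementation:

// Hypothetical helper built only on the Cache interface: return the
// cached value for `key` if present, otherwise compute and store it.
async function cached(cache, key, compute) {
  if (await cache.has(key)) {
    return cache.get(key);
  }
  const value = await compute();
  await cache.set(key, value);
  return value;
}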

@@ -0,0 +1 @@
"use strict";

@@ -0,0 +1,43 @@
{
"name": "@parcel/cache",
"version": "2.12.0",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"main": "lib/index.js",
"source": "src/index.js",
"types": "index.d.ts",
"engines": {
"node": ">= 12.0.0"
},
"scripts": {
"build-ts": "mkdir -p lib && flow-to-ts src/types.js > lib/types.d.ts",
"check-ts": "tsc --noEmit index.d.ts"
},
"dependencies": {
"@parcel/fs": "2.12.0",
"@parcel/logger": "2.12.0",
"@parcel/utils": "2.12.0",
"lmdb": "2.8.5"
},
"peerDependencies": {
"@parcel/core": "^2.12.0"
},
"devDependencies": {
"idb": "^5.0.8"
},
"browser": {
"./src/IDBCache.js": "./src/IDBCache.browser.js",
"./src/LMDBCache.js": false
},
"gitHead": "2059029ee91e5f03a273b0954d3e629d7375f986"
}

@@ -0,0 +1,184 @@
// @flow strict-local
import type {Readable, Writable} from 'stream';
import type {FilePath} from '@parcel/types';
import type {FileSystem} from '@parcel/fs';
import type {Cache} from './types';
import type {AbortSignal} from 'abortcontroller-polyfill/dist/cjs-ponyfill';
import stream from 'stream';
import path from 'path';
import {promisify} from 'util';
import logger from '@parcel/logger';
import {serialize, deserialize, registerSerializableClass} from '@parcel/core';
// flowlint-next-line untyped-import:off
import packageJson from '../package.json';
import {WRITE_LIMIT_CHUNK} from './constants';
const pipeline: (Readable, Writable) => Promise<void> = promisify(
stream.pipeline,
);
export class FSCache implements Cache {
fs: FileSystem;
dir: FilePath;
constructor(fs: FileSystem, cacheDir: FilePath) {
this.fs = fs;
this.dir = cacheDir;
}
async ensure(): Promise<void> {
// First, create the main cache directory if necessary.
await this.fs.mkdirp(this.dir);
// In parallel, create a sub-directory for every possible two-character hex prefix (00-ff).
// This speeds up large caches on many file systems, since fewer files sit in any single directory.
let dirPromises = [];
for (let i = 0; i < 256; i++) {
dirPromises.push(
this.fs.mkdirp(path.join(this.dir, ('00' + i.toString(16)).slice(-2))),
);
}
await Promise.all(dirPromises);
}
_getCachePath(cacheId: string): FilePath {
return path.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
}
getStream(key: string): Readable {
return this.fs.createReadStream(this._getCachePath(`${key}-large`));
}
setStream(key: string, stream: Readable): Promise<void> {
return pipeline(
stream,
this.fs.createWriteStream(this._getCachePath(`${key}-large`)),
);
}
has(key: string): Promise<boolean> {
return this.fs.exists(this._getCachePath(key));
}
getBlob(key: string): Promise<Buffer> {
return this.fs.readFile(this._getCachePath(key));
}
async setBlob(key: string, contents: Buffer | string): Promise<void> {
await this.fs.writeFile(this._getCachePath(key), contents);
}
async getBuffer(key: string): Promise<?Buffer> {
try {
return await this.fs.readFile(this._getCachePath(key));
} catch (err) {
if (err.code === 'ENOENT') {
return null;
} else {
throw err;
}
}
}
#getFilePath(key: string, index: number): string {
return path.join(this.dir, `${key}-${index}`);
}
async #unlinkChunks(key: string, index: number): Promise<void> {
try {
await this.fs.unlink(this.#getFilePath(key, index));
await this.#unlinkChunks(key, index + 1);
} catch (err) {
// If there's an error, no more chunks are left to delete
}
}
hasLargeBlob(key: string): Promise<boolean> {
return this.fs.exists(this.#getFilePath(key, 0));
}
async getLargeBlob(key: string): Promise<Buffer> {
const buffers: Promise<Buffer>[] = [];
for (let i = 0; await this.fs.exists(this.#getFilePath(key, i)); i += 1) {
const file: Promise<Buffer> = this.fs.readFile(this.#getFilePath(key, i));
buffers.push(file);
}
return Buffer.concat(await Promise.all(buffers));
}
async setLargeBlob(
key: string,
contents: Buffer | string,
options?: {|signal?: AbortSignal|},
): Promise<void> {
const chunks = Math.ceil(contents.length / WRITE_LIMIT_CHUNK);
const writePromises: Promise<void>[] = [];
if (chunks === 1) {
// If there's one chunk, don't slice the content
writePromises.push(
this.fs.writeFile(this.#getFilePath(key, 0), contents, {
signal: options?.signal,
}),
);
} else {
for (let i = 0; i < chunks; i += 1) {
writePromises.push(
this.fs.writeFile(
this.#getFilePath(key, i),
typeof contents === 'string'
? contents.slice(
i * WRITE_LIMIT_CHUNK,
(i + 1) * WRITE_LIMIT_CHUNK,
)
: contents.subarray(
i * WRITE_LIMIT_CHUNK,
(i + 1) * WRITE_LIMIT_CHUNK,
),
{signal: options?.signal},
),
);
}
}
// If there are already files following this chunk, they are stale leftovers and should be removed
writePromises.push(this.#unlinkChunks(key, chunks));
await Promise.all(writePromises);
}
async get<T>(key: string): Promise<?T> {
try {
let data = await this.fs.readFile(this._getCachePath(key));
return deserialize(data);
} catch (err) {
if (err.code === 'ENOENT') {
return null;
} else {
throw err;
}
}
}
async set(key: string, value: mixed): Promise<void> {
try {
let blobPath = this._getCachePath(key);
let data = serialize(value);
await this.fs.writeFile(blobPath, data);
} catch (err) {
logger.error(err, '@parcel/cache');
}
}
refresh(): void {
// NOOP
}
}
registerSerializableClass(`${packageJson.version}:FSCache`, FSCache);
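To spell out the chunk arithmetic in setLargeBlob above, a worked sketch with an illustrative 5 GiB payload:

// WRITE_LIMIT_CHUNK is 2 GiB, so a 5 GiB blob becomes three files:
// `${key}-0` and `${key}-1` at 2 GiB each, and `${key}-2` at 1 GiB.
const WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;
const contentLength = 5 * 1024 ** 3; // hypothetical payload size

const chunks = Math.ceil(contentLength / WRITE_LIMIT_CHUNK); // => 3
for (let i = 0; i < chunks; i += 1) {
  const start = i * WRITE_LIMIT_CHUNK;
  const end = Math.min((i + 1) * WRITE_LIMIT_CHUNK, contentLength);
  console.log(`chunk ${i}: bytes ${start}..${end - 1}`);
}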

@@ -0,0 +1,141 @@
// @flow strict-local
import type {Cache} from './types';
import {Readable} from 'stream';
import {serialize, deserialize, registerSerializableClass} from '@parcel/core';
import {bufferStream} from '@parcel/utils';
// $FlowFixMe[untyped-import]
import packageJson from '../package.json';
// $FlowFixMe[untyped-import]
import {openDB} from 'idb';
const STORE_NAME = 'cache';
export class IDBCache implements Cache {
// $FlowFixMe
store: any;
constructor() {
this.store = openDB('REPL-parcel-cache', 1, {
upgrade(db) {
db.createObjectStore(STORE_NAME);
},
blocked() {},
blocking() {},
terminated() {},
});
}
ensure(): Promise<void> {
return Promise.resolve();
}
serialize(): {||} {
return {
/*::...null*/
};
}
static deserialize(): IDBCache {
return new IDBCache();
}
async has(key: string): Promise<boolean> {
// this.store is a promise for the database returned by openDB, so it
// must be awaited before the object store can be queried for the key.
return (await (await this.store).get(STORE_NAME, key)) != null;
}
async get<T>(key: string): Promise<?T> {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return null;
}
return Promise.resolve(deserialize(data));
}
async set(key: string, value: mixed): Promise<void> {
await (await this.store).put(STORE_NAME, serialize(value), key);
}
getStream(key: string): Readable {
let dataPromise = this.store
.then(s => s.get(STORE_NAME, key))
.then(d => Buffer.from(d))
.catch(e => e);
const stream = new Readable({
// $FlowFixMe(incompatible-call)
async read() {
let data = await dataPromise;
if (data instanceof Error) {
stream.emit('error', data);
} else {
stream.push(Buffer.from(data));
stream.push(null);
}
},
});
return stream;
}
async setStream(key: string, stream: Readable): Promise<void> {
let buf = await bufferStream(stream);
await (await this.store).put(STORE_NAME, buf, key);
}
async getBlob(key: string): Promise<Buffer> {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return Promise.reject(new Error(`Key ${key} not found in cache`));
}
return Buffer.from(data.buffer);
}
async setBlob(key: string, contents: Buffer | string): Promise<void> {
let data =
contents instanceof Uint8Array ? contents : Buffer.from(contents);
await (await this.store).put(STORE_NAME, data, key);
}
// async setBlobs(
// entries: $ReadOnlyArray<[string, Buffer | string]>,
// ): Promise<void> {
// const tx = (await this.store).transaction(STORE_NAME, 'readwrite');
// await Promise.all([
// ...entries.map(([key, value]) =>
// tx.store.put(
// value instanceof Uint8Array ? value : Buffer.from(value),
// key,
// ),
// ),
// tx.done,
// ]);
// }
async getBuffer(key: string): Promise<?Buffer> {
let data = await (await this.store).get(STORE_NAME, key);
if (data == null) {
return null;
}
return Buffer.from(data.buffer);
}
hasLargeBlob(key: string): Promise<boolean> {
return this.has(key);
}
getLargeBlob(key: string): Promise<Buffer> {
return this.getBlob(key);
}
setLargeBlob(key: string, contents: Buffer | string): Promise<void> {
return this.setBlob(key, contents);
}
refresh(): void {
// NOOP
}
}
registerSerializableClass(`${packageJson.version}:IDBCache`, IDBCache);
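A sketch of consuming getStream from this browser build — collecting the Readable into one Buffer; the helper name is ours, not part of the package:

// Hypothetical consumer for IDBCache#getStream (or any Cache's
// getStream): gather the emitted chunks into a single Buffer.
function streamToBuffer(readable) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readable.on('data', chunk => chunks.push(chunk));
    readable.on('error', reject);
    readable.on('end', () => resolve(Buffer.concat(chunks)));
  });
}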

@@ -0,0 +1,9 @@
// @flow strict-local
import type {Cache} from './types';
// $FlowFixMe
export class IDBCache implements Cache {
constructor() {
throw new Error('IDBCache is only supported in the browser');
}
}

@@ -0,0 +1,130 @@
// @flow strict-local
import type {FilePath} from '@parcel/types';
import type {Cache} from './types';
import type {Readable, Writable} from 'stream';
import type {AbortSignal} from 'abortcontroller-polyfill/dist/cjs-ponyfill';
import stream from 'stream';
import path from 'path';
import {promisify} from 'util';
import {serialize, deserialize, registerSerializableClass} from '@parcel/core';
import {NodeFS} from '@parcel/fs';
// flowlint-next-line untyped-import:off
import packageJson from '../package.json';
// $FlowFixMe
import lmdb from 'lmdb';
import {FSCache} from './FSCache';
const pipeline: (Readable, Writable) => Promise<void> = promisify(
stream.pipeline,
);
export class LMDBCache implements Cache {
fs: NodeFS;
dir: FilePath;
// $FlowFixMe
store: any;
fsCache: FSCache;
constructor(cacheDir: FilePath) {
this.fs = new NodeFS();
this.dir = cacheDir;
this.fsCache = new FSCache(this.fs, cacheDir);
this.store = lmdb.open(cacheDir, {
name: 'parcel-cache',
encoding: 'binary',
compression: true,
});
}
ensure(): Promise<void> {
return Promise.resolve();
}
serialize(): {|dir: FilePath|} {
return {
dir: this.dir,
};
}
static deserialize(opts: {|dir: FilePath|}): LMDBCache {
return new LMDBCache(opts.dir);
}
has(key: string): Promise<boolean> {
return Promise.resolve(this.store.get(key) != null);
}
get<T>(key: string): Promise<?T> {
let data = this.store.get(key);
if (data == null) {
return Promise.resolve(null);
}
return Promise.resolve(deserialize(data));
}
async set(key: string, value: mixed): Promise<void> {
await this.setBlob(key, serialize(value));
}
getStream(key: string): Readable {
return this.fs.createReadStream(path.join(this.dir, key));
}
setStream(key: string, stream: Readable): Promise<void> {
return pipeline(
stream,
this.fs.createWriteStream(path.join(this.dir, key)),
);
}
getBlob(key: string): Promise<Buffer> {
let buffer = this.store.get(key);
return buffer != null
? Promise.resolve(buffer)
: Promise.reject(new Error(`Key ${key} not found in cache`));
}
async setBlob(key: string, contents: Buffer | string): Promise<void> {
await this.store.put(key, contents);
}
getBuffer(key: string): Promise<?Buffer> {
return Promise.resolve(this.store.get(key));
}
#getFilePath(key: string, index: number): string {
return path.join(this.dir, `${key}-${index}`);
}
hasLargeBlob(key: string): Promise<boolean> {
return this.fs.exists(this.#getFilePath(key, 0));
}
// eslint-disable-next-line require-await
async getLargeBlob(key: string): Promise<Buffer> {
return this.fsCache.getLargeBlob(key);
}
// eslint-disable-next-line require-await
async setLargeBlob(
key: string,
contents: Buffer | string,
options?: {|signal?: AbortSignal|},
): Promise<void> {
return this.fsCache.setLargeBlob(key, contents, options);
}
refresh(): void {
// Reset the read transaction for the store. This guarantees that
// the next read will see the latest changes to the store.
// Useful in scenarios where reads and writes are multi-threaded.
// See https://github.com/kriszyp/lmdb-js#resetreadtxn-void
this.store.resetReadTxn();
}
}
registerSerializableClass(`${packageJson.version}:LMDBCache`, LMDBCache);
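To illustrate the scenario refresh() addresses — the surrounding setup is hypothetical — here is a reader that resets its LMDB read transaction before looking up a key another thread may have just written:

// Hypothetical read path in a multi-threaded setup: refresh() resets
// the LMDB read transaction, so the following get observes writes made
// by other threads since this thread's last read.
async function readLatest(cache, key) {
  cache.refresh();
  return cache.getBuffer(key);
}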

@@ -0,0 +1,4 @@
// @flow strict-local
// Node cannot write more than 2 GiB to a file in a single operation, so large blobs are split into chunks of this size
export const WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;

@@ -0,0 +1,5 @@
// @flow
export type {Cache} from './types';
export * from './LMDBCache';
export * from './FSCache';
export * from './IDBCache';

@@ -0,0 +1,28 @@
// @flow
import type {Readable} from 'stream';
import type {AbortSignal} from 'abortcontroller-polyfill/dist/cjs-ponyfill';
export interface Cache {
ensure(): Promise<void>;
has(key: string): Promise<boolean>;
get<T>(key: string): Promise<?T>;
set(key: string, value: mixed): Promise<void>;
getStream(key: string): Readable;
setStream(key: string, stream: Readable): Promise<void>;
getBlob(key: string): Promise<Buffer>;
setBlob(key: string, contents: Buffer | string): Promise<void>;
hasLargeBlob(key: string): Promise<boolean>;
getLargeBlob(key: string): Promise<Buffer>;
setLargeBlob(
key: string,
contents: Buffer | string,
options?: {|signal?: AbortSignal|},
): Promise<void>;
getBuffer(key: string): Promise<?Buffer>;
/**
* In a multi-threaded environment, where there are potentially multiple Cache
* instances writing to the cache, ensure that this instance has the latest view
* of the changes that may have been written to the cache in other threads.
*/
refresh(): void;
}