larry babby and threejs for glsl

This commit is contained in:
Sam
2024-06-24 21:24:00 +12:00
parent 87d5dc634d
commit 907ebae4c0
6474 changed files with 1279596 additions and 8 deletions

View File

@@ -0,0 +1,167 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.FSCache = void 0;
// Lazily require "stream" once; later calls hit the memoized accessor.
function _stream() {
  const loaded = _interopRequireDefault(require("stream"));
  _stream = () => loaded;
  return loaded;
}
// Lazily require "path" once; later calls hit the memoized accessor.
function _path() {
  const loaded = _interopRequireDefault(require("path"));
  _path = () => loaded;
  return loaded;
}
// Lazily require "util" once; later calls hit the memoized accessor.
function _util() {
  const loaded = require("util");
  _util = () => loaded;
  return loaded;
}
// Lazily require "@parcel/logger" once; later calls hit the memoized accessor.
function _logger() {
  const loaded = _interopRequireDefault(require("@parcel/logger"));
  _logger = () => loaded;
  return loaded;
}
// Lazily require "@parcel/core" once; later calls hit the memoized accessor.
function _core() {
  const loaded = require("@parcel/core");
  _core = () => loaded;
  return loaded;
}
var _package = _interopRequireDefault(require("../package.json"));
var _constants = require("./constants");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classPrivateMethodInitSpec(obj, privateSet) { _checkPrivateRedeclaration(obj, privateSet); privateSet.add(obj); }
function _checkPrivateRedeclaration(obj, privateCollection) { if (privateCollection.has(obj)) { throw new TypeError("Cannot initialize the same private elements twice on an object"); } }
function _classPrivateMethodGet(receiver, privateSet, fn) { if (!privateSet.has(receiver)) { throw new TypeError("attempted to get private field on non-instance"); } return fn; } // flowlint-next-line untyped-import:off
// Promise-returning stream.pipeline, used by setStream below.
const pipeline = (0, _util().promisify)(_stream().default.pipeline);
// WeakSets acting as brands for the transpiled private methods (#getFilePath,
// #unlinkChunks) used by FSCache.
var _getFilePath = /*#__PURE__*/new WeakSet();
var _unlinkChunks = /*#__PURE__*/new WeakSet();
class FSCache {
  /**
   * File-system backed cache.
   *
   * Regular entries live at `<dir>/<id[0:2]>/<id[2:]>` (256 hex shard
   * directories). Streams use a single `<key>-large` file, while "large
   * blobs" are split across numbered chunk files `<key>-0`, `<key>-1`, …
   * sized by WRITE_LIMIT_CHUNK.
   */
  constructor(fs, cacheDir) {
    // Brand this instance for the transpiled private methods below.
    _classPrivateMethodInitSpec(this, _unlinkChunks);
    _classPrivateMethodInitSpec(this, _getFilePath);
    this.fs = fs;
    this.dir = cacheDir;
  }
  async ensure() {
    // First, create the main cache directory if necessary.
    await this.fs.mkdirp(this.dir);
    // In parallel, create sub-directories for every possible hex value
    // This speeds up large caches on many file systems since there are fewer files in a single directory.
    let dirPromises = [];
    for (let i = 0; i < 256; i++) {
      dirPromises.push(this.fs.mkdirp(_path().default.join(this.dir, ('00' + i.toString(16)).slice(-2))));
    }
    await Promise.all(dirPromises);
  }
  // Shard path for a regular entry: first two chars pick the sub-directory.
  _getCachePath(cacheId) {
    return _path().default.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
  }
  getStream(key) {
    return this.fs.createReadStream(this._getCachePath(`${key}-large`));
  }
  setStream(key, stream) {
    return pipeline(stream, this.fs.createWriteStream(this._getCachePath(`${key}-large`)));
  }
  has(key) {
    return this.fs.exists(this._getCachePath(key));
  }
  getBlob(key) {
    return this.fs.readFile(this._getCachePath(key));
  }
  async setBlob(key, contents) {
    await this.fs.writeFile(this._getCachePath(key), contents);
  }
  async getBuffer(key) {
    try {
      return await this.fs.readFile(this._getCachePath(key));
    } catch (err) {
      // A missing entry is not an error; anything else is re-thrown.
      if (err.code === 'ENOENT') {
        return null;
      } else {
        throw err;
      }
    }
  }
  hasLargeBlob(key) {
    // A large blob exists iff its first chunk file does.
    return this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0));
  }
  async getLargeBlob(key) {
    // Read consecutive chunk files until one is missing, then reassemble.
    const buffers = [];
    for (let i = 0; await this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i)); i += 1) {
      const file = this.fs.readFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i));
      buffers.push(file);
    }
    return Buffer.concat(await Promise.all(buffers));
  }
  async setLargeBlob(key, contents, options) {
    // Always write at least one chunk: previously, empty contents produced
    // zero chunks, so nothing was written and hasLargeBlob() stayed false
    // even after a successful set.
    const chunks = Math.max(1, Math.ceil(contents.length / _constants.WRITE_LIMIT_CHUNK));
    const writePromises = [];
    if (chunks === 1) {
      // If there's one chunk, don't slice the content
      writePromises.push(this.fs.writeFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0), contents, {
        signal: options === null || options === void 0 ? void 0 : options.signal
      }));
    } else {
      for (let i = 0; i < chunks; i += 1) {
        writePromises.push(this.fs.writeFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i), typeof contents === 'string' ? contents.slice(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK) : contents.subarray(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK), {
          signal: options === null || options === void 0 ? void 0 : options.signal
        }));
      }
    }
    // If there are already files following the last chunk, they are stale and should be removed
    writePromises.push(_classPrivateMethodGet(this, _unlinkChunks, _unlinkChunks2).call(this, key, chunks));
    await Promise.all(writePromises);
  }
  async get(key) {
    try {
      let data = await this.fs.readFile(this._getCachePath(key));
      return (0, _core().deserialize)(data);
    } catch (err) {
      if (err.code === 'ENOENT') {
        return null;
      } else {
        throw err;
      }
    }
  }
  async set(key, value) {
    try {
      let blobPath = this._getCachePath(key);
      let data = (0, _core().serialize)(value);
      await this.fs.writeFile(blobPath, data);
    } catch (err) {
      // Cache writes are best-effort: log and continue rather than fail the build.
      _logger().default.error(err, '@parcel/cache');
    }
  }
  refresh() {
    // NOOP: a single filesystem view needs no invalidation.
  }
}
exports.FSCache = FSCache;
// #getFilePath implementation: absolute path of chunk `index` for large-blob `key`.
function _getFilePath2(key, index) {
  const chunkName = `${key}-${index}`;
  return _path().default.join(this.dir, chunkName);
}
// #unlinkChunks implementation: delete chunk files for `key` starting at
// `index`, walking forward until an unlink fails (no further chunks exist).
async function _unlinkChunks2(key, index) {
  for (let i = index; ; i += 1) {
    try {
      await this.fs.unlink(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i));
    } catch (err) {
      // First missing chunk: nothing left to delete
      return;
    }
  }
}
(0, _core().registerSerializableClass)(`${_package.default.version}:FSCache`, FSCache);

View File

@@ -0,0 +1,145 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.IDBCache = void 0;
// Lazily require "stream" once; later calls hit the memoized accessor.
function _stream() {
  const loaded = require("stream");
  _stream = () => loaded;
  return loaded;
}
// Lazily require "@parcel/core" once; later calls hit the memoized accessor.
function _core() {
  const loaded = require("@parcel/core");
  _core = () => loaded;
  return loaded;
}
// Lazily require "@parcel/utils" once; later calls hit the memoized accessor.
function _utils() {
  const loaded = require("@parcel/utils");
  _utils = () => loaded;
  return loaded;
}
var _package = _interopRequireDefault(require("../package.json"));
// Lazily require "idb" once; later calls hit the memoized accessor.
function _idb() {
  const loaded = require("idb");
  _idb = () => loaded;
  return loaded;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// $FlowFixMe[untyped-import]
// $FlowFixMe[untyped-import]
// Name of the single object store used inside the IndexedDB database.
const STORE_NAME = 'cache';
class IDBCache {
  // $FlowFixMe
  /**
   * Browser cache backed by IndexedDB (via the `idb` wrapper). All entries
   * live in one object store; `this.store` is a Promise for the open
   * database handle and must be awaited before every operation.
   */
  constructor() {
    this.store = (0, _idb().openDB)('REPL-parcel-cache', 1, {
      upgrade(db) {
        db.createObjectStore(STORE_NAME);
      },
      blocked() {},
      blocking() {},
      terminated() {}
    });
  }
  ensure() {
    return Promise.resolve();
  }
  serialize() {
    return {
      /*::...null*/
    };
  }
  static deserialize() {
    return new IDBCache();
  }
  async has(key) {
    // Fix: `this.store` is a Promise for the database — it must be awaited
    // (and the store name supplied) before querying. The previous
    // `this.store.get(key)` threw a TypeError because Promise has no `get`.
    return (await (await this.store).get(STORE_NAME, key)) != null;
  }
  async get(key) {
    let data = await (await this.store).get(STORE_NAME, key);
    if (data == null) {
      return null;
    }
    return (0, _core().deserialize)(data);
  }
  async set(key, value) {
    await (await this.store).put(STORE_NAME, (0, _core().serialize)(value), key);
  }
  getStream(key) {
    // Start the read immediately; a failure is captured as an Error value
    // and re-emitted on the stream once the consumer starts reading.
    let dataPromise = this.store.then(s => s.get(STORE_NAME, key)).then(d => Buffer.from(d)).catch(e => e);
    const stream = new (_stream().Readable)({
      // $FlowFixMe(incompatible-call)
      async read() {
        let data = await dataPromise;
        if (data instanceof Error) {
          stream.emit('error', data);
        } else {
          stream.push(Buffer.from(data));
          stream.push(null);
        }
      }
    });
    return stream;
  }
  async setStream(key, stream) {
    let buf = await (0, _utils().bufferStream)(stream);
    await (await this.store).put(STORE_NAME, buf, key);
  }
  async getBlob(key) {
    let data = await (await this.store).get(STORE_NAME, key);
    if (data == null) {
      throw new Error(`Key ${key} not found in cache`);
    }
    return Buffer.from(data.buffer);
  }
  async setBlob(key, contents) {
    let data = contents instanceof Uint8Array ? contents : Buffer.from(contents);
    await (await this.store).put(STORE_NAME, data, key);
  }
  // async setBlobs(
  //   entries: $ReadOnlyArray<[string, Buffer | string]>,
  // ): Promise<void> {
  //   const tx = (await this.store).transaction(STORE_NAME, 'readwrite');
  //   await Promise.all([
  //     ...entries.map(([key, value]) =>
  //       tx.store.put(
  //         value instanceof Uint8Array ? value : Buffer.from(value),
  //         key,
  //       ),
  //     ),
  //     tx.done,
  //   ]);
  // }
  async getBuffer(key) {
    let data = await (await this.store).get(STORE_NAME, key);
    if (data == null) {
      return null;
    }
    return Buffer.from(data.buffer);
  }
  // IndexedDB has no practical per-value write limit here, so "large" blobs
  // are stored exactly like regular blobs.
  hasLargeBlob(key) {
    return this.has(key);
  }
  getLargeBlob(key) {
    return this.getBlob(key);
  }
  setLargeBlob(key, contents) {
    return this.setBlob(key, contents);
  }
  refresh() {
    // NOOP: IndexedDB reads always see committed writes.
  }
}
exports.IDBCache = IDBCache;
(0, _core().registerSerializableClass)(`${_package.default.version}:IDBCache`, IDBCache);

View File

@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.IDBCache = void 0;
// $FlowFixMe
// Server-side stub: the real IDBCache needs IndexedDB, which only exists in
// browsers, so constructing this variant fails fast.
class IDBCache {
  constructor() {
    const message = 'IDBCache is only supported in the browser';
    throw new Error(message);
  }
}
exports.IDBCache = IDBCache;

View File

@@ -0,0 +1,137 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.LMDBCache = void 0;
// Lazily require "stream" once; later calls hit the memoized accessor.
function _stream() {
  const loaded = _interopRequireDefault(require("stream"));
  _stream = () => loaded;
  return loaded;
}
// Lazily require "path" once; later calls hit the memoized accessor.
function _path() {
  const loaded = _interopRequireDefault(require("path"));
  _path = () => loaded;
  return loaded;
}
// Lazily require "util" once; later calls hit the memoized accessor.
function _util() {
  const loaded = require("util");
  _util = () => loaded;
  return loaded;
}
// Lazily require "@parcel/core" once; later calls hit the memoized accessor.
function _core() {
  const loaded = require("@parcel/core");
  _core = () => loaded;
  return loaded;
}
// Lazily require "@parcel/fs" once; later calls hit the memoized accessor.
function _fs() {
  const loaded = require("@parcel/fs");
  _fs = () => loaded;
  return loaded;
}
var _package = _interopRequireDefault(require("../package.json"));
// Lazily require "lmdb" once; later calls hit the memoized accessor.
function _lmdb() {
  const loaded = _interopRequireDefault(require("lmdb"));
  _lmdb = () => loaded;
  return loaded;
}
var _FSCache = require("./FSCache");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classPrivateMethodInitSpec(obj, privateSet) { _checkPrivateRedeclaration(obj, privateSet); privateSet.add(obj); }
function _checkPrivateRedeclaration(obj, privateCollection) { if (privateCollection.has(obj)) { throw new TypeError("Cannot initialize the same private elements twice on an object"); } }
function _classPrivateMethodGet(receiver, privateSet, fn) { if (!privateSet.has(receiver)) { throw new TypeError("attempted to get private field on non-instance"); } return fn; } // flowlint-next-line untyped-import:off
// $FlowFixMe
// Promise-returning stream.pipeline, used by setStream below.
const pipeline = (0, _util().promisify)(_stream().default.pipeline);
// WeakSet brand for the transpiled #getFilePath private method.
var _getFilePath = /*#__PURE__*/new WeakSet();
class LMDBCache {
  // $FlowFixMe
  /**
   * LMDB-backed cache. Small entries live in an LMDB store inside
   * `cacheDir`; streams are written as plain files in the same directory,
   * and large blobs are delegated to an FSCache sharing that directory.
   */
  constructor(cacheDir) {
    _classPrivateMethodInitSpec(this, _getFilePath);
    this.fs = new (_fs().NodeFS)();
    this.dir = cacheDir;
    this.fsCache = new _FSCache.FSCache(this.fs, cacheDir);
    this.store = _lmdb().default.open(cacheDir, {
      name: 'parcel-cache',
      encoding: 'binary',
      compression: true
    });
  }
  ensure() {
    return Promise.resolve();
  }
  serialize() {
    // Only the directory is needed to reconstruct this cache in a worker.
    return {
      dir: this.dir
    };
  }
  static deserialize(opts) {
    return new LMDBCache(opts.dir);
  }
  has(key) {
    // lmdb reads are synchronous; wrap for the async Cache interface.
    return Promise.resolve(this.store.get(key) != null);
  }
  get(key) {
    let data = this.store.get(key);
    if (data == null) {
      return Promise.resolve(null);
    }
    return Promise.resolve((0, _core().deserialize)(data));
  }
  async set(key, value) {
    await this.setBlob(key, (0, _core().serialize)(value));
  }
  getStream(key) {
    return this.fs.createReadStream(_path().default.join(this.dir, key));
  }
  setStream(key, stream) {
    return pipeline(stream, this.fs.createWriteStream(_path().default.join(this.dir, key)));
  }
  getBlob(key) {
    let buffer = this.store.get(key);
    return buffer != null ? Promise.resolve(buffer) : Promise.reject(new Error(`Key ${key} not found in cache`));
  }
  async setBlob(key, contents) {
    await this.store.put(key, contents);
  }
  getBuffer(key) {
    return Promise.resolve(this.store.get(key));
  }
  hasLargeBlob(key) {
    // Delegate to fsCache for consistency with get/setLargeBlob below; it
    // checks the same `<key>-0` chunk path in the same directory with the
    // same fs, so behavior is unchanged and the path logic lives in one place.
    return this.fsCache.hasLargeBlob(key);
  }
  // eslint-disable-next-line require-await
  async getLargeBlob(key) {
    return this.fsCache.getLargeBlob(key);
  }
  // eslint-disable-next-line require-await
  async setLargeBlob(key, contents, options) {
    return this.fsCache.setLargeBlob(key, contents, options);
  }
  refresh() {
    // Reset the read transaction for the store. This guarantees that
    // the next read will see the latest changes to the store.
    // Useful in scenarios where reads and writes are multi-threaded.
    // See https://github.com/kriszyp/lmdb-js#resetreadtxn-void
    this.store.resetReadTxn();
  }
}
exports.LMDBCache = LMDBCache;
// #getFilePath implementation: absolute path of chunk `index` for large-blob `key`.
function _getFilePath2(key, index) {
  const chunkName = `${key}-${index}`;
  return _path().default.join(this.dir, chunkName);
}
(0, _core().registerSerializableClass)(`${_package.default.version}:LMDBCache`, LMDBCache);

View File

@@ -0,0 +1,8 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.WRITE_LIMIT_CHUNK = void 0;
// Node has a file size limit of 2 GB
// Large blobs are split into chunks of at most this many bytes (see
// FSCache.setLargeBlob).
// NOTE(review): 2 * 1024 ** 3 is exactly 2^31 bytes; Node/libuv's single-write
// ceiling has historically been 2^31 - 1, so a chunk of exactly this size may
// still fail — confirm against the supported Node versions.
const WRITE_LIMIT_CHUNK = exports.WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;

View File

@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _LMDBCache = require("./LMDBCache");
Object.keys(_LMDBCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _LMDBCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _LMDBCache[key];
}
});
});
var _FSCache = require("./FSCache");
Object.keys(_FSCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _FSCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _FSCache[key];
}
});
});
var _IDBCache = require("./IDBCache");
Object.keys(_IDBCache).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (key in exports && exports[key] === _IDBCache[key]) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function () {
return _IDBCache[key];
}
});
});

View File

@@ -0,0 +1,25 @@
import type { Readable } from "stream";
import type { AbortSignal } from "abortcontroller-polyfill/dist/cjs-ponyfill";
/**
 * Common contract implemented by FSCache, LMDBCache and IDBCache.
 * All read methods resolve with the stored value or a nullish result /
 * rejection when the key is absent (see each member's note).
 */
export interface Cache {
  /** Prepare the backing storage (directories, databases) before first use. */
  ensure(): Promise<void>;
  /** Resolve true when `key` has a regular entry. */
  has(key: string): Promise<boolean>;
  /** Deserialize and return the value stored under `key`, or nullish when absent. */
  get<T>(key: string): Promise<T | null | undefined>;
  /** Serialize `value` and store it under `key`. */
  set(key: string, value: unknown): Promise<void>;
  /** Stream the entry stored via setStream. */
  getStream(key: string): Readable;
  /** Persist a stream's contents under `key`; resolves when fully written. */
  setStream(key: string, stream: Readable): Promise<void>;
  /** Return the raw bytes under `key`; rejects when the key is absent. */
  getBlob(key: string): Promise<Buffer>;
  /** Store raw bytes (or text) under `key`. */
  setBlob(key: string, contents: Buffer | string): Promise<void>;
  /** Resolve true when `key` has a large-blob entry. */
  hasLargeBlob(key: string): Promise<boolean>;
  /** Reassemble and return a large blob (implementations may chunk storage). */
  getLargeBlob(key: string): Promise<Buffer>;
  /** Store a large blob; `signal` lets callers abort in-flight writes. */
  setLargeBlob(key: string, contents: Buffer | string, options?: {
    signal?: AbortSignal;
  }): Promise<void>;
  /** Like getBlob but resolves nullish instead of rejecting when absent. */
  getBuffer(key: string): Promise<Buffer | null | undefined>;
  /**
   * In a multi-threaded environment, where there are potentially multiple Cache
   * instances writing to the cache, ensure that this instance has the latest view
   * of the changes that may have been written to the cache in other threads.
   */
  refresh(): void;
}

View File

@@ -0,0 +1 @@
"use strict";