larry babby and threejs for glsl

Sam
2024-06-24 21:24:00 +12:00
parent 87d5dc634d
commit 907ebae4c0
6474 changed files with 1279596 additions and 8 deletions


@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

File diff suppressed because it is too large.


@@ -0,0 +1,80 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.BitSet = void 0;
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/clz32#implementing_count_leading_ones_and_beyond
function ctz32(n) {
if (n === 0) {
return 32;
}
return 31 - Math.clz32(n & -n);
}
class BitSet {
constructor(maxBits) {
this.bits = new Uint32Array(Math.ceil(maxBits / 32));
}
clone() {
let res = new BitSet(this.capacity);
res.bits.set(this.bits);
return res;
}
static union(a, b) {
let res = a.clone();
res.union(b);
return res;
}
get capacity() {
return this.bits.length * 32;
}
add(bit) {
this.bits[bit >>> 5] |= 1 << (bit & 31);
}
delete(bit) {
this.bits[bit >>> 5] &= ~(1 << (bit & 31));
}
has(bit) {
return Boolean(this.bits[bit >>> 5] & 1 << (bit & 31));
}
empty() {
for (let k = 0; k < this.bits.length; k++) {
if (this.bits[k] !== 0) {
return false;
}
}
return true;
}
clear() {
this.bits.fill(0);
}
intersect(other) {
for (let i = 0; i < this.bits.length; i++) {
this.bits[i] &= other.bits[i];
}
}
union(other) {
for (let i = 0; i < this.bits.length; i++) {
this.bits[i] |= other.bits[i];
}
}
remove(other) {
for (let i = 0; i < this.bits.length; i++) {
this.bits[i] &= ~other.bits[i];
}
}
forEach(fn) {
// https://lemire.me/blog/2018/02/21/iterating-over-set-bits-quickly/
let bits = this.bits;
for (let k = 0; k < bits.length; k++) {
let v = bits[k];
while (v !== 0) {
let t = (v & -v) >>> 0;
// $FlowFixMe
fn((k << 5) + ctz32(v));
v ^= t;
}
}
}
}
exports.BitSet = BitSet;
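A hypothetical usage sketch of the compiled BitSet above (not part of this commit): it is a fixed-capacity bit set backed by a Uint32Array, with capacity rounded up to a multiple of 32. The relative require path is an assumption.
// Sketch only: exercising BitSet from the compiled lib output above.
const {BitSet} = require('./BitSet');
let reachable = new BitSet(100); // allocates Math.ceil(100 / 32) = 4 words
reachable.add(3);
reachable.add(64);
let dirty = new BitSet(100);
dirty.add(64);
dirty.add(97);
// Instance methods mutate in place; the static BitSet.union returns a copy.
let either = BitSet.union(reachable, dirty);
reachable.intersect(dirty); // reachable now holds only bit 64
console.log(reachable.has(64)); // true
either.forEach(bit => console.log(bit)); // 3, 64, 97 — ascending, via the ctz32 trick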


@@ -0,0 +1,80 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _Graph = _interopRequireDefault(require("./Graph"));
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
class ContentGraph extends _Graph.default {
constructor(opts) {
if (opts) {
let {
_contentKeyToNodeId,
_nodeIdToContentKey,
...rest
} = opts;
super(rest);
this._contentKeyToNodeId = _contentKeyToNodeId;
this._nodeIdToContentKey = _nodeIdToContentKey;
} else {
super();
this._contentKeyToNodeId = new Map();
this._nodeIdToContentKey = new Map();
}
}
// $FlowFixMe[prop-missing]
static deserialize(opts) {
return new ContentGraph(opts);
}
// $FlowFixMe[prop-missing]
serialize() {
// $FlowFixMe[prop-missing]
return {
...super.serialize(),
_contentKeyToNodeId: this._contentKeyToNodeId,
_nodeIdToContentKey: this._nodeIdToContentKey
};
}
addNodeByContentKey(contentKey, node) {
if (this.hasContentKey(contentKey)) {
throw new Error('Graph already has content key ' + contentKey);
}
let nodeId = super.addNode(node);
this._contentKeyToNodeId.set(contentKey, nodeId);
this._nodeIdToContentKey.set(nodeId, contentKey);
return nodeId;
}
addNodeByContentKeyIfNeeded(contentKey, node) {
return this.hasContentKey(contentKey) ? this.getNodeIdByContentKey(contentKey) : this.addNodeByContentKey(contentKey, node);
}
getNodeByContentKey(contentKey) {
let nodeId = this._contentKeyToNodeId.get(contentKey);
if (nodeId != null) {
return super.getNode(nodeId);
}
}
getNodeIdByContentKey(contentKey) {
return (0, _nullthrows().default)(this._contentKeyToNodeId.get(contentKey), `Expected content key ${contentKey} to exist`);
}
hasContentKey(contentKey) {
return this._contentKeyToNodeId.has(contentKey);
}
removeNode(nodeId) {
this._assertHasNodeId(nodeId);
let contentKey = (0, _nullthrows().default)(this._nodeIdToContentKey.get(nodeId));
this._contentKeyToNodeId.delete(contentKey);
this._nodeIdToContentKey.delete(nodeId);
super.removeNode(nodeId);
}
}
exports.default = ContentGraph;
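A hypothetical usage sketch of ContentGraph above (not part of this commit): nodes are addressable both by numeric NodeId and by a stable string content key. The require path and the keys used are assumptions.
// Sketch only: content-key lookups layered on top of the base Graph.
const ContentGraph = require('./ContentGraph').default;
let graph = new ContentGraph();
let id = graph.addNodeByContentKey('asset:index.js', {type: 'asset'});
// The *IfNeeded variant returns the existing id instead of throwing.
let same = graph.addNodeByContentKeyIfNeeded('asset:index.js', {type: 'asset'});
console.log(same === id); // true
console.log(graph.getNodeIdByContentKey('asset:index.js') === id); // true
// Removing the node also drops both content-key mappings.
graph.removeNode(id);
console.log(graph.hasContentKey('asset:index.js')); // false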


@@ -0,0 +1,483 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = exports.ALL_EDGE_TYPES = void 0;
exports.mapVisitor = mapVisitor;
var _types = require("./types");
var _AdjacencyList = _interopRequireDefault(require("./AdjacencyList"));
var _BitSet = require("./BitSet");
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const ALL_EDGE_TYPES = exports.ALL_EDGE_TYPES = -1;
class Graph {
constructor(opts) {
this.nodes = (opts === null || opts === void 0 ? void 0 : opts.nodes) || [];
this.setRootNodeId(opts === null || opts === void 0 ? void 0 : opts.rootNodeId);
let adjacencyList = opts === null || opts === void 0 ? void 0 : opts.adjacencyList;
this.adjacencyList = adjacencyList ? _AdjacencyList.default.deserialize(adjacencyList) : new _AdjacencyList.default();
}
setRootNodeId(id) {
this.rootNodeId = id;
}
static deserialize(opts) {
return new this({
nodes: opts.nodes,
adjacencyList: opts.adjacencyList,
rootNodeId: opts.rootNodeId
});
}
serialize() {
return {
nodes: this.nodes,
adjacencyList: this.adjacencyList.serialize(),
rootNodeId: this.rootNodeId
};
}
// Returns an iterator of all edges in the graph. This can be large, so iterating
// the complete list can be costly in large graphs. Used when merging graphs.
getAllEdges() {
return this.adjacencyList.getAllEdges();
}
addNode(node) {
let id = this.adjacencyList.addNode();
this.nodes.push(node);
return id;
}
hasNode(id) {
return this.nodes[id] != null;
}
getNode(id) {
return this.nodes[id];
}
addEdge(from, to, type = 1) {
if (Number(type) === 0) {
throw new Error(`Edge type "${type}" not allowed`);
}
if (this.getNode(from) == null) {
throw new Error(`"from" node '${(0, _types.fromNodeId)(from)}' not found`);
}
if (this.getNode(to) == null) {
throw new Error(`"to" node '${(0, _types.fromNodeId)(to)}' not found`);
}
return this.adjacencyList.addEdge(from, to, type);
}
hasEdge(from, to, type = 1) {
return this.adjacencyList.hasEdge(from, to, type);
}
getNodeIdsConnectedTo(nodeId, type = 1) {
this._assertHasNodeId(nodeId);
return this.adjacencyList.getNodeIdsConnectedTo(nodeId, type);
}
getNodeIdsConnectedFrom(nodeId, type = 1) {
this._assertHasNodeId(nodeId);
return this.adjacencyList.getNodeIdsConnectedFrom(nodeId, type);
}
// Removes node and any edges coming from or to that node
removeNode(nodeId) {
if (!this.hasNode(nodeId)) {
return;
}
for (let {
type,
from
} of this.adjacencyList.getInboundEdgesByType(nodeId)) {
this._removeEdge(from, nodeId, type,
// Do not allow orphans to be removed as this node could be one
// and is already being removed.
false);
}
for (let {
type,
to
} of this.adjacencyList.getOutboundEdgesByType(nodeId)) {
this._removeEdge(nodeId, to, type);
}
this.nodes[nodeId] = null;
}
removeEdges(nodeId, type = 1) {
if (!this.hasNode(nodeId)) {
return;
}
for (let to of this.getNodeIdsConnectedFrom(nodeId, type)) {
this._removeEdge(nodeId, to, type);
}
}
removeEdge(from, to, type = 1, removeOrphans = true) {
if (!this.adjacencyList.hasEdge(from, to, type)) {
throw new Error(`Edge from ${(0, _types.fromNodeId)(from)} to ${(0, _types.fromNodeId)(to)} not found!`);
}
this._removeEdge(from, to, type, removeOrphans);
}
// Removes the edge, and also removes the destination node if it is left orphaned
_removeEdge(from, to, type = 1, removeOrphans = true) {
if (!this.adjacencyList.hasEdge(from, to, type)) {
return;
}
this.adjacencyList.removeEdge(from, to, type);
if (removeOrphans && this.isOrphanedNode(to)) {
this.removeNode(to);
}
}
isOrphanedNode(nodeId) {
if (!this.hasNode(nodeId)) {
return false;
}
if (this.rootNodeId == null) {
// If the graph does not have a root, and there are inbound edges,
// this node should not be considered orphaned.
return !this.adjacencyList.hasInboundEdges(nodeId);
}
// Otherwise, attempt to traverse backwards to the root. If there is a path,
// then this is not an orphaned node.
let hasPathToRoot = false;
// go back to traverseAncestors
this.traverseAncestors(nodeId, (ancestorId, _, actions) => {
if (ancestorId === this.rootNodeId) {
hasPathToRoot = true;
actions.stop();
}
}, ALL_EDGE_TYPES);
if (hasPathToRoot) {
return false;
}
return true;
}
updateNode(nodeId, node) {
this._assertHasNodeId(nodeId);
this.nodes[nodeId] = node;
}
// Update a node's downstream nodes making sure to prune any orphaned branches
replaceNodeIdsConnectedTo(fromNodeId, toNodeIds, replaceFilter, type = 1) {
this._assertHasNodeId(fromNodeId);
let outboundEdges = this.getNodeIdsConnectedFrom(fromNodeId, type);
let childrenToRemove = new Set(replaceFilter ? outboundEdges.filter(toNodeId => replaceFilter(toNodeId)) : outboundEdges);
for (let toNodeId of toNodeIds) {
childrenToRemove.delete(toNodeId);
if (!this.hasEdge(fromNodeId, toNodeId, type)) {
this.addEdge(fromNodeId, toNodeId, type);
}
}
for (let child of childrenToRemove) {
this._removeEdge(fromNodeId, child, type);
}
}
traverse(visit, startNodeId, type = 1) {
let enter = typeof visit === 'function' ? visit : visit.enter;
if (type === ALL_EDGE_TYPES && enter && (typeof visit === 'function' || !visit.exit)) {
return this.dfsFast(enter, startNodeId);
} else {
return this.dfs({
visit,
startNodeId,
getChildren: nodeId => this.getNodeIdsConnectedFrom(nodeId, type)
});
}
}
filteredTraverse(filter, visit, startNodeId, type) {
return this.traverse(mapVisitor(filter, visit), startNodeId, type);
}
traverseAncestors(startNodeId, visit, type = 1) {
return this.dfs({
visit,
startNodeId,
getChildren: nodeId => this.getNodeIdsConnectedTo(nodeId, type)
});
}
dfsFast(visit, startNodeId) {
let traversalStartNode = (0, _nullthrows().default)(startNodeId !== null && startNodeId !== void 0 ? startNodeId : this.rootNodeId, 'A start node is required to traverse');
this._assertHasNodeId(traversalStartNode);
let visited;
if (!this._visited || this._visited.capacity < this.nodes.length) {
this._visited = new _BitSet.BitSet(this.nodes.length);
visited = this._visited;
} else {
visited = this._visited;
visited.clear();
}
// Take shared instance to avoid re-entrancy issues.
this._visited = null;
let stopped = false;
let skipped = false;
let actions = {
skipChildren() {
skipped = true;
},
stop() {
stopped = true;
}
};
let queue = [{
nodeId: traversalStartNode,
context: null
}];
while (queue.length !== 0) {
let {
nodeId,
context
} = queue.pop();
if (!this.hasNode(nodeId) || visited.has(nodeId)) continue;
visited.add(nodeId);
skipped = false;
let newContext = visit(nodeId, context, actions);
if (typeof newContext !== 'undefined') {
// $FlowFixMe[reassign-const]
context = newContext;
}
if (skipped) {
continue;
}
if (stopped) {
this._visited = visited;
return context;
}
this.adjacencyList.forEachNodeIdConnectedFromReverse(nodeId, child => {
if (!visited.has(child)) {
queue.push({
nodeId: child,
context
});
}
return false;
});
}
this._visited = visited;
return null;
}
// A post-order implementation of dfsFast
postOrderDfsFast(visit, startNodeId) {
let traversalStartNode = (0, _nullthrows().default)(startNodeId !== null && startNodeId !== void 0 ? startNodeId : this.rootNodeId, 'A start node is required to traverse');
this._assertHasNodeId(traversalStartNode);
let visited;
if (!this._visited || this._visited.capacity < this.nodes.length) {
this._visited = new _BitSet.BitSet(this.nodes.length);
visited = this._visited;
} else {
visited = this._visited;
visited.clear();
}
this._visited = null;
let stopped = false;
let actions = {
stop() {
stopped = true;
},
skipChildren() {
throw new Error('Calling skipChildren inside a post-order traversal is not allowed');
}
};
let queue = [traversalStartNode];
while (queue.length !== 0) {
let nodeId = queue[queue.length - 1];
if (!visited.has(nodeId)) {
visited.add(nodeId);
this.adjacencyList.forEachNodeIdConnectedFromReverse(nodeId, child => {
if (!visited.has(child)) {
queue.push(child);
}
return false;
});
} else {
queue.pop();
visit(nodeId, null, actions);
if (stopped) {
this._visited = visited;
return;
}
}
}
this._visited = visited;
}
dfs({
visit,
startNodeId,
getChildren
}) {
let traversalStartNode = (0, _nullthrows().default)(startNodeId !== null && startNodeId !== void 0 ? startNodeId : this.rootNodeId, 'A start node is required to traverse');
this._assertHasNodeId(traversalStartNode);
let visited;
if (!this._visited || this._visited.capacity < this.nodes.length) {
this._visited = new _BitSet.BitSet(this.nodes.length);
visited = this._visited;
} else {
visited = this._visited;
visited.clear();
}
// Take shared instance to avoid re-entrancy issues.
this._visited = null;
let stopped = false;
let skipped = false;
let actions = {
skipChildren() {
skipped = true;
},
stop() {
stopped = true;
}
};
let walk = (nodeId, context) => {
if (!this.hasNode(nodeId)) return;
visited.add(nodeId);
skipped = false;
let enter = typeof visit === 'function' ? visit : visit.enter;
if (enter) {
let newContext = enter(nodeId, context, actions);
if (typeof newContext !== 'undefined') {
// $FlowFixMe[reassign-const]
context = newContext;
}
}
if (skipped) {
return;
}
if (stopped) {
return context;
}
for (let child of getChildren(nodeId)) {
if (visited.has(child)) {
continue;
}
visited.add(child);
let result = walk(child, context);
if (stopped) {
return result;
}
}
if (typeof visit !== 'function' && visit.exit &&
// Make sure the graph still has the node: it may have been removed between enter and exit
this.hasNode(nodeId)) {
let newContext = visit.exit(nodeId, context, actions);
if (typeof newContext !== 'undefined') {
// $FlowFixMe[reassign-const]
context = newContext;
}
}
if (skipped) {
return;
}
if (stopped) {
return context;
}
};
let result = walk(traversalStartNode);
this._visited = visited;
return result;
}
bfs(visit) {
let rootNodeId = (0, _nullthrows().default)(this.rootNodeId, 'A root node is required to traverse');
let queue = [rootNodeId];
let visited = new Set([rootNodeId]);
while (queue.length > 0) {
let node = queue.shift();
let stop = visit(node);
if (stop === true) {
return node;
}
for (let child of this.getNodeIdsConnectedFrom(node)) {
if (!visited.has(child)) {
visited.add(child);
queue.push(child);
}
}
}
return null;
}
topoSort(type) {
let sorted = [];
this.traverse({
exit: nodeId => {
sorted.push(nodeId);
}
}, null, type);
return sorted.reverse();
}
findAncestor(nodeId, fn) {
let res = null;
this.traverseAncestors(nodeId, (nodeId, ctx, traversal) => {
if (fn(nodeId)) {
res = nodeId;
traversal.stop();
}
});
return res;
}
findAncestors(nodeId, fn) {
let res = [];
this.traverseAncestors(nodeId, (nodeId, ctx, traversal) => {
if (fn(nodeId)) {
res.push(nodeId);
traversal.skipChildren();
}
});
return res;
}
findDescendant(nodeId, fn) {
let res = null;
this.traverse((nodeId, ctx, traversal) => {
if (fn(nodeId)) {
res = nodeId;
traversal.stop();
}
}, nodeId);
return res;
}
findDescendants(nodeId, fn) {
let res = [];
this.traverse((nodeId, ctx, traversal) => {
if (fn(nodeId)) {
res.push(nodeId);
traversal.skipChildren();
}
}, nodeId);
return res;
}
_assertHasNodeId(nodeId) {
if (!this.hasNode(nodeId)) {
throw new Error('Does not have node ' + (0, _types.fromNodeId)(nodeId));
}
}
}
exports.default = Graph;
function mapVisitor(filter, visit) {
function makeEnter(visit) {
return function (nodeId, context, actions) {
let value = filter(nodeId, actions);
if (value != null) {
return visit(value, context, actions);
}
};
}
if (typeof visit === 'function') {
return makeEnter(visit);
}
let mapped = {};
if (visit.enter != null) {
mapped.enter = makeEnter(visit.enter);
}
if (visit.exit != null) {
mapped.exit = function (nodeId, context, actions) {
let exit = visit.exit;
if (!exit) {
return;
}
let value = filter(nodeId, actions);
if (value != null) {
return exit(value, context, actions);
}
};
}
return mapped;
}
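A hypothetical usage sketch of Graph above (not part of this commit): a rooted graph with typed edges, depth-first traversal with stop/skip actions, and a topological sort built on the exit callback. The require path is an assumption.
// Sketch only: build a small rooted graph and walk it.
const Graph = require('./Graph').default;
let graph = new Graph();
let a = graph.addNode('a');
let b = graph.addNode('b');
let c = graph.addNode('c');
graph.setRootNodeId(a);
graph.addEdge(a, b);
graph.addEdge(b, c);
// Pre-order DFS from the root; actions.stop() aborts the walk early.
graph.traverse((nodeId, context, actions) => {
  if (graph.getNode(nodeId) === 'c') actions.stop();
});
// topoSort collects node ids on exit and reverses, giving root-first order.
console.log(graph.topoSort()); // e.g. [0, 1, 2] for a -> b -> c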


@@ -0,0 +1,54 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "ALL_EDGE_TYPES", {
enumerable: true,
get: function () {
return _Graph.ALL_EDGE_TYPES;
}
});
Object.defineProperty(exports, "BitSet", {
enumerable: true,
get: function () {
return _BitSet.BitSet;
}
});
Object.defineProperty(exports, "ContentGraph", {
enumerable: true,
get: function () {
return _ContentGraph.default;
}
});
Object.defineProperty(exports, "Graph", {
enumerable: true,
get: function () {
return _Graph.default;
}
});
Object.defineProperty(exports, "fromNodeId", {
enumerable: true,
get: function () {
return _types.fromNodeId;
}
});
Object.defineProperty(exports, "mapVisitor", {
enumerable: true,
get: function () {
return _Graph.mapVisitor;
}
});
Object.defineProperty(exports, "toNodeId", {
enumerable: true,
get: function () {
return _types.toNodeId;
}
});
var _types = require("./types");
var _Graph = _interopRequireWildcard(require("./Graph"));
var _ContentGraph = _interopRequireDefault(require("./ContentGraph"));
var _BitSet = require("./BitSet");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }


@@ -0,0 +1,28 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.SharedBuffer = void 0;
// Copy from @parcel/utils to fix: https://github.com/stackblitz/core/issues/1855
let SharedBuffer = exports.SharedBuffer = void 0;
// $FlowFixMe[prop-missing]
if (process.browser) {
exports.SharedBuffer = SharedBuffer = ArrayBuffer;
// Safari has removed the constructor
if (typeof SharedArrayBuffer !== 'undefined') {
let channel = new MessageChannel();
try {
// Firefox might throw when sending the Buffer over a MessagePort
channel.port1.postMessage(new SharedArrayBuffer(0));
exports.SharedBuffer = SharedBuffer = SharedArrayBuffer;
} catch (_) {
// NOOP
}
channel.port1.close();
channel.port2.close();
}
} else {
exports.SharedBuffer = SharedBuffer = SharedArrayBuffer;
}
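A hypothetical usage sketch of the SharedBuffer shim above (not part of this commit): it resolves to SharedArrayBuffer where sending one over a MessagePort is allowed (and always in Node), and falls back to a plain ArrayBuffer otherwise, so either kind can back a typed array. The require path is an assumption.
// Sketch only: allocate through the shim and view it as typed memory.
const {SharedBuffer} = require('./shared-buffer');
let backing = new SharedBuffer(64 * 1024); // SharedArrayBuffer in Node, plain ArrayBuffer in restricted browsers
let view = new Uint32Array(backing);
view[0] = 42;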


@@ -0,0 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.fromNodeId = fromNodeId;
exports.toNodeId = toNodeId;
// forcing NodeId to be opaque as it should only be created once
function toNodeId(x) {
return x;
}
function fromNodeId(x) {
return x;
}


@@ -0,0 +1,26 @@
{
"name": "@parcel/graph",
"version": "3.2.0",
"description": "Blazing fast, zero configuration web application bundler",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"main": "lib/index.js",
"source": "src/index.js",
"engines": {
"node": ">= 12.0.0"
},
"dependencies": {
"nullthrows": "^1.1.1"
},
"gitHead": "2059029ee91e5f03a273b0954d3e629d7375f986"
}

File diff suppressed because it is too large.


@@ -0,0 +1,98 @@
// @flow strict-local
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/clz32#implementing_count_leading_ones_and_beyond
function ctz32(n: number): number {
if (n === 0) {
return 32;
}
let reversed = n & -n;
return 31 - Math.clz32(reversed);
}
export class BitSet {
bits: Uint32Array;
constructor(maxBits: number) {
this.bits = new Uint32Array(Math.ceil(maxBits / 32));
}
clone(): BitSet {
let res = new BitSet(this.capacity);
res.bits.set(this.bits);
return res;
}
static union(a: BitSet, b: BitSet): BitSet {
let res = a.clone();
res.union(b);
return res;
}
get capacity(): number {
return this.bits.length * 32;
}
add(bit: number) {
let i = bit >>> 5;
let b = bit & 31;
this.bits[i] |= 1 << b;
}
delete(bit: number) {
let i = bit >>> 5;
let b = bit & 31;
this.bits[i] &= ~(1 << b);
}
has(bit: number): boolean {
let i = bit >>> 5;
let b = bit & 31;
return Boolean(this.bits[i] & (1 << b));
}
empty(): boolean {
for (let k = 0; k < this.bits.length; k++) {
if (this.bits[k] !== 0) {
return false;
}
}
return true;
}
clear() {
this.bits.fill(0);
}
intersect(other: BitSet) {
for (let i = 0; i < this.bits.length; i++) {
this.bits[i] &= other.bits[i];
}
}
union(other: BitSet) {
for (let i = 0; i < this.bits.length; i++) {
this.bits[i] |= other.bits[i];
}
}
remove(other: BitSet) {
for (let i = 0; i < this.bits.length; i++) {
this.bits[i] &= ~other.bits[i];
}
}
forEach(fn: (bit: number) => void) {
// https://lemire.me/blog/2018/02/21/iterating-over-set-bits-quickly/
let bits = this.bits;
for (let k = 0; k < bits.length; k++) {
let v = bits[k];
while (v !== 0) {
let t = (v & -v) >>> 0;
// $FlowFixMe
fn((k << 5) + ctz32(v));
v ^= t;
}
}
}
}


@@ -0,0 +1,96 @@
// @flow strict-local
import type {ContentKey, NodeId} from './types';
import Graph, {type SerializedGraph, type GraphOpts} from './Graph';
import nullthrows from 'nullthrows';
export type ContentGraphOpts<TNode, TEdgeType: number = 1> = {|
...GraphOpts<TNode, TEdgeType>,
_contentKeyToNodeId: Map<ContentKey, NodeId>,
_nodeIdToContentKey: Map<NodeId, ContentKey>,
|};
export type SerializedContentGraph<TNode, TEdgeType: number = 1> = {|
...SerializedGraph<TNode, TEdgeType>,
_contentKeyToNodeId: Map<ContentKey, NodeId>,
|};
export default class ContentGraph<TNode, TEdgeType: number = 1> extends Graph<
TNode,
TEdgeType,
> {
_contentKeyToNodeId: Map<ContentKey, NodeId>;
_nodeIdToContentKey: Map<NodeId, ContentKey>;
constructor(opts: ?ContentGraphOpts<TNode, TEdgeType>) {
if (opts) {
let {_contentKeyToNodeId, _nodeIdToContentKey, ...rest} = opts;
super(rest);
this._contentKeyToNodeId = _contentKeyToNodeId;
this._nodeIdToContentKey = _nodeIdToContentKey;
} else {
super();
this._contentKeyToNodeId = new Map();
this._nodeIdToContentKey = new Map();
}
}
// $FlowFixMe[prop-missing]
static deserialize(
opts: ContentGraphOpts<TNode, TEdgeType>,
): ContentGraph<TNode, TEdgeType> {
return new ContentGraph(opts);
}
// $FlowFixMe[prop-missing]
serialize(): SerializedContentGraph<TNode, TEdgeType> {
// $FlowFixMe[prop-missing]
return {
...super.serialize(),
_contentKeyToNodeId: this._contentKeyToNodeId,
_nodeIdToContentKey: this._nodeIdToContentKey,
};
}
addNodeByContentKey(contentKey: ContentKey, node: TNode): NodeId {
if (this.hasContentKey(contentKey)) {
throw new Error('Graph already has content key ' + contentKey);
}
let nodeId = super.addNode(node);
this._contentKeyToNodeId.set(contentKey, nodeId);
this._nodeIdToContentKey.set(nodeId, contentKey);
return nodeId;
}
addNodeByContentKeyIfNeeded(contentKey: ContentKey, node: TNode): NodeId {
return this.hasContentKey(contentKey)
? this.getNodeIdByContentKey(contentKey)
: this.addNodeByContentKey(contentKey, node);
}
getNodeByContentKey(contentKey: ContentKey): ?TNode {
let nodeId = this._contentKeyToNodeId.get(contentKey);
if (nodeId != null) {
return super.getNode(nodeId);
}
}
getNodeIdByContentKey(contentKey: ContentKey): NodeId {
return nullthrows(
this._contentKeyToNodeId.get(contentKey),
`Expected content key ${contentKey} to exist`,
);
}
hasContentKey(contentKey: ContentKey): boolean {
return this._contentKeyToNodeId.has(contentKey);
}
removeNode(nodeId: NodeId): void {
this._assertHasNodeId(nodeId);
let contentKey = nullthrows(this._nodeIdToContentKey.get(nodeId));
this._contentKeyToNodeId.delete(contentKey);
this._nodeIdToContentKey.delete(nodeId);
super.removeNode(nodeId);
}
}


@@ -0,0 +1,685 @@
// @flow strict-local
import {fromNodeId} from './types';
import AdjacencyList, {type SerializedAdjacencyList} from './AdjacencyList';
import type {Edge, NodeId} from './types';
import type {
TraversalActions,
GraphVisitor,
GraphTraversalCallback,
} from '@parcel/types';
import {BitSet} from './BitSet';
import nullthrows from 'nullthrows';
export type NullEdgeType = 1;
export type GraphOpts<TNode, TEdgeType: number = 1> = {|
nodes?: Array<TNode | null>,
adjacencyList?: SerializedAdjacencyList<TEdgeType>,
rootNodeId?: ?NodeId,
|};
export type SerializedGraph<TNode, TEdgeType: number = 1> = {|
nodes: Array<TNode | null>,
adjacencyList: SerializedAdjacencyList<TEdgeType>,
rootNodeId: ?NodeId,
|};
export type AllEdgeTypes = -1;
export const ALL_EDGE_TYPES: AllEdgeTypes = -1;
export default class Graph<TNode, TEdgeType: number = 1> {
nodes: Array<TNode | null>;
adjacencyList: AdjacencyList<TEdgeType>;
rootNodeId: ?NodeId;
_visited: ?BitSet;
constructor(opts: ?GraphOpts<TNode, TEdgeType>) {
this.nodes = opts?.nodes || [];
this.setRootNodeId(opts?.rootNodeId);
let adjacencyList = opts?.adjacencyList;
this.adjacencyList = adjacencyList
? AdjacencyList.deserialize(adjacencyList)
: new AdjacencyList<TEdgeType>();
}
setRootNodeId(id: ?NodeId) {
this.rootNodeId = id;
}
static deserialize(
opts: GraphOpts<TNode, TEdgeType>,
): Graph<TNode, TEdgeType> {
return new this({
nodes: opts.nodes,
adjacencyList: opts.adjacencyList,
rootNodeId: opts.rootNodeId,
});
}
serialize(): SerializedGraph<TNode, TEdgeType> {
return {
nodes: this.nodes,
adjacencyList: this.adjacencyList.serialize(),
rootNodeId: this.rootNodeId,
};
}
// Returns an iterator of all edges in the graph. This can be large, so iterating
// the complete list can be costly in large graphs. Used when merging graphs.
getAllEdges(): Iterator<Edge<TEdgeType | NullEdgeType>> {
return this.adjacencyList.getAllEdges();
}
addNode(node: TNode): NodeId {
let id = this.adjacencyList.addNode();
this.nodes.push(node);
return id;
}
hasNode(id: NodeId): boolean {
return this.nodes[id] != null;
}
getNode(id: NodeId): ?TNode {
return this.nodes[id];
}
addEdge(
from: NodeId,
to: NodeId,
type: TEdgeType | NullEdgeType = 1,
): boolean {
if (Number(type) === 0) {
throw new Error(`Edge type "${type}" not allowed`);
}
if (this.getNode(from) == null) {
throw new Error(`"from" node '${fromNodeId(from)}' not found`);
}
if (this.getNode(to) == null) {
throw new Error(`"to" node '${fromNodeId(to)}' not found`);
}
return this.adjacencyList.addEdge(from, to, type);
}
hasEdge(
from: NodeId,
to: NodeId,
type?: TEdgeType | NullEdgeType | Array<TEdgeType | NullEdgeType> = 1,
): boolean {
return this.adjacencyList.hasEdge(from, to, type);
}
getNodeIdsConnectedTo(
nodeId: NodeId,
type:
| TEdgeType
| NullEdgeType
| Array<TEdgeType | NullEdgeType>
| AllEdgeTypes = 1,
): Array<NodeId> {
this._assertHasNodeId(nodeId);
return this.adjacencyList.getNodeIdsConnectedTo(nodeId, type);
}
getNodeIdsConnectedFrom(
nodeId: NodeId,
type:
| TEdgeType
| NullEdgeType
| Array<TEdgeType | NullEdgeType>
| AllEdgeTypes = 1,
): Array<NodeId> {
this._assertHasNodeId(nodeId);
return this.adjacencyList.getNodeIdsConnectedFrom(nodeId, type);
}
// Removes node and any edges coming from or to that node
removeNode(nodeId: NodeId) {
if (!this.hasNode(nodeId)) {
return;
}
for (let {type, from} of this.adjacencyList.getInboundEdgesByType(nodeId)) {
this._removeEdge(
from,
nodeId,
type,
// Do not allow orphans to be removed as this node could be one
// and is already being removed.
false,
);
}
for (let {type, to} of this.adjacencyList.getOutboundEdgesByType(nodeId)) {
this._removeEdge(nodeId, to, type);
}
this.nodes[nodeId] = null;
}
removeEdges(nodeId: NodeId, type: TEdgeType | NullEdgeType = 1) {
if (!this.hasNode(nodeId)) {
return;
}
for (let to of this.getNodeIdsConnectedFrom(nodeId, type)) {
this._removeEdge(nodeId, to, type);
}
}
removeEdge(
from: NodeId,
to: NodeId,
type: TEdgeType | NullEdgeType = 1,
removeOrphans: boolean = true,
) {
if (!this.adjacencyList.hasEdge(from, to, type)) {
throw new Error(
`Edge from ${fromNodeId(from)} to ${fromNodeId(to)} not found!`,
);
}
this._removeEdge(from, to, type, removeOrphans);
}
// Removes the edge, and also removes the destination node if it is left orphaned
_removeEdge(
from: NodeId,
to: NodeId,
type: TEdgeType | NullEdgeType = 1,
removeOrphans: boolean = true,
) {
if (!this.adjacencyList.hasEdge(from, to, type)) {
return;
}
this.adjacencyList.removeEdge(from, to, type);
if (removeOrphans && this.isOrphanedNode(to)) {
this.removeNode(to);
}
}
isOrphanedNode(nodeId: NodeId): boolean {
if (!this.hasNode(nodeId)) {
return false;
}
if (this.rootNodeId == null) {
// If the graph does not have a root, and there are inbound edges,
// this node should not be considered orphaned.
return !this.adjacencyList.hasInboundEdges(nodeId);
}
// Otherwise, attempt to traverse backwards to the root. If there is a path,
// then this is not an orphaned node.
let hasPathToRoot = false;
// go back to traverseAncestors
this.traverseAncestors(
nodeId,
(ancestorId, _, actions) => {
if (ancestorId === this.rootNodeId) {
hasPathToRoot = true;
actions.stop();
}
},
ALL_EDGE_TYPES,
);
if (hasPathToRoot) {
return false;
}
return true;
}
updateNode(nodeId: NodeId, node: TNode): void {
this._assertHasNodeId(nodeId);
this.nodes[nodeId] = node;
}
// Update a node's downstream nodes making sure to prune any orphaned branches
replaceNodeIdsConnectedTo(
fromNodeId: NodeId,
toNodeIds: $ReadOnlyArray<NodeId>,
replaceFilter?: null | (NodeId => boolean),
type?: TEdgeType | NullEdgeType = 1,
): void {
this._assertHasNodeId(fromNodeId);
let outboundEdges = this.getNodeIdsConnectedFrom(fromNodeId, type);
let childrenToRemove = new Set(
replaceFilter
? outboundEdges.filter(toNodeId => replaceFilter(toNodeId))
: outboundEdges,
);
for (let toNodeId of toNodeIds) {
childrenToRemove.delete(toNodeId);
if (!this.hasEdge(fromNodeId, toNodeId, type)) {
this.addEdge(fromNodeId, toNodeId, type);
}
}
for (let child of childrenToRemove) {
this._removeEdge(fromNodeId, child, type);
}
}
traverse<TContext>(
visit: GraphVisitor<NodeId, TContext>,
startNodeId: ?NodeId,
type:
| TEdgeType
| NullEdgeType
| Array<TEdgeType | NullEdgeType>
| AllEdgeTypes = 1,
): ?TContext {
let enter = typeof visit === 'function' ? visit : visit.enter;
if (
type === ALL_EDGE_TYPES &&
enter &&
(typeof visit === 'function' || !visit.exit)
) {
return this.dfsFast(enter, startNodeId);
} else {
return this.dfs({
visit,
startNodeId,
getChildren: nodeId => this.getNodeIdsConnectedFrom(nodeId, type),
});
}
}
filteredTraverse<TValue, TContext>(
filter: (NodeId, TraversalActions) => ?TValue,
visit: GraphVisitor<TValue, TContext>,
startNodeId: ?NodeId,
type?: TEdgeType | Array<TEdgeType | NullEdgeType> | AllEdgeTypes,
): ?TContext {
return this.traverse(mapVisitor(filter, visit), startNodeId, type);
}
traverseAncestors<TContext>(
startNodeId: ?NodeId,
visit: GraphVisitor<NodeId, TContext>,
type:
| TEdgeType
| NullEdgeType
| Array<TEdgeType | NullEdgeType>
| AllEdgeTypes = 1,
): ?TContext {
return this.dfs({
visit,
startNodeId,
getChildren: nodeId => this.getNodeIdsConnectedTo(nodeId, type),
});
}
dfsFast<TContext>(
visit: GraphTraversalCallback<NodeId, TContext>,
startNodeId: ?NodeId,
): ?TContext {
let traversalStartNode = nullthrows(
startNodeId ?? this.rootNodeId,
'A start node is required to traverse',
);
this._assertHasNodeId(traversalStartNode);
let visited;
if (!this._visited || this._visited.capacity < this.nodes.length) {
this._visited = new BitSet(this.nodes.length);
visited = this._visited;
} else {
visited = this._visited;
visited.clear();
}
// Take shared instance to avoid re-entrancy issues.
this._visited = null;
let stopped = false;
let skipped = false;
let actions: TraversalActions = {
skipChildren() {
skipped = true;
},
stop() {
stopped = true;
},
};
let queue = [{nodeId: traversalStartNode, context: null}];
while (queue.length !== 0) {
let {nodeId, context} = queue.pop();
if (!this.hasNode(nodeId) || visited.has(nodeId)) continue;
visited.add(nodeId);
skipped = false;
let newContext = visit(nodeId, context, actions);
if (typeof newContext !== 'undefined') {
// $FlowFixMe[reassign-const]
context = newContext;
}
if (skipped) {
continue;
}
if (stopped) {
this._visited = visited;
return context;
}
this.adjacencyList.forEachNodeIdConnectedFromReverse(nodeId, child => {
if (!visited.has(child)) {
queue.push({nodeId: child, context});
}
return false;
});
}
this._visited = visited;
return null;
}
// A post-order implementation of dfsFast
postOrderDfsFast(
visit: GraphTraversalCallback<NodeId, TraversalActions>,
startNodeId: ?NodeId,
): void {
let traversalStartNode = nullthrows(
startNodeId ?? this.rootNodeId,
'A start node is required to traverse',
);
this._assertHasNodeId(traversalStartNode);
let visited;
if (!this._visited || this._visited.capacity < this.nodes.length) {
this._visited = new BitSet(this.nodes.length);
visited = this._visited;
} else {
visited = this._visited;
visited.clear();
}
this._visited = null;
let stopped = false;
let actions: TraversalActions = {
stop() {
stopped = true;
},
skipChildren() {
throw new Error(
'Calling skipChildren inside a post-order traversal is not allowed',
);
},
};
let queue = [traversalStartNode];
while (queue.length !== 0) {
let nodeId = queue[queue.length - 1];
if (!visited.has(nodeId)) {
visited.add(nodeId);
this.adjacencyList.forEachNodeIdConnectedFromReverse(nodeId, child => {
if (!visited.has(child)) {
queue.push(child);
}
return false;
});
} else {
queue.pop();
visit(nodeId, null, actions);
if (stopped) {
this._visited = visited;
return;
}
}
}
this._visited = visited;
return;
}
dfs<TContext>({
visit,
startNodeId,
getChildren,
}: {|
visit: GraphVisitor<NodeId, TContext>,
getChildren(nodeId: NodeId): Array<NodeId>,
startNodeId?: ?NodeId,
|}): ?TContext {
let traversalStartNode = nullthrows(
startNodeId ?? this.rootNodeId,
'A start node is required to traverse',
);
this._assertHasNodeId(traversalStartNode);
let visited;
if (!this._visited || this._visited.capacity < this.nodes.length) {
this._visited = new BitSet(this.nodes.length);
visited = this._visited;
} else {
visited = this._visited;
visited.clear();
}
// Take shared instance to avoid re-entrancy issues.
this._visited = null;
let stopped = false;
let skipped = false;
let actions: TraversalActions = {
skipChildren() {
skipped = true;
},
stop() {
stopped = true;
},
};
let walk = (nodeId, context: ?TContext) => {
if (!this.hasNode(nodeId)) return;
visited.add(nodeId);
skipped = false;
let enter = typeof visit === 'function' ? visit : visit.enter;
if (enter) {
let newContext = enter(nodeId, context, actions);
if (typeof newContext !== 'undefined') {
// $FlowFixMe[reassign-const]
context = newContext;
}
}
if (skipped) {
return;
}
if (stopped) {
return context;
}
for (let child of getChildren(nodeId)) {
if (visited.has(child)) {
continue;
}
visited.add(child);
let result = walk(child, context);
if (stopped) {
return result;
}
}
if (
typeof visit !== 'function' &&
visit.exit &&
// Make sure the graph still has the node: it may have been removed between enter and exit
this.hasNode(nodeId)
) {
let newContext = visit.exit(nodeId, context, actions);
if (typeof newContext !== 'undefined') {
// $FlowFixMe[reassign-const]
context = newContext;
}
}
if (skipped) {
return;
}
if (stopped) {
return context;
}
};
let result = walk(traversalStartNode);
this._visited = visited;
return result;
}
bfs(visit: (nodeId: NodeId) => ?boolean): ?NodeId {
let rootNodeId = nullthrows(
this.rootNodeId,
'A root node is required to traverse',
);
let queue: Array<NodeId> = [rootNodeId];
let visited = new Set<NodeId>([rootNodeId]);
while (queue.length > 0) {
let node = queue.shift();
let stop = visit(node);
if (stop === true) {
return node;
}
for (let child of this.getNodeIdsConnectedFrom(node)) {
if (!visited.has(child)) {
visited.add(child);
queue.push(child);
}
}
}
return null;
}
topoSort(type?: TEdgeType): Array<NodeId> {
let sorted: Array<NodeId> = [];
this.traverse(
{
exit: nodeId => {
sorted.push(nodeId);
},
},
null,
type,
);
return sorted.reverse();
}
findAncestor(nodeId: NodeId, fn: (nodeId: NodeId) => boolean): ?NodeId {
let res = null;
this.traverseAncestors(nodeId, (nodeId, ctx, traversal) => {
if (fn(nodeId)) {
res = nodeId;
traversal.stop();
}
});
return res;
}
findAncestors(
nodeId: NodeId,
fn: (nodeId: NodeId) => boolean,
): Array<NodeId> {
let res = [];
this.traverseAncestors(nodeId, (nodeId, ctx, traversal) => {
if (fn(nodeId)) {
res.push(nodeId);
traversal.skipChildren();
}
});
return res;
}
findDescendant(nodeId: NodeId, fn: (nodeId: NodeId) => boolean): ?NodeId {
let res = null;
this.traverse((nodeId, ctx, traversal) => {
if (fn(nodeId)) {
res = nodeId;
traversal.stop();
}
}, nodeId);
return res;
}
findDescendants(
nodeId: NodeId,
fn: (nodeId: NodeId) => boolean,
): Array<NodeId> {
let res = [];
this.traverse((nodeId, ctx, traversal) => {
if (fn(nodeId)) {
res.push(nodeId);
traversal.skipChildren();
}
}, nodeId);
return res;
}
_assertHasNodeId(nodeId: NodeId) {
if (!this.hasNode(nodeId)) {
throw new Error('Does not have node ' + fromNodeId(nodeId));
}
}
}
export function mapVisitor<NodeId, TValue, TContext>(
filter: (NodeId, TraversalActions) => ?TValue,
visit: GraphVisitor<TValue, TContext>,
): GraphVisitor<NodeId, TContext> {
function makeEnter(visit) {
return function mappedEnter(nodeId, context, actions) {
let value = filter(nodeId, actions);
if (value != null) {
return visit(value, context, actions);
}
};
}
if (typeof visit === 'function') {
return makeEnter(visit);
}
let mapped = {};
if (visit.enter != null) {
mapped.enter = makeEnter(visit.enter);
}
if (visit.exit != null) {
mapped.exit = function mappedExit(nodeId, context, actions) {
let exit = visit.exit;
if (!exit) {
return;
}
let value = filter(nodeId, actions);
if (value != null) {
return exit(value, context, actions);
}
};
}
return mapped;
}


@@ -0,0 +1,9 @@
// @flow strict-local
export type {NodeId, ContentKey, Edge} from './types';
export type {GraphOpts} from './Graph';
export type {ContentGraphOpts, SerializedContentGraph} from './ContentGraph';
export {toNodeId, fromNodeId} from './types';
export {default as Graph, ALL_EDGE_TYPES, mapVisitor} from './Graph';
export {default as ContentGraph} from './ContentGraph';
export {BitSet} from './BitSet';


@@ -0,0 +1,23 @@
// @flow
// Copy from @parcel/utils to fix: https://github.com/stackblitz/core/issues/1855
export let SharedBuffer: Class<ArrayBuffer> | Class<SharedArrayBuffer>;
// $FlowFixMe[prop-missing]
if (process.browser) {
SharedBuffer = ArrayBuffer;
// Safari has removed the constructor
if (typeof SharedArrayBuffer !== 'undefined') {
let channel = new MessageChannel();
try {
// Firefox might throw when sending the Buffer over a MessagePort
channel.port1.postMessage(new SharedArrayBuffer(0));
SharedBuffer = SharedArrayBuffer;
} catch (_) {
// NOOP
}
channel.port1.close();
channel.port2.close();
}
} else {
SharedBuffer = SharedArrayBuffer;
}


@@ -0,0 +1,18 @@
// @flow strict-local
// forcing NodeId to be opaque as it should only be created once
export type NodeId = number;
export function toNodeId(x: number): NodeId {
return x;
}
export function fromNodeId(x: NodeId): number {
return x;
}
export type ContentKey = string;
export type Edge<TEdgeType: number> = {|
from: NodeId,
to: NodeId,
type: TEdgeType,
|};


@@ -0,0 +1,321 @@
// @flow strict-local
import assert from 'assert';
import path from 'path';
import {Worker} from 'worker_threads';
import AdjacencyList, {NodeTypeMap, EdgeTypeMap} from '../src/AdjacencyList';
import {toNodeId} from '../src/types';
describe('AdjacencyList', () => {
it('constructor should initialize an empty graph', () => {
let stats = new AdjacencyList().stats;
assert(stats.nodes === 0);
assert(stats.edges === 0);
});
it('addNode should add a node to the graph', () => {
let graph = new AdjacencyList();
let id = graph.addNode();
assert.equal(id, 0);
assert.equal(graph.stats.nodes, 1);
let id2 = graph.addNode();
assert.equal(id2, 1);
assert.equal(graph.stats.nodes, 2);
});
it('addNode should resize nodes array', () => {
let graph = new AdjacencyList();
let size = graph.serialize().nodes.byteLength;
graph.addNode();
graph.addNode();
graph.addNode();
graph.addNode();
assert(size < graph.serialize().nodes.byteLength);
});
it('removeEdge should remove an edge from the graph', () => {
let graph = new AdjacencyList();
let node0 = graph.addNode();
let node1 = graph.addNode();
let node2 = graph.addNode();
let node3 = graph.addNode();
let node4 = graph.addNode();
let node5 = graph.addNode();
let node6 = graph.addNode();
graph.addEdge(node0, node1);
graph.addEdge(node2, node1);
// this will get removed
graph.addEdge(node3, node1);
graph.addEdge(node4, node1);
graph.addEdge(node5, node1);
graph.addEdge(node6, node1);
assert.deepEqual(graph.getNodeIdsConnectedTo(node1), [0, 2, 3, 4, 5, 6]);
graph.removeEdge(node3, node1);
assert.deepEqual(graph.getNodeIdsConnectedTo(node1), [0, 2, 4, 5, 6]);
});
it('getNodeIdsConnectedTo and getNodeIdsConnectedFrom should remove duplicate values', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
let c = graph.addNode();
graph.addEdge(a, b);
graph.addEdge(a, c);
graph.addEdge(a, b, 2);
assert.deepEqual(graph.getNodeIdsConnectedFrom(a, -1), [b, c]);
assert.deepEqual(graph.getNodeIdsConnectedTo(b, -1), [a]);
});
it('removeEdge should remove an edge of a specific type from the graph', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
let c = graph.addNode();
let d = graph.addNode();
graph.addEdge(a, b);
graph.addEdge(a, b, 2);
graph.addEdge(a, b, 3);
graph.addEdge(a, c);
graph.addEdge(a, d, 3);
assert.equal(graph.stats.edges, 5);
assert.ok(graph.hasEdge(a, b));
assert.ok(graph.hasEdge(a, b, 2));
assert.ok(graph.hasEdge(a, b, 3));
assert.ok(graph.hasEdge(a, c));
assert.ok(graph.hasEdge(a, d, 3));
assert.deepEqual(Array.from(graph.getAllEdges()), [
{from: a, to: b, type: 1},
{from: a, to: b, type: 2},
{from: a, to: b, type: 3},
{from: a, to: c, type: 1},
{from: a, to: d, type: 3},
]);
graph.removeEdge(a, b, 2);
assert.equal(graph.stats.edges, 4);
assert.ok(graph.hasEdge(a, b));
assert.equal(graph.hasEdge(a, b, 2), false);
assert.ok(graph.hasEdge(a, b, 3));
assert.ok(graph.hasEdge(a, c));
assert.ok(graph.hasEdge(a, d, 3));
assert.deepEqual(Array.from(graph.getAllEdges()), [
{from: a, to: b, type: 1},
{from: a, to: b, type: 3},
{from: a, to: c, type: 1},
{from: a, to: d, type: 3},
]);
});
it('addEdge should add an edge to the graph', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
graph.addEdge(a, b);
assert.equal(graph.stats.nodes, 2);
assert.equal(graph.stats.edges, 1);
assert.ok(graph.hasEdge(a, b));
});
it('addEdge should add multiple edges from a node in order', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
let c = graph.addNode();
let d = graph.addNode();
graph.addEdge(a, b);
graph.addEdge(a, d);
graph.addEdge(a, c);
assert.deepEqual(graph.getNodeIdsConnectedFrom(a), [b, d, c]);
});
it('addEdge should add multiple edges to a node in order', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
let c = graph.addNode();
let d = graph.addNode();
graph.addEdge(a, b);
graph.addEdge(d, b);
graph.addEdge(a, d);
graph.addEdge(c, b);
assert.deepEqual(graph.getNodeIdsConnectedTo(b), [a, d, c]);
});
it('addEdge should add multiple edges of different types in order', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
graph.addEdge(a, b);
graph.addEdge(a, b, 1);
graph.addEdge(a, b, 4);
graph.addEdge(a, b, 3);
assert.deepEqual(graph.getNodeIdsConnectedFrom(a), [b]);
assert.deepEqual(Array.from(graph.getAllEdges()), [
{from: a, to: b, type: 1},
{from: a, to: b, type: 4},
{from: a, to: b, type: 3},
]);
});
it('addEdge should return false if an edge is already added', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
assert.equal(graph.addEdge(a, b), true);
assert.equal(graph.addEdge(a, b), false);
});
it('addEdge should resize nodes array when necessary', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
let size = graph.serialize().nodes.byteLength;
graph.addEdge(a, b, 1);
graph.addEdge(a, b, 2);
graph.addEdge(a, b, 3);
graph.addEdge(a, b, 4);
assert(size < graph.serialize().nodes.byteLength);
});
it('addEdge should resize edges array when necessary', () => {
let graph = new AdjacencyList();
let size = graph.serialize().edges.byteLength;
let a = graph.addNode();
let b = graph.addNode();
graph.addEdge(a, b, 1);
graph.addEdge(a, b, 2);
graph.addEdge(a, b, 3);
assert(size < graph.serialize().edges.byteLength);
});
it('addEdge should error when a node has not been added to the graph', () => {
let graph = new AdjacencyList();
assert.throws(() => graph.addEdge(toNodeId(0), toNodeId(1)));
graph.addNode();
assert.throws(() => graph.addEdge(toNodeId(0), toNodeId(1)));
graph.addNode();
assert.doesNotThrow(() => graph.addEdge(toNodeId(0), toNodeId(1)));
assert.throws(() => graph.addEdge(toNodeId(0), toNodeId(2)));
});
it('addEdge should error when an unsupported edge type is provided', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
assert.throws(() => graph.addEdge(a, b, 0));
assert.throws(() => graph.addEdge(a, b, -1));
assert.doesNotThrow(() => graph.addEdge(a, b, 1));
});
it('addEdge should not replace a deleted edge if the edge was already added', () => {
// Mock hash fn to generate collisions
// $FlowFixMe[prop-missing]
let originalHash = AdjacencyList.prototype.hash;
// $FlowFixMe[prop-missing]
AdjacencyList.prototype.hash = () => 1;
let graph = new AdjacencyList();
let n0 = graph.addNode();
let n1 = graph.addNode();
let n2 = graph.addNode();
graph.addEdge(n0, n1, 1);
graph.addEdge(n1, n2, 1);
graph.removeEdge(n1, n2, 1);
assert(graph.addEdge(n0, n1, 1) === false);
assert(graph.stats.edges === 1);
// $FlowFixMe[prop-missing]
AdjacencyList.prototype.hash = originalHash;
});
it('addEdge should replace a deleted edge', () => {
// Mock hash fn to generate collisions
// $FlowFixMe[prop-missing]
let originalHash = AdjacencyList.prototype.hash;
// $FlowFixMe[prop-missing]
AdjacencyList.prototype.hash = () => 1;
try {
let graph = new AdjacencyList({initialCapacity: 3});
let n0 = graph.addNode();
let n1 = graph.addNode();
graph.addEdge(n0, n1, 2);
graph.removeEdge(n0, n1, 2);
assert(graph.addEdge(n0, n1, 2));
assert(graph.stats.edges === 1);
assert(graph.stats.deleted === 1);
// Resize to reclaim deleted edge space.
graph.resizeEdges(2);
assert(graph.stats.edges === 1);
assert(graph.stats.deleted === 0);
} finally {
// $FlowFixMe[prop-missing]
AdjacencyList.prototype.hash = originalHash;
}
});
it('hasEdge should accept an array of edge types', () => {
let graph = new AdjacencyList();
let a = graph.addNode();
let b = graph.addNode();
let c = graph.addNode();
graph.addEdge(a, b, 1);
graph.addEdge(b, c, 2);
assert.ok(!graph.hasEdge(a, b, [2, 3]));
assert.ok(graph.hasEdge(a, b, [1, 2]));
assert.ok(!graph.hasEdge(b, c, [1, 3]));
assert.ok(graph.hasEdge(b, c, [2, 3]));
});
describe('deserialize', function () {
this.timeout(10000);
it('should share the underlying data across worker threads', async () => {
let graph = new AdjacencyList();
let n0 = graph.addNode();
let n1 = graph.addNode();
graph.addEdge(n0, n1, 1);
graph.addEdge(n0, n1, 2);
let worker = new Worker(
path.join(__dirname, 'integration/adjacency-list-shared-array.js'),
);
let originalSerialized = graph.serialize();
let originalNodes = [...originalSerialized.nodes];
let originalEdges = [...originalSerialized.edges];
let work = new Promise(resolve => worker.on('message', resolve));
worker.postMessage(originalSerialized);
let received = AdjacencyList.deserialize(await work);
await worker.terminate();
assert.deepEqual(received.serialize().nodes, graph.serialize().nodes);
assert.deepEqual(received.serialize().edges, graph.serialize().edges);
originalNodes.forEach((v, i) => {
if (i < NodeTypeMap.HEADER_SIZE) {
assert.equal(v, received.serialize().nodes[i]);
assert.equal(v, graph.serialize().nodes[i]);
} else {
assert.equal(v * 2, received.serialize().nodes[i]);
assert.equal(v * 2, graph.serialize().nodes[i]);
}
});
originalEdges.forEach((v, i) => {
if (i < EdgeTypeMap.HEADER_SIZE) {
assert.equal(v, received.serialize().edges[i]);
assert.equal(v, graph.serialize().edges[i]);
} else {
assert.equal(v * 2, received.serialize().edges[i]);
assert.equal(v * 2, graph.serialize().edges[i]);
}
});
});
});
});


@@ -0,0 +1,110 @@
// @flow strict-local
import assert from 'assert';
import {BitSet} from '../src/BitSet';
function assertValues(set: BitSet, values: Array<number>) {
let setValues = [];
set.forEach(bit => {
setValues.push(bit);
});
for (let value of values) {
assert(set.has(value), 'Set.has returned false');
assert(
setValues.some(v => v === value),
'Set values is missing value',
);
}
assert(
setValues.length === values.length,
`Expected ${values.length} values but got ${setValues.length}`,
);
}
describe('BitSet', () => {
it('clone should return a set with the same values', () => {
let set1 = new BitSet(5);
set1.add(1);
set1.add(3);
let set2 = set1.clone();
assertValues(set2, [1, 3]);
});
it('clear should remove all values from the set', () => {
let set1 = new BitSet(5);
set1.add(1);
set1.add(3);
set1.clear();
assertValues(set1, []);
});
it('delete should remove values from the set', () => {
let set1 = new BitSet(5);
set1.add(1);
set1.add(3);
set1.add(5);
set1.delete(3);
assertValues(set1, [1, 5]);
});
it('empty should check if there are no values set', () => {
let set1 = new BitSet(5);
assert(set1.empty());
set1.add(3);
assert(!set1.empty());
set1.delete(3);
assert(set1.empty());
});
it('should intersect with another BitSet', () => {
let set1 = new BitSet(5);
set1.add(1);
set1.add(3);
let set2 = new BitSet(5);
set2.add(3);
set2.add(5);
set1.intersect(set2);
assertValues(set1, [3]);
});
it('should union with another BitSet', () => {
let set1 = new BitSet(5);
set1.add(1);
set1.add(3);
let set2 = new BitSet(5);
set2.add(3);
set2.add(5);
set1.union(set2);
assertValues(set1, [1, 3, 5]);
});
it('BitSet.union should create a new BitSet with the union', () => {
let set1 = new BitSet(5);
set1.add(1);
set1.add(3);
let set2 = new BitSet(5);
set2.add(3);
set2.add(5);
let set3 = BitSet.union(set1, set2);
assertValues(set1, [1, 3]);
assertValues(set2, [3, 5]);
assertValues(set3, [1, 3, 5]);
});
});


@@ -0,0 +1,42 @@
// @flow strict-local
import assert from 'assert';
import ContentGraph from '../src/ContentGraph';
describe('ContentGraph', () => {
it('should addNodeByContentKey if no node exists with the content key', () => {
let graph = new ContentGraph();
const node = {};
const nodeId1 = graph.addNodeByContentKey('contentKey', node);
assert.deepEqual(graph.getNode(nodeId1), node);
assert(graph.hasContentKey('contentKey'));
assert.deepEqual(graph.getNodeByContentKey('contentKey'), node);
});
it('should throw if a node with the content key already exists', () => {
let graph = new ContentGraph();
graph.addNodeByContentKey('contentKey', {});
assert.throws(() => {
graph.addNodeByContentKey('contentKey', {});
}, /already has content key/);
});
it('should remove the content key from graph when node is removed', () => {
let graph = new ContentGraph();
const node1 = {};
const nodeId1 = graph.addNodeByContentKey('contentKey', node1);
assert.equal(graph.getNode(nodeId1), node1);
assert(graph.hasContentKey('contentKey'));
graph.removeNode(nodeId1);
assert(!graph.hasContentKey('contentKey'));
});
});


@@ -0,0 +1,343 @@
// @flow strict-local
import assert from 'assert';
import sinon from 'sinon';
import Graph from '../src/Graph';
import {toNodeId} from '../src/types';
describe('Graph', () => {
it('constructor should initialize an empty graph', () => {
let graph = new Graph();
assert.deepEqual(graph.nodes, []);
assert.deepEqual([...graph.getAllEdges()], []);
});
it('addNode should add a node to the graph', () => {
let graph = new Graph();
let node = {};
let id = graph.addNode(node);
assert.equal(graph.getNode(id), node);
});
it('errors when traversing a graph with no root', () => {
let graph = new Graph();
assert.throws(() => {
graph.traverse(() => {});
}, /A start node is required to traverse/);
});
it("errors when traversing a graph with a startNode that doesn't belong", () => {
let graph = new Graph();
assert.throws(() => {
graph.traverse(() => {}, toNodeId(-1));
}, /Does not have node/);
});
it("errors if replaceNodeIdsConnectedTo is called with a node that doesn't belong", () => {
let graph = new Graph();
assert.throws(() => {
graph.replaceNodeIdsConnectedTo(toNodeId(-1), []);
}, /Does not have node/);
});
it("errors when adding an edge to a node that doesn't exist", () => {
let graph = new Graph();
let node = graph.addNode({});
assert.throws(() => {
graph.addEdge(node, toNodeId(-1));
}, /"to" node '-1' not found/);
});
it("errors when adding an edge from a node that doesn't exist", () => {
let graph = new Graph();
let node = graph.addNode({});
assert.throws(() => {
graph.addEdge(toNodeId(-1), node);
}, /"from" node '-1' not found/);
});
it('hasNode should return a boolean based on whether the node exists in the graph', () => {
let graph = new Graph();
let node = graph.addNode({});
assert(graph.hasNode(node));
assert(!graph.hasNode(toNodeId(-1)));
});
it('addEdge should add an edge to the graph', () => {
let graph = new Graph();
let nodeA = graph.addNode('a');
let nodeB = graph.addNode('b');
graph.addEdge(nodeA, nodeB);
assert(graph.hasEdge(nodeA, nodeB));
});
it('isOrphanedNode should return true or false if the node is orphaned or not', () => {
let graph = new Graph();
let rootNode = graph.addNode('root');
graph.setRootNodeId(rootNode);
let nodeA = graph.addNode('a');
let nodeB = graph.addNode('b');
let nodeC = graph.addNode('c');
graph.addEdge(rootNode, nodeB);
graph.addEdge(nodeB, nodeC, 1);
assert(graph.isOrphanedNode(nodeA));
assert(!graph.isOrphanedNode(nodeB));
assert(!graph.isOrphanedNode(nodeC));
});
it("removeEdge should throw if the edge doesn't exist", () => {
let graph = new Graph();
let nodeA = graph.addNode('a');
let nodeB = graph.addNode('b');
assert.throws(() => {
graph.removeEdge(nodeA, nodeB);
}, /Edge from 0 to 1 not found!/);
});
it('removeEdge should prune the graph at that edge', () => {
// a
// / \
// b - d
// /
// c
let graph = new Graph();
let nodeA = graph.addNode('a');
graph.setRootNodeId(nodeA);
let nodeB = graph.addNode('b');
let nodeC = graph.addNode('c');
let nodeD = graph.addNode('d');
graph.addEdge(nodeA, nodeB);
graph.addEdge(nodeA, nodeD);
graph.addEdge(nodeB, nodeC);
graph.addEdge(nodeB, nodeD);
graph.removeEdge(nodeA, nodeB);
assert(graph.hasNode(nodeA));
assert(graph.hasNode(nodeD));
assert(!graph.hasNode(nodeB));
assert(!graph.hasNode(nodeC));
assert.deepEqual(
[...graph.getAllEdges()],
[{from: nodeA, to: nodeD, type: 1}],
);
});
it('removing a node recursively deletes orphaned nodes', () => {
// before:
// a
// / \
// b c
// / \ \
// d e f
// /
// g
//
// after:
// a
// \
// c
// \
// f
let graph = new Graph();
let nodeA = graph.addNode('a');
graph.setRootNodeId(nodeA);
let nodeB = graph.addNode('b');
let nodeC = graph.addNode('c');
let nodeD = graph.addNode('d');
let nodeE = graph.addNode('e');
let nodeF = graph.addNode('f');
let nodeG = graph.addNode('g');
graph.addEdge(nodeA, nodeB);
graph.addEdge(nodeA, nodeC);
graph.addEdge(nodeB, nodeD);
graph.addEdge(nodeB, nodeE);
graph.addEdge(nodeC, nodeF);
graph.addEdge(nodeD, nodeG);
graph.removeNode(nodeB);
assert.deepEqual(graph.nodes.filter(Boolean), ['a', 'c', 'f']);
assert.deepEqual(Array.from(graph.getAllEdges()), [
{from: nodeA, to: nodeC, type: 1},
{from: nodeC, to: nodeF, type: 1},
]);
});
it('removing a node recursively deletes orphaned nodes if there is no path to the root', () => {
// before:
// a
// / \
// b c
// / \ \
// |-d e f
// |/
// g
//
// after:
// a
// \
// c
// \
// f
let graph = new Graph();
let nodeA = graph.addNode('a');
let nodeB = graph.addNode('b');
let nodeC = graph.addNode('c');
let nodeD = graph.addNode('d');
let nodeE = graph.addNode('e');
let nodeF = graph.addNode('f');
let nodeG = graph.addNode('g');
graph.setRootNodeId(nodeA);
graph.addEdge(nodeA, nodeB);
graph.addEdge(nodeA, nodeC);
graph.addEdge(nodeB, nodeD);
graph.addEdge(nodeG, nodeD);
graph.addEdge(nodeB, nodeE);
graph.addEdge(nodeC, nodeF);
graph.addEdge(nodeD, nodeG);
graph.removeNode(nodeB);
assert.deepEqual(graph.nodes.filter(Boolean), ['a', 'c', 'f']);
assert.deepEqual(Array.from(graph.getAllEdges()), [
{from: nodeA, to: nodeC, type: 1},
{from: nodeC, to: nodeF, type: 1},
]);
});
it('removing an edge to a node that cycles does not remove it if there is a path to the root', () => {
// a
// |
// b <----
// / \ |
// c d |
// \ / |
// e -----
let graph = new Graph();
let nodeA = graph.addNode('a');
let nodeB = graph.addNode('b');
let nodeC = graph.addNode('c');
let nodeD = graph.addNode('d');
let nodeE = graph.addNode('e');
graph.setRootNodeId(nodeA);
graph.addEdge(nodeA, nodeB);
graph.addEdge(nodeB, nodeC);
graph.addEdge(nodeB, nodeD);
graph.addEdge(nodeC, nodeE);
graph.addEdge(nodeD, nodeE);
graph.addEdge(nodeE, nodeB);
const getNodeIds = () => [...graph.nodes.keys()];
let nodesBefore = getNodeIds();
graph.removeEdge(nodeC, nodeE);
assert.deepEqual(nodesBefore, getNodeIds());
assert.deepEqual(Array.from(graph.getAllEdges()), [
{from: nodeA, to: nodeB, type: 1},
{from: nodeB, to: nodeC, type: 1},
{from: nodeB, to: nodeD, type: 1},
{from: nodeD, to: nodeE, type: 1},
{from: nodeE, to: nodeB, type: 1},
]);
});
it('removing a node with only one inbound edge does not cause it to be removed as an orphan', () => {
let graph = new Graph();
let nodeA = graph.addNode('a');
let nodeB = graph.addNode('b');
graph.setRootNodeId(nodeA);
graph.addEdge(nodeA, nodeB);
let spy = sinon.spy(graph, 'removeNode');
try {
graph.removeNode(nodeB);
assert(spy.calledOnceWithExactly(nodeB));
} finally {
spy.restore();
}
});
it("replaceNodeIdsConnectedTo should update a node's downstream nodes", () => {
let graph = new Graph();
let nodeA = graph.addNode('a');
graph.setRootNodeId(nodeA);
let nodeB = graph.addNode('b');
let nodeC = graph.addNode('c');
graph.addEdge(nodeA, nodeB);
graph.addEdge(nodeA, nodeC);
let nodeD = graph.addNode('d');
graph.replaceNodeIdsConnectedTo(nodeA, [nodeB, nodeD]);
assert(graph.hasNode(nodeA));
assert(graph.hasNode(nodeB));
assert(!graph.hasNode(nodeC));
assert(graph.hasNode(nodeD));
assert.deepEqual(Array.from(graph.getAllEdges()), [
{from: nodeA, to: nodeB, type: 1},
{from: nodeA, to: nodeD, type: 1},
]);
});
it('traverses along edge types if a filter is given', () => {
let graph = new Graph();
let nodeA = graph.addNode('a');
let nodeB = graph.addNode('b');
let nodeC = graph.addNode('c');
let nodeD = graph.addNode('d');
graph.addEdge(nodeA, nodeB, 2);
graph.addEdge(nodeA, nodeD);
graph.addEdge(nodeB, nodeC);
graph.addEdge(nodeB, nodeD, 2);
graph.setRootNodeId(nodeA);
let visited = [];
graph.traverse(
nodeId => {
visited.push(nodeId);
},
null, // use root as startNode
2,
);
assert.deepEqual(visited, [nodeA, nodeB, nodeD]);
});
it('correctly removes non-tree subgraphs', () => {
let graph = new Graph();
let nodeRoot = graph.addNode('root');
let node1 = graph.addNode('1');
let node2 = graph.addNode('2');
let node3 = graph.addNode('3');
graph.addEdge(nodeRoot, node1);
graph.addEdge(node1, node2);
graph.addEdge(node1, node3);
graph.addEdge(node2, node3);
graph.setRootNodeId(nodeRoot);
graph.removeNode(node1);
assert.deepEqual(graph.nodes.filter(Boolean), ['root']);
assert.deepStrictEqual(Array.from(graph.getAllEdges()), []);
});
});


@@ -0,0 +1,20 @@
require('@parcel/babel-register');
const {parentPort} = require('worker_threads');
const {
default: AdjacencyList,
NodeTypeMap,
EdgeTypeMap,
} = require('../../src/AdjacencyList');
parentPort.once('message', (serialized) => {
let graph = AdjacencyList.deserialize(serialized);
serialized.nodes.forEach((v, i) => {
if (i < NodeTypeMap.HEADER_SIZE) return;
serialized.nodes[i] = v * 2;
});
serialized.edges.forEach((v, i) => {
if (i < EdgeTypeMap.HEADER_SIZE) return;
serialized.edges[i] = v * 2;
});
parentPort.postMessage(graph.serialize());
});