fix: 120 type errors
All checks were successful
Deploy to GitHub Pages / build_site (push) Successful in 2m47s
@@ -52,7 +52,7 @@
if (event.key === "Enter") {
if (activeNodeId && position) {
graph.createNode({ type: activeNodeId, position });
graph.createNode({ type: activeNodeId, position, props: {} });
position = null;
}
return;
@@ -1,21 +0,0 @@
<script lang="ts">
import type { Hst } from "@histoire/plugin-svelte";
export let Hst: Hst;
import Background from "./Background.svelte";
import { Canvas } from "@threlte/core";
import Camera from "../Camera.svelte";
let width = globalThis.innerWidth || 100;
let height = globalThis.innerHeight || 100;

let cameraPosition: [number, number, number] = [0, 1, 0];
</script>

<svelte:window bind:innerWidth={width} bind:innerHeight={height} />

<Hst.Story>
<Canvas shadows={false}>
<Camera bind:position={cameraPosition} />

<Background {cameraPosition} {width} {height} />
</Canvas>
</Hst.Story>
@@ -11,7 +11,7 @@ import { fastHashString } from "@nodes/utils";
import { SvelteMap } from "svelte/reactivity";
import EventEmitter from "./helpers/EventEmitter";
import { createLogger } from "./helpers/index";
import throttle from "./helpers/throttle";
import throttle from "$lib/helpers/throttle";
import { HistoryManager } from "./history-manager";

const logger = createLogger("graph-manager");

@@ -24,7 +24,7 @@ const clone =
function areSocketsCompatible(
output: string | undefined,
inputs: string | string[] | undefined,
inputs: string | (string | undefined)[] | undefined,
) {
if (Array.isArray(inputs) && output) {
return inputs.includes(output);
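Note: a minimal sketch of how the widened inputs signature behaves (hypothetical socket types; only the array branch is visible in this hunk):

// Sockets may now list undefined entries among their accepted input types.
const accepted: (string | undefined)[] = ["geometry", undefined];
areSocketsCompatible("geometry", accepted); // true  – output type is in the list
areSocketsCompatible("float", accepted);    // false – not listed
areSocketsCompatible("float", undefined);   // handled by the non-array branch (not shown here)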
@@ -99,7 +99,6 @@ export class GraphManager extends EventEmitter<{
|
||||
|
||||
private lastSettingsHash = 0;
|
||||
setSettings(settings: Record<string, unknown>) {
|
||||
console.log("GraphManager.setSettings", settings);
|
||||
let hash = fastHashString(JSON.stringify(settings));
|
||||
if (hash === this.lastSettingsHash) return;
|
||||
this.lastSettingsHash = hash;
|
||||
@@ -154,7 +153,7 @@ export class GraphManager extends EventEmitter<{
|
||||
|
||||
private _init(graph: Graph) {
|
||||
const nodes = new Map(
|
||||
graph.nodes.map((node) => {
|
||||
graph.nodes.map((node: Node) => {
|
||||
const nodeType = this.registry.getNode(node.type);
|
||||
if (nodeType) {
|
||||
node.tmp = {
|
||||
|
||||
@@ -23,8 +23,6 @@
|
||||
invalidate();
|
||||
});
|
||||
|
||||
$effect(() => console.log({ nodes }));
|
||||
|
||||
const graphState = getGraphState();
|
||||
|
||||
const isNodeInView = getContext<(n: NodeType) => boolean>("isNodeInView");
|
||||
@@ -50,8 +48,6 @@
|
||||
}),
|
||||
);
|
||||
|
||||
const nodeArray = $derived(Array.from(nodes.values()));
|
||||
|
||||
onMount(() => {
|
||||
for (const node of nodes.values()) {
|
||||
if (node?.tmp?.ref) {
|
||||
@@ -86,9 +82,9 @@
|
||||
style:transform={`scale(${cameraPosition[2] * 0.1})`}
|
||||
class:hovering-sockets={graphState.activeSocket}
|
||||
>
|
||||
{#each nodeArray as node, i (node.id)}
|
||||
{#each nodes.values() as node (node.id)}
|
||||
<Node
|
||||
bind:node={nodeArray[i]}
|
||||
{node}
|
||||
inView={cameraPosition && isNodeInView(node)}
|
||||
z={cameraPosition[2]}
|
||||
/>
|
||||
|
||||
@@ -1,15 +1,15 @@
import throttle from './throttle.js';
import throttle from "$lib/helpers/throttle";

type EventMap = Record<string, unknown>;
type EventKey<T extends EventMap> = string & keyof T;
type EventReceiver<T> = (params: T, stuff?: Record<string, unknown>) => unknown;

export default class EventEmitter<T extends EventMap = { [key: string]: unknown }> {
export default class EventEmitter<
T extends EventMap = { [key: string]: unknown },
> {
index = 0;
public eventMap: T = {} as T;
constructor() {
}
constructor() {}

private cbs: { [key: string]: ((data?: unknown) => unknown)[] } = {};
private cbsOnce: { [key: string]: ((data?: unknown) => unknown)[] } = {};
@@ -29,7 +29,11 @@ export default class EventEmitter<T extends EventMap = { [key: string]: unknown
}
}

public on<K extends EventKey<T>>(event: K, cb: EventReceiver<T[K]>, throttleTimer = 0) {
public on<K extends EventKey<T>>(
event: K,
cb: EventReceiver<T[K]>,
throttleTimer = 0,
) {
if (throttleTimer > 0) cb = throttle(cb, throttleTimer);
const cbs = Object.assign(this.cbs, {
[event]: [...(this.cbs[event] || []), cb],
@@ -38,7 +42,7 @@ export default class EventEmitter<T extends EventMap = { [key: string]: unknown
// console.log('New EventEmitter ', this.constructor.name);
return () => {
cbs[event]?.splice(cbs[event].indexOf(cb), 1);
this.cbs[event]?.splice(cbs[event].indexOf(cb), 1);
};
}
@@ -48,10 +52,17 @@ export default class EventEmitter<T extends EventMap = { [key: string]: unknown
* @param {function} cb Listener, gets called everytime the event is emitted
* @returns {function} Returns a function which removes the listener when called
*/
public once<K extends EventKey<T>>(event: K, cb: EventReceiver<T[K]>): () => void {
this.cbsOnce[event] = [...(this.cbsOnce[event] || []), cb];
public once<K extends EventKey<T>>(
event: K,
cb: EventReceiver<T[K]>,
): () => void {
const cbsOnce = Object.assign(this.cbsOnce, {
[event]: [...(this.cbsOnce[event] || []), cb],
});
this.cbsOnce = cbsOnce;

return () => {
this.cbsOnce[event].splice(this.cbsOnce[event].indexOf(cb), 1);
cbsOnce[event]?.splice(cbsOnce[event].indexOf(cb), 1);
};
}
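Note: a usage sketch of the typed emitter (event names and payloads below are made up for illustration; on takes an optional throttle interval, and both on and once return an unsubscribe function):

type GraphEvents = {
  nodeAdded: { id: number };
  settingsChanged: Record<string, unknown>;
};

const emitter = new EventEmitter<GraphEvents>();

// Plain listener; calling the returned function removes it again.
const off = emitter.on("nodeAdded", ({ id }) => console.log("added node", id));

// Throttled listener: invoked at most once per 100 ms.
emitter.on("settingsChanged", (settings) => console.log("settings", settings), 100);

// One-shot listener.
emitter.once("nodeAdded", ({ id }) => console.log("first node", id));

off();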
@@ -1,20 +0,0 @@
export default <R, A extends any[]>(
fn: (...args: A) => R,
delay: number
): ((...args: A) => R) => {
let wait = false;

return (...args: A) => {
if (wait) return undefined;

const val = fn(...args);

wait = true;

setTimeout(() => {
wait = false;
}, delay);

return val;
}
};
@@ -2,23 +2,22 @@ import { create, type Delta } from "jsondiffpatch";
|
||||
import type { Graph } from "@nodes/types";
|
||||
import { createLogger, clone } from "./helpers/index.js";
|
||||
|
||||
|
||||
const diff = create({
|
||||
objectHash: function (obj, index) {
|
||||
if (obj === null) return obj;
|
||||
if ("id" in obj) return obj.id;
|
||||
if ("id" in obj) return obj.id as string;
|
||||
if ("_id" in obj) return obj._id as string;
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.join("-")
|
||||
return obj.join("-");
|
||||
}
|
||||
return obj?.id || obj._id || '$$index:' + index;
|
||||
}
|
||||
})
|
||||
return "$$index:" + index;
|
||||
},
|
||||
});
|
||||
|
||||
const log = createLogger("history")
|
||||
const log = createLogger("history");
|
||||
log.mute();
|
||||
|
||||
export class HistoryManager {
|
||||
|
||||
index: number = -1;
|
||||
history: Delta[] = [];
|
||||
private initialState: Graph | undefined;
|
||||
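Note: for context, a minimal sketch of how the configured patcher is typically used (standard jsondiffpatch API; the graphs below are made-up stand-ins):

import { create } from "jsondiffpatch";

const patcher = create({ objectHash: (obj: any) => obj.id ?? obj._id });

const before = { nodes: [{ id: 1, type: "box" }] };
const after = { nodes: [{ id: 1, type: "box" }, { id: 2, type: "output" }] };

const delta = patcher.diff(before, after); // undefined when nothing changed
if (delta) {
  const redone = patcher.patch(structuredClone(before), delta);  // re-apply the change
  const undone = patcher.unpatch(structuredClone(after), delta); // roll it back
}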
@@ -27,26 +26,25 @@ export class HistoryManager {
|
||||
private opts = {
|
||||
debounce: 400,
|
||||
maxHistory: 100,
|
||||
}
|
||||
};
|
||||
|
||||
constructor({ maxHistory = 100, debounce = 100 } = {}) {
|
||||
this.history = [];
|
||||
this.index = -1;
|
||||
this.opts.debounce = debounce;
|
||||
this.opts.maxHistory = maxHistory;
|
||||
globalThis["_history"] = this;
|
||||
}
|
||||
|
||||
save(state: Graph) {
|
||||
if (!this.state) {
|
||||
this.state = clone(state);
|
||||
this.initialState = this.state;
|
||||
log.log("initial state saved")
|
||||
log.log("initial state saved");
|
||||
} else {
|
||||
const newState = state;
|
||||
const delta = diff.diff(this.state, newState);
|
||||
if (delta) {
|
||||
log.log("saving state")
|
||||
log.log("saving state");
|
||||
// Add the delta to history
|
||||
if (this.index < this.history.length - 1) {
|
||||
// Clear the history after the current index if new changes are made
|
||||
@@ -62,7 +60,7 @@ export class HistoryManager {
|
||||
}
|
||||
this.state = newState;
|
||||
} else {
|
||||
log.log("no changes")
|
||||
log.log("no changes");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -76,7 +74,7 @@ export class HistoryManager {
|
||||
|
||||
undo() {
|
||||
if (this.index === -1 && this.initialState) {
|
||||
log.log("reached start, loading initial state")
|
||||
log.log("reached start, loading initial state");
|
||||
return clone(this.initialState);
|
||||
} else {
|
||||
const delta = this.history[this.index];
|
||||
@@ -96,7 +94,7 @@ export class HistoryManager {
|
||||
this.state = nextState;
|
||||
return clone(nextState);
|
||||
} else {
|
||||
log.log("reached end")
|
||||
log.log("reached end");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
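Note: a rough usage sketch of the manager (a redo counterpart is implied by the "reached end" branch above but is not shown in this diff; graph here stands for the caller's current graph object):

const history = new HistoryManager({ maxHistory: 100, debounce: 400 });

history.save(graph); // first call stores the initial state
// ...mutate the graph, then save again; only non-empty deltas are recorded
history.save(graph);

const previous = history.undo(); // cloned snapshot, or the initial state once the start is reached
if (previous) {
  // hand the snapshot back to whatever owns the live graph (not part of this commit)
}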
@@ -41,6 +41,7 @@
|
||||
const height = getNodeHeight?.(node.type);
|
||||
|
||||
$effect(() => {
|
||||
if (!node?.tmp) node.tmp = {};
|
||||
node.tmp.mesh = meshRef;
|
||||
});
|
||||
|
||||
|
||||
@@ -27,9 +27,7 @@
|
||||
const zOffset = (node.tmp?.random || 0) * 0.5;
|
||||
const zLimit = 2 - zOffset;
|
||||
|
||||
const type = node?.tmp?.type;
|
||||
|
||||
const parameters = Object.entries(type?.inputs || {}).filter(
|
||||
const parameters = Object.entries(node?.tmp?.type?.inputs || {}).filter(
|
||||
(p) =>
|
||||
p[1].type !== "seed" && !("setting" in p[1]) && p[1]?.hidden !== true,
|
||||
);
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
};
|
||||
|
||||
const {
|
||||
node,
|
||||
node = $bindable(),
|
||||
input,
|
||||
id,
|
||||
elementId = `input-${Math.random().toString(36).substring(7)}`,
|
||||
|
||||
@@ -82,7 +82,7 @@
|
||||
class:disabled={!graphState?.possibleSocketIds.has(socketId)}
|
||||
>
|
||||
{#key id && graphId}
|
||||
<div class="content" class:disabled={graph.inputSockets?.has(socketId)}>
|
||||
<div class="content" class:disabled={graph?.inputSockets?.has(socketId)}>
|
||||
{#if inputType.label !== ""}
|
||||
<label for={elementId}>{input.label || id}</label>
|
||||
{/if}
|
||||
|
||||
@@ -7,8 +7,6 @@
|
||||
|
||||
const { children } = $props();
|
||||
|
||||
console.log("RowChildren", children);
|
||||
|
||||
let registerIndex = 0;
|
||||
setContext("registerCell", function () {
|
||||
let index = registerIndex;
|
||||
|
||||
@@ -1,20 +1,19 @@
export default <R, A extends any[]>(
fn: (...args: A) => R,
delay: number
): ((...args: A) => R) => {
let wait = false;
export default <T extends unknown[]>(
callback: (...args: T) => void,
delay: number,
) => {
let isWaiting = false;

return (...args: A) => {
if (wait) return undefined;
return (...args: T) => {
if (isWaiting) {
return;
}

const val = fn(...args);

wait = true;
callback(...args);
isWaiting = true;

setTimeout(() => {
wait = false;
isWaiting = false;
}, delay);

return val;
}
};
};
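Note: a small usage sketch of the rewritten helper — the wrapped function now returns void and simply drops calls that land inside the delay window:

import throttle from "$lib/helpers/throttle";

// Log camera moves at most every 250 ms.
const logMove = throttle((x: number, y: number) => {
  console.log("camera moved to", x, y);
}, 250);

logMove(1, 2); // runs immediately
logMove(3, 4); // ignored while the 250 ms window is still open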
@@ -1,6 +1,6 @@
|
||||
import { createWasmWrapper } from "@nodes/utils"
|
||||
import fs from "fs/promises"
|
||||
import path from "path"
|
||||
import { createWasmWrapper } from "@nodes/utils";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
|
||||
export async function getWasm(id: `${string}/${string}/${string}`) {
|
||||
const filePath = path.resolve(`../nodes/${id}/pkg/index_bg.wasm`);
|
||||
@@ -8,17 +8,15 @@ export async function getWasm(id: `${string}/${string}/${string}`) {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
} catch (e) {
|
||||
return null
|
||||
return null;
|
||||
}
|
||||
|
||||
const file = await fs.readFile(filePath);
|
||||
|
||||
return new Uint8Array(file);
|
||||
|
||||
}
|
||||
|
||||
export async function getNodeWasm(id: `${string}/${string}/${string}`) {
|
||||
|
||||
const wasmBytes = await getWasm(id);
|
||||
if (!wasmBytes) return null;
|
||||
|
||||
@@ -27,9 +25,7 @@ export async function getNodeWasm(id: `${string}/${string}/${string}`) {
|
||||
return wrapper;
|
||||
}
|
||||
|
||||
|
||||
export async function getNode(id: `${string}/${string}/${string}`) {
|
||||
|
||||
const wrapper = await getNodeWasm(id);
|
||||
|
||||
const definition = wrapper?.get_definition?.();
|
||||
@@ -37,18 +33,17 @@ export async function getNode(id: `${string}/${string}/${string}`) {
|
||||
if (!definition) return null;
|
||||
|
||||
return definition;
|
||||
|
||||
}
|
||||
|
||||
export async function getCollectionNodes(userId: `${string}/${string}`) {
|
||||
const nodes = await fs.readdir(path.resolve(`../nodes/${userId}`));
|
||||
return nodes
|
||||
.filter(n => n !== "pkg" && n !== ".template")
|
||||
.map(n => {
|
||||
.filter((n) => n !== "pkg" && n !== ".template")
|
||||
.map((n) => {
|
||||
return {
|
||||
id: `${userId}/${n}`,
|
||||
}
|
||||
})
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export async function getCollection(userId: `${string}/${string}`) {
|
||||
@@ -56,36 +51,40 @@ export async function getCollection(userId: `${string}/${string}`) {
|
||||
return {
|
||||
id: userId,
|
||||
nodes,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export async function getUserCollections(userId: string) {
|
||||
const collections = await fs.readdir(path.resolve(`../nodes/${userId}`));
|
||||
return Promise.all(collections.map(async n => {
|
||||
const nodes = await getCollectionNodes(`${userId}/${n}`);
|
||||
return {
|
||||
id: `${userId}/${n}`,
|
||||
nodes,
|
||||
}
|
||||
}));
|
||||
return Promise.all(
|
||||
collections.map(async (n) => {
|
||||
const nodes = await getCollectionNodes(`${userId}/${n}`);
|
||||
return {
|
||||
id: `${userId}/${n}`,
|
||||
nodes,
|
||||
};
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
export async function getUser(userId: string) {
|
||||
const collections = await getUserCollections(userId);
|
||||
return {
|
||||
id: userId,
|
||||
collections
|
||||
}
|
||||
collections,
|
||||
};
|
||||
}
|
||||
|
||||
export async function getUsers() {
|
||||
const nodes = await fs.readdir(path.resolve("../nodes"));
|
||||
const users = await Promise.all(nodes.map(async n => {
|
||||
const collections = await getUserCollections(n);
|
||||
return {
|
||||
id: n,
|
||||
collections
|
||||
}
|
||||
}))
|
||||
const users = await Promise.all(
|
||||
nodes.map(async (n) => {
|
||||
const collections = await getUserCollections(n);
|
||||
return {
|
||||
id: n,
|
||||
collections,
|
||||
};
|
||||
}),
|
||||
);
|
||||
return users;
|
||||
}
|
||||
|
||||
@@ -27,10 +27,12 @@
|
||||
function constructPath() {
|
||||
max = max !== undefined ? max : Math.max(...points);
|
||||
min = min !== undefined ? min : Math.min(...points);
|
||||
const mi = min as number;
|
||||
const ma = max as number;
|
||||
return points
|
||||
.map((point, i) => {
|
||||
const x = (i / (points.length - 1)) * 100;
|
||||
const y = 100 - ((point - min) / (max - min)) * 100;
|
||||
const y = 100 - ((point - mi) / (ma - mi)) * 100;
|
||||
return `${x},${y}`;
|
||||
})
|
||||
.join(" ");
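Note: a quick worked example of the mapping above — with points = [1, 3, 2] the derived min/max are 1 and 3, and the polyline string becomes "0,100 50,0 100,50" (SVG y grows downward, so the maximum maps to y = 0):

// i = 0 → x = 0,   y = 100 - ((1 - 1) / 2) * 100 = 100
// i = 1 → x = 50,  y = 100 - ((3 - 1) / 2) * 100 = 0
// i = 2 → x = 100, y = 100 - ((2 - 1) / 2) * 100 = 50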
|
||||
|
||||
@@ -42,11 +42,9 @@
|
||||
export const invalidate = function () {
|
||||
if (scene) {
|
||||
geometries = scene.children
|
||||
.filter(
|
||||
(child) => "geometry" in child && child.isObject3D && child.geometry,
|
||||
)
|
||||
.filter((child) => "geometry" in child && child.isObject3D)
|
||||
.map((child) => {
|
||||
return child.geometry;
|
||||
return (child as Mesh).geometry;
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,20 +1,27 @@
|
||||
import { fastHashArrayBuffer } from "@nodes/utils";
|
||||
import { BufferAttribute, BufferGeometry, Float32BufferAttribute, Group, InstancedMesh, Material, Matrix4, Mesh } from "three";
|
||||
import {
|
||||
BufferAttribute,
|
||||
BufferGeometry,
|
||||
Float32BufferAttribute,
|
||||
Group,
|
||||
InstancedMesh,
|
||||
Material,
|
||||
Matrix4,
|
||||
Mesh,
|
||||
} from "three";
|
||||
|
||||
function fastArrayHash(arr: ArrayBuffer) {
|
||||
let ints = new Uint8Array(arr);
|
||||
function fastArrayHash(arr: Int32Array) {
|
||||
const sampleDistance = Math.max(Math.floor(arr.length / 100), 1);
|
||||
const sampleCount = Math.floor(arr.length / sampleDistance);
|
||||
|
||||
const sampleDistance = Math.max(Math.floor(ints.length / 100), 1);
|
||||
const sampleCount = Math.floor(ints.length / sampleDistance);
|
||||
|
||||
let hash = new Uint8Array(sampleCount);
|
||||
let hash = new Int32Array(sampleCount);
|
||||
|
||||
for (let i = 0; i < sampleCount; i++) {
|
||||
const index = i * sampleDistance;
|
||||
hash[i] = ints[index];
|
||||
hash[i] = arr[index];
|
||||
}
|
||||
|
||||
return fastHashArrayBuffer(hash.buffer);
|
||||
return fastHashArrayBuffer(hash);
|
||||
}
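Note: the helper above hashes only ~100 evenly spaced samples instead of every element, trading collision resistance for speed on large geometry buffers. A standalone sketch of the same idea (not the exported helper itself):

function sampleHash(data: Int32Array, samples = 100): number {
  const step = Math.max(Math.floor(data.length / samples), 1);
  let hash = 0;
  for (let i = 0; i < data.length; i += step) {
    hash = (hash * 31 + data[i]) | 0; // simple 32-bit rolling hash over the samples
  }
  return hash;
}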
|
||||
|
||||
export function createGeometryPool(parentScene: Group, material: Material) {
|
||||
@@ -26,8 +33,10 @@ export function createGeometryPool(parentScene: Group, material: Material) {
|
||||
let totalVertices = 0;
|
||||
let totalFaces = 0;
|
||||
|
||||
function updateSingleGeometry(data: Int32Array, existingMesh: Mesh | null = null) {
|
||||
|
||||
function updateSingleGeometry(
|
||||
data: Int32Array,
|
||||
existingMesh: Mesh | null = null,
|
||||
) {
|
||||
let hash = fastArrayHash(data);
|
||||
|
||||
let geometry = existingMesh ? existingMesh.geometry : new BufferGeometry();
|
||||
@@ -50,11 +59,7 @@ export function createGeometryPool(parentScene: Group, material: Material) {
|
||||
index = indicesEnd;
|
||||
|
||||
// Vertices
|
||||
const vertices = new Float32Array(
|
||||
data.buffer,
|
||||
index * 4,
|
||||
vertexCount * 3,
|
||||
);
|
||||
const vertices = new Float32Array(data.buffer, index * 4, vertexCount * 3);
|
||||
index = index + vertexCount * 3;
|
||||
|
||||
let posAttribute = geometry.getAttribute(
|
||||
@@ -71,11 +76,7 @@ export function createGeometryPool(parentScene: Group, material: Material) {
|
||||
);
|
||||
}
|
||||
|
||||
const normals = new Float32Array(
|
||||
data.buffer,
|
||||
index * 4,
|
||||
vertexCount * 3,
|
||||
);
|
||||
const normals = new Float32Array(data.buffer, index * 4, vertexCount * 3);
|
||||
index = index + vertexCount * 3;
|
||||
|
||||
if (
|
||||
@@ -109,11 +110,8 @@ export function createGeometryPool(parentScene: Group, material: Material) {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return {
|
||||
update(
|
||||
newData: Int32Array[],
|
||||
) {
|
||||
update(newData: Int32Array[]) {
|
||||
totalVertices = 0;
|
||||
totalFaces = 0;
|
||||
for (let i = 0; i < Math.max(newData.length, meshes.length); i++) {
|
||||
@@ -127,11 +125,14 @@ export function createGeometryPool(parentScene: Group, material: Material) {
|
||||
}
|
||||
}
|
||||
return { totalVertices, totalFaces };
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function createInstancedGeometryPool(parentScene: Group, material: Material) {
|
||||
export function createInstancedGeometryPool(
|
||||
parentScene: Group,
|
||||
material: Material,
|
||||
) {
|
||||
const scene = new Group();
|
||||
parentScene.add(scene);
|
||||
|
||||
@@ -139,19 +140,25 @@ export function createInstancedGeometryPool(parentScene: Group, material: Materi
|
||||
let totalVertices = 0;
|
||||
let totalFaces = 0;
|
||||
|
||||
function updateSingleInstance(data: Int32Array, existingInstance: InstancedMesh | null = null) {
|
||||
|
||||
function updateSingleInstance(
|
||||
data: Int32Array,
|
||||
existingInstance: InstancedMesh | null = null,
|
||||
) {
|
||||
let hash = fastArrayHash(data);
|
||||
|
||||
let geometry = existingInstance ? existingInstance.geometry : new BufferGeometry();
|
||||
let geometry = existingInstance
|
||||
? existingInstance.geometry
|
||||
: new BufferGeometry();
|
||||
|
||||
// Extract data from the encoded array
|
||||
let index = 0;
|
||||
const geometryType = data[index++];
|
||||
// const geometryType = data[index++];
|
||||
index++;
|
||||
const vertexCount = data[index++];
|
||||
const faceCount = data[index++];
|
||||
const instanceCount = data[index++];
|
||||
const stemDepth = data[index++];
|
||||
// const stemDepth = data[index++];
|
||||
index++;
|
||||
totalVertices += vertexCount * instanceCount;
|
||||
totalFaces += faceCount * instanceCount;
|
||||
|
||||
@@ -168,11 +175,7 @@ export function createInstancedGeometryPool(parentScene: Group, material: Materi
|
||||
}
|
||||
|
||||
// Vertices
|
||||
const vertices = new Float32Array(
|
||||
data.buffer,
|
||||
index * 4,
|
||||
vertexCount * 3,
|
||||
);
|
||||
const vertices = new Float32Array(data.buffer, index * 4, vertexCount * 3);
|
||||
index = index + vertexCount * 3;
|
||||
let posAttribute = geometry.getAttribute(
|
||||
"position",
|
||||
@@ -187,11 +190,7 @@ export function createInstancedGeometryPool(parentScene: Group, material: Materi
|
||||
);
|
||||
}
|
||||
|
||||
const normals = new Float32Array(
|
||||
data.buffer,
|
||||
index * 4,
|
||||
vertexCount * 3,
|
||||
);
|
||||
const normals = new Float32Array(data.buffer, index * 4, vertexCount * 3);
|
||||
index = index + vertexCount * 3;
|
||||
const normalsAttribute = geometry.getAttribute(
|
||||
"normal",
|
||||
@@ -203,20 +202,23 @@ export function createInstancedGeometryPool(parentScene: Group, material: Materi
|
||||
geometry.setAttribute("normal", new Float32BufferAttribute(normals, 3));
|
||||
}
|
||||
|
||||
if (existingInstance && instanceCount > existingInstance.geometry.userData.count) {
|
||||
console.log("recreating instance")
|
||||
if (
|
||||
existingInstance &&
|
||||
instanceCount > existingInstance.geometry.userData.count
|
||||
) {
|
||||
console.log("recreating instance");
|
||||
scene.remove(existingInstance);
|
||||
instances.splice(instances.indexOf(existingInstance), 1);
|
||||
existingInstance = new InstancedMesh(geometry, material, instanceCount);
|
||||
scene.add(existingInstance)
|
||||
instances.push(existingInstance)
|
||||
scene.add(existingInstance);
|
||||
instances.push(existingInstance);
|
||||
} else if (!existingInstance) {
|
||||
console.log("creating instance")
|
||||
console.log("creating instance");
|
||||
existingInstance = new InstancedMesh(geometry, material, instanceCount);
|
||||
scene.add(existingInstance)
|
||||
instances.push(existingInstance)
|
||||
scene.add(existingInstance);
|
||||
instances.push(existingInstance);
|
||||
} else {
|
||||
console.log("updating instance")
|
||||
console.log("updating instance");
|
||||
existingInstance.count = instanceCount;
|
||||
}
|
||||
|
||||
@@ -225,28 +227,31 @@ export function createInstancedGeometryPool(parentScene: Group, material: Materi
|
||||
const matrices = new Float32Array(
|
||||
data.buffer,
|
||||
index * 4,
|
||||
instanceCount * 16);
|
||||
instanceCount * 16,
|
||||
);
|
||||
|
||||
for (let i = 0; i < instanceCount; i++) {
|
||||
const matrix = new Matrix4().fromArray(matrices.subarray(i * 16, i * 16 + 16));
|
||||
const matrix = new Matrix4().fromArray(
|
||||
matrices.subarray(i * 16, i * 16 + 16),
|
||||
);
|
||||
existingInstance.setMatrixAt(i, matrix);
|
||||
}
|
||||
|
||||
geometry.userData = {
|
||||
vertexCount,
|
||||
faceCount,
|
||||
count: Math.max(instanceCount, existingInstance.geometry.userData.count || 0),
|
||||
count: Math.max(
|
||||
instanceCount,
|
||||
existingInstance.geometry.userData.count || 0,
|
||||
),
|
||||
hash,
|
||||
};
|
||||
|
||||
existingInstance.instanceMatrix.needsUpdate = true;
|
||||
|
||||
}
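Note: for context, this follows the standard three.js instancing pattern — write one matrix per instance, then flag the instance buffer for upload. A self-contained example (not code from this commit):

import { InstancedMesh, Matrix4, MeshStandardMaterial, SphereGeometry } from "three";

const mesh = new InstancedMesh(new SphereGeometry(0.1), new MeshStandardMaterial(), 64);
const matrix = new Matrix4();
for (let i = 0; i < mesh.count; i++) {
  matrix.setPosition(i * 0.5, 0, 0);
  mesh.setMatrixAt(i, matrix);
}
mesh.instanceMatrix.needsUpdate = true; // required after setMatrixAt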
|
||||
|
||||
return {
|
||||
update(
|
||||
newData: Int32Array[],
|
||||
) {
|
||||
update(newData: Int32Array[]) {
|
||||
totalVertices = 0;
|
||||
totalFaces = 0;
|
||||
for (let i = 0; i < Math.max(newData.length, instances.length); i++) {
|
||||
@@ -260,6 +265,6 @@ export function createInstancedGeometryPool(parentScene: Group, material: Materi
|
||||
}
|
||||
}
|
||||
return { totalVertices, totalFaces };
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,12 +1,26 @@
|
||||
import type { Graph, NodeDefinition, NodeInput, NodeRegistry, RuntimeExecutor, SyncCache } from "@nodes/types";
|
||||
import { concatEncodedArrays, createLogger, encodeFloat, fastHashArrayBuffer, type PerformanceStore } from "@nodes/utils";
|
||||
import type {
|
||||
Graph,
|
||||
Node,
|
||||
NodeDefinition,
|
||||
NodeInput,
|
||||
NodeRegistry,
|
||||
RuntimeExecutor,
|
||||
SyncCache,
|
||||
} from "@nodes/types";
|
||||
import {
|
||||
concatEncodedArrays,
|
||||
createLogger,
|
||||
encodeFloat,
|
||||
fastHashArrayBuffer,
|
||||
type PerformanceStore,
|
||||
} from "@nodes/utils";
|
||||
|
||||
const log = createLogger("runtime-executor");
|
||||
log.mute()
|
||||
log.mute();
|
||||
|
||||
function getValue(input: NodeInput, value?: unknown) {
|
||||
if (value === undefined && "value" in input) {
|
||||
value = input.value
|
||||
value = input.value;
|
||||
}
|
||||
|
||||
if (input.type === "float") {
|
||||
@@ -15,7 +29,13 @@ function getValue(input: NodeInput, value?: unknown) {
if (Array.isArray(value)) {
if (input.type === "vec3") {
return [0, value.length + 1, ...value.map(v => encodeFloat(v)), 1, 1] as number[];
return [
0,
value.length + 1,
...value.map((v) => encodeFloat(v)),
1,
1,
] as number[];
}
return [0, value.length + 1, ...value, 1, 1] as number[];
}
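Note: as the branch above shows, array inputs are framed as a flat number stream — a leading 0, then length + 1, then the payload (vec3 components go through encodeFloat), then a 1, 1 terminator. A hypothetical call:

// illustrative only; encodeFloat comes from @nodes/utils
getValue({ type: "vec3" } as NodeInput, [1, 2, 3]);
// → [0, 4, encodeFloat(1), encodeFloat(2), encodeFloat(3), 1, 1]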
@@ -36,22 +56,23 @@ function getValue(input: NodeInput, value?: unknown) {
|
||||
}
|
||||
|
||||
export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
|
||||
private definitionMap: Map<string, NodeDefinition> = new Map();
|
||||
|
||||
private randomSeed = Math.floor(Math.random() * 100000000);
|
||||
|
||||
perf?: PerformanceStore;
|
||||
|
||||
constructor(private registry: NodeRegistry, private cache?: SyncCache<Int32Array>) { }
|
||||
constructor(
|
||||
private registry: NodeRegistry,
|
||||
private cache?: SyncCache<Int32Array>,
|
||||
) {}
|
||||
|
||||
private async getNodeDefinitions(graph: Graph) {
|
||||
|
||||
if (this.registry.status !== "ready") {
|
||||
throw new Error("Node registry is not ready");
|
||||
}
|
||||
|
||||
await this.registry.load(graph.nodes.map(node => node.type));
|
||||
await this.registry.load(graph.nodes.map((node) => node.type));
|
||||
|
||||
const typeMap = new Map<string, NodeDefinition>();
|
||||
for (const node of graph.nodes) {
|
||||
@@ -66,18 +87,22 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
}
|
||||
|
||||
private async addMetaData(graph: Graph) {
|
||||
|
||||
// First, lets check if all nodes have a definition
|
||||
this.definitionMap = await this.getNodeDefinitions(graph);
|
||||
|
||||
const outputNode = graph.nodes.find(node => node.type.endsWith("/output"));
|
||||
const outputNode = graph.nodes.find((node) =>
|
||||
node.type.endsWith("/output"),
|
||||
) as Node;
|
||||
if (!outputNode) {
|
||||
throw new Error("No output node found");
|
||||
}
|
||||
|
||||
outputNode.tmp = outputNode.tmp || {};
|
||||
outputNode.tmp.depth = 0;
|
||||
|
||||
const nodeMap = new Map(graph.nodes.map(node => [node.id, node]));
|
||||
const nodeMap = new Map<number, Node>(
|
||||
graph.nodes.map((node) => [node.id, node]),
|
||||
);
|
||||
|
||||
// loop through all edges and assign the parent and child nodes to each node
|
||||
for (const edge of graph.edges) {
|
||||
@@ -96,7 +121,7 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
}
|
||||
}
|
||||
|
||||
const nodes = []
|
||||
const nodes = [];
|
||||
|
||||
// loop through all the nodes and assign each nodes its depth
|
||||
const stack = [outputNode];
|
||||
@@ -125,7 +150,6 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
}
|
||||
|
||||
async execute(graph: Graph, settings: Record<string, unknown>) {
|
||||
|
||||
this.perf?.addPoint("runtime");
|
||||
|
||||
let a = performance.now();
|
||||
@@ -137,71 +161,74 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
this.perf?.addPoint("collect-metadata", b - a);
|
||||
|
||||
/*
|
||||
* Here we sort the nodes into buckets, which we then execute one by one
|
||||
* +-b2-+-b1-+---b0---+
|
||||
* | | | |
|
||||
* | n3 | n2 | Output |
|
||||
* | n6 | n4 | Level |
|
||||
* | | n5 | |
|
||||
* | | | |
|
||||
* +----+----+--------+
|
||||
*/
|
||||
* Here we sort the nodes into buckets, which we then execute one by one
|
||||
* +-b2-+-b1-+---b0---+
|
||||
* | | | |
|
||||
* | n3 | n2 | Output |
|
||||
* | n6 | n4 | Level |
|
||||
* | | n5 | |
|
||||
* | | | |
|
||||
* +----+----+--------+
|
||||
*/
|
||||
|
||||
// we execute the nodes from the bottom up
|
||||
const sortedNodes = nodes.sort((a, b) => (b.tmp?.depth || 0) - (a.tmp?.depth || 0));
|
||||
const sortedNodes = nodes.sort(
|
||||
(a, b) => (b.tmp?.depth || 0) - (a.tmp?.depth || 0),
|
||||
);
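Note: the depth used for this sort is assigned in addMetaData by walking upstream from the output node, so deeper nodes run first and their results are available to their consumers. A reduced sketch of that pass (assuming tmp.inputNodes holds each node's upstream nodes, as wired up above):

const stack = [outputNode];
while (stack.length) {
  const current = stack.shift()!;
  for (const upstream of Object.values(current.tmp?.inputNodes ?? {})) {
    upstream.tmp = upstream.tmp ?? {};
    upstream.tmp.depth = Math.max(upstream.tmp.depth ?? 0, (current.tmp?.depth ?? 0) + 1);
    stack.push(upstream);
  }
}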
|
||||
|
||||
// here we store the intermediate results of the nodes
|
||||
const results: Record<string, Int32Array> = {};
|
||||
|
||||
for (const node of sortedNodes) {
|
||||
|
||||
const node_type = this.definitionMap.get(node.type)!;
|
||||
|
||||
if (!node_type || !node.tmp || !node_type.execute) {
|
||||
log.warn(`Node ${node.id} has no definition`);
|
||||
continue;
|
||||
};
|
||||
}
|
||||
|
||||
a = performance.now();
|
||||
|
||||
// Collect the inputs for the node
|
||||
const inputs = Object.entries(node_type.inputs || {}).map(([key, input]) => {
|
||||
|
||||
if (input.type === "seed") {
|
||||
if (settings["randomSeed"] === true) {
|
||||
return Math.floor(Math.random() * 100000000)
|
||||
} else {
|
||||
return this.randomSeed
|
||||
const inputs = Object.entries(node_type.inputs || {}).map(
|
||||
([key, input]) => {
|
||||
if (input.type === "seed") {
|
||||
if (settings["randomSeed"] === true) {
|
||||
return Math.floor(Math.random() * 100000000);
|
||||
} else {
|
||||
return this.randomSeed;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If the input is linked to a setting, we use that value
|
||||
if (input.setting) {
|
||||
return getValue(input, settings[input.setting]);
|
||||
}
|
||||
|
||||
// check if the input is connected to another node
|
||||
const inputNode = node.tmp?.inputNodes?.[key];
|
||||
if (inputNode) {
|
||||
if (results[inputNode.id] === undefined) {
|
||||
throw new Error(`Node ${node.type} is missing input from node ${inputNode.type}`);
|
||||
// If the input is linked to a setting, we use that value
|
||||
if (input.setting) {
|
||||
return getValue(input, settings[input.setting]);
|
||||
}
|
||||
return results[inputNode.id];
|
||||
}
|
||||
|
||||
// If the value is stored in the node itself, we use that value
|
||||
if (node.props?.[key] !== undefined) {
|
||||
return getValue(input, node.props[key]);
|
||||
}
|
||||
// check if the input is connected to another node
|
||||
const inputNode = node.tmp?.inputNodes?.[key];
|
||||
if (inputNode) {
|
||||
if (results[inputNode.id] === undefined) {
|
||||
throw new Error(
|
||||
`Node ${node.type} is missing input from node ${inputNode.type}`,
|
||||
);
|
||||
}
|
||||
return results[inputNode.id];
|
||||
}
|
||||
|
||||
return getValue(input);
|
||||
});
|
||||
// If the value is stored in the node itself, we use that value
|
||||
if (node.props?.[key] !== undefined) {
|
||||
return getValue(input, node.props[key]);
|
||||
}
|
||||
|
||||
return getValue(input);
|
||||
},
|
||||
);
|
||||
b = performance.now();
|
||||
|
||||
this.perf?.addPoint("collected-inputs", b - a);
|
||||
|
||||
try {
|
||||
|
||||
a = performance.now();
|
||||
const encoded_inputs = concatEncodedArrays(inputs);
|
||||
b = performance.now();
|
||||
@@ -234,13 +261,10 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
this.perf?.addPoint("node/" + node_type.id, b - a);
|
||||
log.log("Result:", results[node.id]);
|
||||
log.groupEnd();
|
||||
|
||||
} catch (e) {
|
||||
log.groupEnd();
|
||||
log.error(`Error executing node ${node_type.id || node.id}`, e);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
// return the result of the parent of the output node
|
||||
@@ -253,11 +277,9 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
this.perf?.endPoint("runtime");
|
||||
|
||||
return res as unknown as Int32Array;
|
||||
|
||||
}
|
||||
|
||||
getPerformanceData() {
|
||||
return this.perf?.get();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -6,14 +6,16 @@ import { MemoryRuntimeCache } from "./runtime-executor-cache";
|
||||
|
||||
const cache = new MemoryRuntimeCache();
|
||||
const indexDbCache = new IndexDBCache("node-registry");
|
||||
const nodeRegistry = new RemoteNodeRegistry("");
|
||||
nodeRegistry.cache = indexDbCache;
|
||||
const nodeRegistry = new RemoteNodeRegistry("", indexDbCache);
|
||||
const executor = new MemoryRuntimeExecutor(nodeRegistry, cache);
|
||||
|
||||
const performanceStore = createPerformanceStore();
|
||||
executor.perf = performanceStore;
|
||||
|
||||
export async function executeGraph(graph: Graph, settings: Record<string, unknown>): Promise<Int32Array> {
|
||||
export async function executeGraph(
|
||||
graph: Graph,
|
||||
settings: Record<string, unknown>,
|
||||
): Promise<Int32Array> {
|
||||
await nodeRegistry.load(graph.nodes.map((n) => n.type));
|
||||
performanceStore.startRun();
|
||||
let res = await executor.execute(graph, settings);
|
||||
|
||||
@@ -1,7 +1,3 @@
|
||||
<script module lang="ts">
|
||||
let openSections = localState<Record<string, boolean>>("open-details", {});
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import NestedSettings from "./NestedSettings.svelte";
|
||||
import { localState } from "$lib/helpers/localState.svelte";
|
||||
@@ -12,10 +8,15 @@
|
||||
|
||||
type InputType = NodeInput | Button;
|
||||
|
||||
interface Nested {
|
||||
[key: string]: (Nested & { title?: string }) | InputType;
|
||||
type SettingsNode = InputType | SettingsGroup;
|
||||
|
||||
interface SettingsGroup {
|
||||
title?: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
type SettingsType = Record<string, Nested>;
|
||||
|
||||
type SettingsType = Record<string, SettingsNode>;
|
||||
|
||||
type SettingsValue = Record<
|
||||
string,
|
||||
Record<string, unknown> | string | number | boolean | number[]
|
||||
@@ -29,38 +30,57 @@
|
||||
depth?: number;
|
||||
};
|
||||
|
||||
// Local persistent state for <details> sections
|
||||
const openSections = localState<Record<string, boolean>>("open-details", {});
|
||||
|
||||
let { id, key = "", value = $bindable(), type, depth = 0 }: Props = $props();
|
||||
|
||||
function isNodeInput(v: InputType | Nested): v is InputType {
|
||||
return v && "type" in v;
|
||||
function isNodeInput(v: SettingsNode | undefined): v is InputType {
|
||||
return !!v && typeof v === "object" && "type" in v;
|
||||
}
|
||||
|
||||
function getDefaultValue() {
|
||||
if (key === "") return;
|
||||
if (key === "title") return;
|
||||
if (Array.isArray(type[key]?.options)) {
|
||||
if (value?.[key] !== undefined) {
|
||||
return type[key]?.options?.indexOf(value?.[key]);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
if (value?.[key] !== undefined) return value?.[key];
|
||||
if (type[key]?.value !== undefined) return type[key]?.value;
|
||||
function getDefaultValue(): unknown {
|
||||
if (key === "" || key === "title") return;
|
||||
|
||||
if (isNodeInput(type[key])) {
|
||||
if (type[key].type === "boolean") return 0;
|
||||
if (type[key].type === "float") return 0.5;
|
||||
if (type[key].type === "integer") return 0;
|
||||
if (type[key].type === "select") return 0;
|
||||
const node = type[key];
|
||||
|
||||
if (!isNodeInput(node)) return;
|
||||
|
||||
const anyNode = node as any;
|
||||
|
||||
// select input: use index into options
|
||||
if (Array.isArray(anyNode.options)) {
|
||||
if (value?.[key] !== undefined) {
|
||||
return anyNode.options.indexOf(value[key]);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (value?.[key] !== undefined) return value[key];
|
||||
|
||||
if ("value" in node && anyNode.value !== undefined) {
|
||||
return anyNode.value;
|
||||
}
|
||||
|
||||
switch (node.type) {
|
||||
case "boolean":
|
||||
return 0;
|
||||
case "float":
|
||||
return 0.5;
|
||||
case "integer":
|
||||
case "select":
|
||||
return 0;
|
||||
default:
|
||||
return 0;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
let internalValue = $state(getDefaultValue());
|
||||
|
||||
let open = $state(openSections[id]);
|
||||
if (depth > 0 && !isNodeInput(type[key])) {
|
||||
|
||||
// Persist <details> open/closed state for groups
|
||||
if (depth > 0 && !isNodeInput(type[key!])) {
|
||||
$effect(() => {
|
||||
if (open !== undefined) {
|
||||
openSections[id] = open;
|
||||
@@ -68,21 +88,26 @@
|
||||
});
|
||||
}
|
||||
|
||||
// Sync internalValue back into `value`
|
||||
$effect(() => {
|
||||
if (key === "" || internalValue === undefined) return;
|
||||
|
||||
const node = type[key];
|
||||
|
||||
if (
|
||||
isNodeInput(type[key]) &&
|
||||
Array.isArray(type[key]?.options) &&
|
||||
isNodeInput(node) &&
|
||||
Array.isArray((node as any).options) &&
|
||||
typeof internalValue === "number"
|
||||
) {
|
||||
value[key] = type[key].options?.[internalValue];
|
||||
value[key] = (node as any).options[internalValue] as any;
|
||||
} else {
|
||||
value[key] = internalValue;
|
||||
value[key] = internalValue as any;
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
{#if key && isNodeInput(type?.[key])}
|
||||
<!-- Leaf input -->
|
||||
<div class="input input-{type[key].type}" class:first-level={depth === 1}>
|
||||
{#if type[key].type === "button"}
|
||||
<button onclick={() => console.log(type[key])}>
|
||||
@@ -94,7 +119,8 @@
|
||||
{/if}
|
||||
</div>
|
||||
{:else if depth === 0}
|
||||
{#each Object.keys(type ?? {}).filter((key) => key !== "title") as childKey}
|
||||
<!-- Root: iterate over top-level keys -->
|
||||
{#each Object.keys(type ?? {}).filter((k) => k !== "title") as childKey}
|
||||
<NestedSettings
|
||||
id={`${id}.${childKey}`}
|
||||
key={childKey}
|
||||
@@ -105,18 +131,19 @@
|
||||
{/each}
|
||||
<hr />
|
||||
{:else if key && type?.[key]}
|
||||
<!-- Group -->
|
||||
{#if depth > 0}
|
||||
<hr />
|
||||
{/if}
|
||||
<details bind:open>
|
||||
<summary><p>{type[key]?.title || key}</p></summary>
|
||||
<summary><p>{(type[key] as SettingsGroup).title || key}</p></summary>
|
||||
<div class="content">
|
||||
{#each Object.keys(type[key]).filter((key) => key !== "title") as childKey}
|
||||
{#each Object.keys(type[key] as SettingsGroup).filter((k) => k !== "title") as childKey}
|
||||
<NestedSettings
|
||||
id={`${id}.${childKey}`}
|
||||
key={childKey}
|
||||
value={value[key] as SettingsValue}
|
||||
type={type[key] as SettingsType}
|
||||
type={type[key] as unknown as SettingsType}
|
||||
depth={depth + 1}
|
||||
/>
|
||||
{/each}
|
||||
@@ -156,6 +183,7 @@
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.input-boolean > label {
|
||||
order: 2;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { localState } from "$lib/helpers/localState.svelte";
|
||||
import type { NodeInput } from "@nodes/types";
|
||||
import type { SettingsType } from ".";
|
||||
|
||||
const themes = [
|
||||
"dark",
|
||||
@@ -118,7 +119,7 @@ export const AppSettingTypes = {
|
||||
},
|
||||
},
|
||||
},
|
||||
} as const;
|
||||
} as const satisfies SettingsType;
|
||||
|
||||
type IsInputDefinition<T> = T extends NodeInput ? T : never;
|
||||
type HasTitle = { title: string };
|
||||
|
||||
@@ -2,12 +2,26 @@ import type { NodeInput } from "@nodes/types";
|
||||
|
||||
type Button = { type: "button"; label?: string };
|
||||
|
||||
export type SettingsStore = {
|
||||
[key: string]: SettingsStore | string | number | boolean;
|
||||
};
|
||||
|
||||
type InputType = NodeInput | Button;
|
||||
|
||||
export interface SettingsType {
|
||||
[key: string]: (SettingsType & { title?: string }) | InputType;
|
||||
type SettingsNode = InputType | SettingsGroup;
|
||||
|
||||
export interface SettingsGroup {
|
||||
title?: string;
|
||||
[key: string]: SettingsNode | string | number | undefined;
|
||||
}
|
||||
|
||||
export type SettingsStore = {
|
||||
[key: string]: SettingsStore | string | number | boolean
|
||||
};
|
||||
export type SettingsType = Record<string, SettingsNode>;
|
||||
|
||||
export type SettingsValue = Record<
|
||||
string,
|
||||
Record<string, unknown> | string | number | boolean | number[]
|
||||
>;
|
||||
|
||||
export function isNodeInput(v: SettingsNode | undefined): v is InputType {
|
||||
return !!v && "type" in v;
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@
|
||||
import { IndexDBCache, RemoteNodeRegistry } from "@nodes/registry";
|
||||
import { createPerformanceStore } from "@nodes/utils";
|
||||
import BenchmarkPanel from "$lib/sidebar/panels/BenchmarkPanel.svelte";
|
||||
import { debounceAsyncFunction } from "$lib/helpers";
|
||||
|
||||
let performanceStore = createPerformanceStore();
|
||||
|
||||
@@ -79,40 +80,39 @@
|
||||
});
|
||||
|
||||
let runIndex = 0;
|
||||
const handleUpdate = async (
|
||||
g: Graph,
|
||||
s: Record<string, any> = graphSettings,
|
||||
) => {
|
||||
runIndex++;
|
||||
performanceStore.startRun();
|
||||
try {
|
||||
let a = performance.now();
|
||||
const graphResult = await runtime.execute(
|
||||
$state.snapshot(g),
|
||||
$state.snapshot(s),
|
||||
);
|
||||
let b = performance.now();
|
||||
const handleUpdate = debounceAsyncFunction(
|
||||
async (g: Graph, s: Record<string, any> = graphSettings) => {
|
||||
runIndex++;
|
||||
performanceStore.startRun();
|
||||
try {
|
||||
let a = performance.now();
|
||||
const graphResult = await runtime.execute(
|
||||
$state.snapshot(g),
|
||||
$state.snapshot(s),
|
||||
);
|
||||
let b = performance.now();
|
||||
|
||||
if (appSettings.debug.useWorker) {
|
||||
let perfData = await runtime.getPerformanceData();
|
||||
let lastRun = perfData?.at(-1);
|
||||
if (lastRun?.total) {
|
||||
lastRun.runtime = lastRun.total;
|
||||
delete lastRun.total;
|
||||
performanceStore.mergeData(lastRun);
|
||||
performanceStore.addPoint(
|
||||
"worker-transfer",
|
||||
b - a - lastRun.runtime[0],
|
||||
);
|
||||
if (appSettings.debug.useWorker) {
|
||||
let perfData = await runtime.getPerformanceData();
|
||||
let lastRun = perfData?.at(-1);
|
||||
if (lastRun?.total) {
|
||||
lastRun.runtime = lastRun.total;
|
||||
delete lastRun.total;
|
||||
performanceStore.mergeData(lastRun);
|
||||
performanceStore.addPoint(
|
||||
"worker-transfer",
|
||||
b - a - lastRun.runtime[0],
|
||||
);
|
||||
}
|
||||
}
|
||||
viewerComponent?.update(graphResult);
|
||||
} catch (error) {
|
||||
console.log("errors", error);
|
||||
} finally {
|
||||
performanceStore.stopRun();
|
||||
}
|
||||
viewerComponent?.update(graphResult);
|
||||
} catch (error) {
|
||||
console.log("errors", error);
|
||||
} finally {
|
||||
performanceStore.stopRun();
|
||||
}
|
||||
};
|
||||
},
|
||||
);
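Note: debounceAsyncFunction comes from $lib/helpers and its implementation is not part of this diff; a plausible sketch of the guarantee the wrapper needs (no overlapping runs, latest call wins) looks like this:

// hypothetical sketch, not the actual $lib/helpers implementation
export function debounceAsyncFunction<A extends unknown[]>(
  fn: (...args: A) => Promise<void>,
) {
  let running = false;
  let queued: A | null = null;
  return async (...args: A) => {
    if (running) {
      queued = args; // remember only the most recent call
      return;
    }
    running = true;
    try {
      await fn(...args);
      while (queued) {
        const next = queued;
        queued = null;
        await fn(...next);
      }
    } finally {
      running = false;
    }
  };
}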
|
||||
|
||||
$effect(() => {
|
||||
//@ts-ignore
|
||||
|
||||
@@ -1,27 +1,36 @@
|
||||
import type { RequestHandler } from "./$types";
|
||||
import type { EntryGenerator, RequestHandler } from "./$types";
|
||||
import * as registry from "$lib/node-registry";
|
||||
import type { EntryGenerator } from "../$types";
|
||||
|
||||
export const prerender = true;
|
||||
|
||||
export const entries: EntryGenerator = async () => {
|
||||
const users = await registry.getUsers();
|
||||
return users.map(user => {
|
||||
return user.collections.map(collection => {
|
||||
return collection.nodes.map(node => {
|
||||
return { user: user.id, collection: collection.id.split("/")[1], node: node.id.split("/")[2] }
|
||||
return users
|
||||
.map((user) => {
|
||||
return user.collections.map((collection) => {
|
||||
return collection.nodes.map((node) => {
|
||||
return {
|
||||
user: user.id,
|
||||
collection: collection.id.split("/")[1],
|
||||
node: node.id.split("/")[2],
|
||||
};
|
||||
});
|
||||
});
|
||||
})
|
||||
}).flat(2);
|
||||
}
|
||||
.flat(2);
|
||||
};
|
||||
|
||||
export const GET: RequestHandler = async function GET({ params }) {
|
||||
|
||||
const wasm = await registry.getWasm(`${params.user}/${params.collection}/${params.node}`);
|
||||
const wasm = await registry.getWasm(
|
||||
`${params.user}/${params.collection}/${params.node}`,
|
||||
);
|
||||
|
||||
if (!wasm) {
|
||||
return new Response("Not found", { status: 404 });
|
||||
}
|
||||
|
||||
return new Response(wasm, { status: 200, headers: { "Content-Type": "application/wasm" } });
|
||||
}
|
||||
return new Response(wasm, {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/wasm" },
|
||||
});
|
||||
};
|
||||
|
||||