Merge branch 'main' of github.com:jim-fx/nodarium
All checks were successful
Deploy to GitHub Pages / build_site (push) Successful in 1m57s

This commit is contained in:
Felix Hungenberg
2026-01-22 14:06:44 +01:00
6 changed files with 65 additions and 63 deletions

View File

@@ -1,4 +1,5 @@
import throttle from '$lib/helpers/throttle'; import throttle from '$lib/helpers/throttle';
import { RemoteNodeRegistry } from '@nodarium/registry';
import type { import type {
Edge, Edge,
Graph, Graph,
@@ -18,6 +19,8 @@ import { HistoryManager } from './history-manager';
const logger = createLogger('graph-manager'); const logger = createLogger('graph-manager');
logger.mute(); logger.mute();
const remoteRegistry = new RemoteNodeRegistry('');
const clone = 'structuredClone' in self const clone = 'structuredClone' in self
? self.structuredClone ? self.structuredClone
: (args: any) => JSON.parse(JSON.stringify(args)); : (args: any) => JSON.parse(JSON.stringify(args));
@@ -173,7 +176,9 @@ export class GraphManager extends EventEmitter<{
return areSocketsCompatible(edgeOutputSocketType, accepted); return areSocketsCompatible(edgeOutputSocketType, accepted);
}); });
const bestOutputIdx = draggedOutputs.findIndex(outputType => areSocketsCompatible(outputType, targetAcceptedTypes)); const bestOutputIdx = draggedOutputs.findIndex(outputType =>
areSocketsCompatible(outputType, targetAcceptedTypes)
);
if (!bestInputEntry || bestOutputIdx === -1) { if (!bestInputEntry || bestOutputIdx === -1) {
logger.error('Could not find compatible sockets for drop'); logger.error('Could not find compatible sockets for drop');
@@ -308,6 +313,21 @@ export class GraphManager extends EventEmitter<{
const nodeIds = Array.from(new Set([...graph.nodes.map((n) => n.type)])); const nodeIds = Array.from(new Set([...graph.nodes.map((n) => n.type)]));
await this.registry.load(nodeIds); await this.registry.load(nodeIds);
// Fetch all nodes from all collections of the loaded nodes
const allCollections = new Set<`${string}/${string}`>();
for (const id of nodeIds) {
const [user, collection] = id.split('/');
allCollections.add(`${user}/${collection}`);
}
for (const collection of allCollections) {
remoteRegistry
.fetchCollection(collection)
.then((collection: { nodes: { id: NodeId }[] }) => {
const ids = collection.nodes.map((n) => n.id);
return this.registry.load(ids);
});
}
logger.info('loaded node types', this.registry.getAllNodes()); logger.info('loaded node types', this.registry.getAllNodes());
for (const node of this.graph.nodes) { for (const node of this.graph.nodes) {

View File

@@ -62,7 +62,7 @@ export class GraphState {
); );
camera = $state<OrthographicCamera>(null!); camera = $state<OrthographicCamera>(null!);
cameraPosition: [number, number, number] = $state([0, 0, 100]); cameraPosition: [number, number, number] = $state([140, 100, 3.5]);
clipboard: null | { clipboard: null | {
nodes: NodeInstance[]; nodes: NodeInstance[];

View File

@@ -1,32 +1,31 @@
import { import {
NodeDefinitionSchema,
type AsyncCache, type AsyncCache,
type NodeDefinition, type NodeDefinition,
type NodeRegistry, NodeDefinitionSchema,
} from "@nodarium/types"; type NodeRegistry
import { createLogger, createWasmWrapper } from "@nodarium/utils"; } from '@nodarium/types';
import { createLogger, createWasmWrapper } from '@nodarium/utils';
const log = createLogger("node-registry"); const log = createLogger('node-registry');
log.mute(); log.mute();
export class RemoteNodeRegistry implements NodeRegistry { export class RemoteNodeRegistry implements NodeRegistry {
status: "loading" | "ready" | "error" = "loading"; status: 'loading' | 'ready' | 'error' = 'loading';
private nodes: Map<string, NodeDefinition> = new Map(); private nodes: Map<string, NodeDefinition> = new Map();
constructor( constructor(
private url: string, private url: string,
public cache?: AsyncCache<ArrayBuffer | string>, public cache?: AsyncCache<ArrayBuffer | string>
) { } ) { }
async fetchJson(url: string, skipCache = false) { async fetchJson(url: string, skipCache = false) {
const finalUrl = `${this.url}/${url}`; const finalUrl = `${this.url}/${url}`;
if (!skipCache && this.cache) { if (!skipCache && this.cache) {
const cachedValue = await this.cache?.get<string>(finalUrl); const cachedValue = await this.cache?.get<string>(finalUrl);
if (cachedValue) { if (cachedValue) {
// fetch again in the background, maybe implement that only refetch after a certain time // fetch again in the background, maybe implement that only refetch after a certain time
this.fetchJson(url, true) this.fetchJson(url, true);
return JSON.parse(cachedValue); return JSON.parse(cachedValue);
} }
} }
@@ -46,14 +45,13 @@ export class RemoteNodeRegistry implements NodeRegistry {
} }
async fetchArrayBuffer(url: string, skipCache = false) { async fetchArrayBuffer(url: string, skipCache = false) {
const finalUrl = `${this.url}/${url}`; const finalUrl = `${this.url}/${url}`;
if (!skipCache && this.cache) { if (!skipCache && this.cache) {
const cachedNode = await this.cache?.get<ArrayBuffer>(finalUrl); const cachedNode = await this.cache?.get<ArrayBuffer>(finalUrl);
if (cachedNode) { if (cachedNode) {
// fetch again in the background, maybe implement that only refetch after a certain time // fetch again in the background, maybe implement that only refetch after a certain time
this.fetchArrayBuffer(url, true) this.fetchArrayBuffer(url, true);
return cachedNode; return cachedNode;
} }
} }
@@ -79,7 +77,7 @@ export class RemoteNodeRegistry implements NodeRegistry {
async fetchCollection(userCollectionId: `${string}/${string}`) { async fetchCollection(userCollectionId: `${string}/${string}`) {
const col = await this.fetchJson(`nodes/${userCollectionId}.json`); const col = await this.fetchJson(`nodes/${userCollectionId}.json`);
return col return col;
} }
async fetchNodeDefinition(nodeId: `${string}/${string}/${string}`) { async fetchNodeDefinition(nodeId: `${string}/${string}/${string}`) {
@@ -87,7 +85,6 @@ export class RemoteNodeRegistry implements NodeRegistry {
} }
private async fetchNodeWasm(nodeId: `${string}/${string}/${string}`) { private async fetchNodeWasm(nodeId: `${string}/${string}/${string}`) {
const node = await this.fetchArrayBuffer(`nodes/${nodeId}.wasm`); const node = await this.fetchArrayBuffer(`nodes/${nodeId}.wasm`);
if (!node) { if (!node) {
throw new Error(`Failed to load node wasm ${nodeId}`); throw new Error(`Failed to load node wasm ${nodeId}`);
@@ -99,7 +96,7 @@ export class RemoteNodeRegistry implements NodeRegistry {
async load(nodeIds: `${string}/${string}/${string}`[]) { async load(nodeIds: `${string}/${string}/${string}`[]) {
const a = performance.now(); const a = performance.now();
const nodes = await Promise.all( const nodes = (await Promise.all(
[...new Set(nodeIds).values()].map(async (id) => { [...new Set(nodeIds).values()].map(async (id) => {
if (this.nodes.has(id)) { if (this.nodes.has(id)) {
return this.nodes.get(id)!; return this.nodes.get(id)!;
@@ -107,17 +104,23 @@ export class RemoteNodeRegistry implements NodeRegistry {
const wasmBuffer = await this.fetchNodeWasm(id); const wasmBuffer = await this.fetchNodeWasm(id);
return this.register(wasmBuffer); try {
}), return await this.register(wasmBuffer);
); } catch (e) {
console.log('Failed to register: ', id);
console.error(e);
return;
}
})
)).filter(Boolean) as NodeDefinition[];
const duration = performance.now() - a; const duration = performance.now() - a;
log.group("loaded nodes in", duration, "ms"); log.group('loaded nodes in', duration, 'ms');
log.info(nodeIds); log.info(nodeIds);
log.info(nodes); log.info(nodes);
log.groupEnd(); log.groupEnd();
this.status = "ready"; this.status = 'ready';
return nodes; return nodes;
} }
@@ -128,17 +131,16 @@ export class RemoteNodeRegistry implements NodeRegistry {
const definition = NodeDefinitionSchema.safeParse(wrapper.get_definition()); const definition = NodeDefinitionSchema.safeParse(wrapper.get_definition());
if (definition.error) { if (definition.error) {
console.error(definition.error);
throw definition.error; throw definition.error;
} }
if (this.cache) { if (this.cache) {
await this.cache.set(definition.data.id, wasmBuffer); this.cache.set(definition.data.id, wasmBuffer);
} }
let node = { let node = {
...definition.data, ...definition.data,
execute: wrapper.execute, execute: wrapper.execute
}; };
this.nodes.set(definition.data.id, node); this.nodes.set(definition.data.id, node);

View File

@@ -2,15 +2,15 @@
name = "nodarium_utils" name = "nodarium_utils"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
description = "A collection of utilities for Nodarium"
license = "MIT" license = "MIT"
repository = "https://github.com/jim-fx/nodes" repository = "https://github.com/jim-fx/nodes"
description = "A collection of utilities for Nodarium"
[lib] [lib]
crate-type = ["rlib"] crate-type = ["rlib"]
[dependencies] [dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", default-features = false, features = ["alloc"] }
glam = "0.30.10" glam = "0.30.10"
noise = "0.9.0" noise = "0.9.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", default-features = false, features = ["alloc"] }

View File

@@ -33,3 +33,19 @@ macro_rules! log {
}}; }};
} }
#[allow(dead_code)]
#[rustfmt::skip]
fn test_split_args(){
    // Manual inspection helper: each row is a raw argument buffer captured
    // from a real node invocation. Print how `split_args` partitions each
    // one so the grouping can be eyeballed against expectations.
    let samples = [
        vec![0, 1, 0, 4, 1056964608, 1065353216, 1056964608, 1, 4, 1080872141, 1054951342, 32, 1, 1 ],
        vec![0, 4, 1056964608, 1065353216, 1056964608, 1, 4],
        vec![0, 1, 0, 3, 0, 0, 0, 5, 0, 0, 1073741824, 1073741824, 1, 1, 1, 1, 1, 4, 1065353216, 1054615798, 5, 1, 1 ],
        vec![ 0, 1, 0, 3, 0, 0, 0, 1, 4, 1073741824, 1073741824, 32, 1, 1 ],
        vec![0, 1, 0, 1, 0, 14, 0, 1056964608, 1056964608, 1056964608, 1058810102, 1056964608, 1069547520, 1056964608, 1050421494, 1056964608, 1075838976, 1056964608, 0, 1, 1, 1, 2, 13, 1, 1],
        vec![ 0, 1, 0, 2, 0, 0, 5, 0, 0, 1073741824, 1073741824, 1, 2, 0, 1, 4, 1088212173, 1083388723, 20, 1, 1 ]
    ];
    for sample in samples.iter() {
        println!("RESULT: {:?}", split_args(sample));
    }
}

View File

@@ -1,36 +0,0 @@
use nodarium_utils::{
geometry::{create_multiple_paths, wrap_multiple_paths},
split_args,
};
#[allow(dead_code)]
#[rustfmt::skip]
// Debug helper, not a #[test]: prints how `split_args` partitions each of a
// handful of raw argument buffers (captured from real node invocations) so
// the grouping can be checked by eye. Kept dead-code-allowed for ad-hoc use.
fn test_split_args(){
let inputs = vec![
vec![0, 1, 0, 4, 1056964608, 1065353216, 1056964608, 1, 4, 1080872141, 1054951342, 32, 1, 1 ],
vec![0, 4, 1056964608, 1065353216, 1056964608, 1, 4],
vec![0, 1, 0, 3, 0, 0, 0, 5, 0, 0, 1073741824, 1073741824, 1, 1, 1, 1, 1, 4, 1065353216, 1054615798, 5, 1, 1 ],
vec![ 0, 1, 0, 3, 0, 0, 0, 1, 4, 1073741824, 1073741824, 32, 1, 1 ],
vec![0, 1, 0, 1, 0, 14, 0, 1056964608, 1056964608, 1056964608, 1058810102, 1056964608, 1069547520, 1056964608, 1050421494, 1056964608, 1075838976, 1056964608, 0, 1, 1, 1, 2, 13, 1, 1],
vec![ 0, 1, 0, 2, 0, 0, 5, 0, 0, 1073741824, 1073741824, 1, 2, 0, 1, 4, 1088212173, 1083388723, 20, 1, 1 ]
];
// Many of these integers are IEEE-754 float bit patterns (e.g. 1065353216
// == 1.0f32) — presumably split_args re-interprets them; verify in utils.
for input in inputs {
println!("RESULT: {:?}", split_args(&input));
}
}
fn test_path() {
    // Build one flat buffer holding multiple paths, then view it through
    // wrapper structs. Writing through the wrapper and printing both views
    // demonstrates that the wrappers alias the underlying buffer.
    let mut raw_paths = create_multiple_paths(1, 4, 1);
    let mut views = wrap_multiple_paths(&mut raw_paths);
    views[0].points[0] = 1.0;
    println!("{:?}", views);
    println!("{:?}", raw_paths);
}
/// Entry point: runs the path-wrapping smoke test and exits.
fn main() {
    test_path();
}