diff --git a/.cargo/config.toml b/.cargo/config.toml
new file mode 100644
index 0000000..dd1352f
--- /dev/null
+++ b/.cargo/config.toml
@@ -0,0 +1,9 @@
+[target.wasm32-unknown-unknown]
+rustflags = [
+ "-C",
+ "link-arg=--import-memory",
+ "-C",
+ "link-arg=--initial-memory=67108864", # 64 MiB
+ "-C",
+ "link-arg=--max-memory=536870912", # 512 MiB
+]
diff --git a/Cargo.lock b/Cargo.lock
index e11370a..a35c74e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -24,6 +24,14 @@ dependencies = [
"nodarium_utils",
]
+[[package]]
+name = "debug"
+version = "0.1.0"
+dependencies = [
+ "nodarium_macros",
+ "nodarium_utils",
+]
+
[[package]]
name = "float"
version = "0.1.0"
diff --git a/README.md b/README.md
index a78425a..669e91b 100644
--- a/README.md
+++ b/README.md
@@ -2,17 +2,17 @@ Nodarium
-
Nodarium
+
Nodarium
- Nodarium is a WebAssembly based visual programming language.
+ Nodarium is a WebAssembly based visual programming language.
-Currently this visual programming language is used to develop https://nodes.max-richter.dev, a procedural modelling tool for 3d-plants.
+Currently this visual programming language is used to develop [nodes.max-richter.dev](https://nodes.max-richter.dev), a procedural modelling tool for 3d-plants.
# Table of contents
@@ -22,12 +22,11 @@ Currently this visual programming language is used to develop https://nodes.max-
# Developing
-### Install prerequisites:
+### Install prerequisites
- [Node.js](https://nodejs.org/en/download)
- [pnpm](https://pnpm.io/installation)
- [rust](https://www.rust-lang.org/tools/install)
-- wasm-pack
### Install dependencies
diff --git a/SHARED_MEMORY_REFACTOR_PLAN.md b/SHARED_MEMORY_REFACTOR_PLAN.md
new file mode 100644
index 0000000..e330046
--- /dev/null
+++ b/SHARED_MEMORY_REFACTOR_PLAN.md
@@ -0,0 +1,783 @@
+# Shared Memory Refactor Plan
+
+## Executive Summary
+
+Migrate to a single shared `WebAssembly.Memory` instance imported by all nodes using `--import-memory`. The `#[nodarium_execute]` macro writes the function's return value directly to shared memory at the specified offset.
+
+## Architecture Overview
+
+```
+┌─────────────────────────────────────────────────────────────────────┐
+│ Shared WebAssembly.Memory │
+│ ┌───────────────────────────────────────────────────────────────┐ │
+│ │ [Node A output] [Node B output] [Node C output] ... │ │
+│ │ ┌────────────┐ ┌────────────┐ ┌────────────┐ │ │
+│ │ │ Vec │ │ Vec │ │ Vec │ │ │
+│ │ │ 4 bytes │ │ 12 bytes │ │ 2KB │ │ │
+│ │ └────────────┘ └────────────┘ └────────────┘ │ │
+│ │ │ │
+│ │ offset: 0 ────────────────────────────────────────────────► │ │
+│ └───────────────────────────────────────────────────────────────┘ │
+└─────────────────────────────────────────────────────────────────────┘
+ ▲
+ │
+ │ import { memory } from "env"
+ ┌─────────────────────────┼─────────────────────────┐
+ │ │ │
+ ┌────┴────┐ ┌────┴────┐ ┌────┴────┐
+ │ Node A │ │ Node B │ │ Node C │
+ │ WASM │ │ WASM │ │ WASM │
+ └─────────┘ └─────────┘ └─────────┘
+```
+
+## Phase 1: Compilation Configuration
+
+### 1.1 Cargo Config
+
+```toml
+# nodes/max/plantarium/box/.cargo/config.toml
+[build]
+rustflags = ["-C", "link-arg=--import-memory"]
+```
+
+Or globally for the workspace in `.cargo/config.toml` (as added at the repo root in this change):
+
+```toml
+# .cargo/config.toml
+[target.wasm32-unknown-unknown]
+rustflags = ["-C", "link-arg=--import-memory"]
+```
+
+### 1.2 Import Memory Semantics
+
+With `--import-memory` (a host-side sketch follows this list):
+
+- Nodes **import** memory from the host (not export their own)
+- All nodes receive the same `WebAssembly.Memory` instance
+- Memory is read/write accessible from all modules
+- No `memory.grow` needed (host manages allocation)
+
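+A minimal host-side sketch of what this implies; function and variable names here are
+illustrative assumptions, not part of the plan:
+
+```typescript
+// One memory instance created by the host and imported by every node module.
+// 1024 pages = 64 MiB initial, 8192 pages = 512 MiB maximum (matching .cargo/config.toml).
+const memory = new WebAssembly.Memory({ initial: 1024, maximum: 8192 });
+
+async function loadNode(url: string): Promise<WebAssembly.Instance> {
+  const bytes = await fetch(url).then((r) => r.arrayBuffer());
+  // Linked with --import-memory, the node expects `env.memory` instead of exporting its own.
+  const { instance } = await WebAssembly.instantiate(bytes, {
+    env: {
+      memory,
+      // logging imports used by the generated wrappers (see Phase 5)
+      __nodarium_log: () => {},
+      __nodarium_log_panic: () => {},
+    },
+  });
+  return instance;
+}
+```
+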
+## Phase 2: Macro Design
+
+### 2.1 Clean Node API
+
+```rust
+// input.json has 3 inputs: op_type, a, b
+nodarium_definition_file!("src/input.json");
+
+#[nodarium_execute]
+pub fn execute(op_type: *const i32, a: *const i32, b: *const i32) -> Vec<i32> {
+    // Read inputs directly from shared memory
+    let op = unsafe { *op_type };
+    let a_val = f32::from_bits(unsafe { *a } as u32);
+    let b_val = f32::from_bits(unsafe { *b } as u32);
+
+    let result = match op {
+        0 => a_val + b_val,
+        1 => a_val - b_val,
+        2 => a_val * b_val,
+        3 => a_val / b_val,
+        _ => 0.0,
+    };
+
+    // Return a Vec<i32>; the macro handles writing it to shared memory
+    vec![result.to_bits() as i32]
+}
+```
+
+### 2.2 Macro Implementation
+
+```rust
+// packages/macros/src/lib.rs
+
+use std::{env, fs, path::Path};
+
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::parse_macro_input;
+
+#[proc_macro_attribute]
+pub fn nodarium_execute(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    let input_fn = parse_macro_input!(item as syn::ItemFn);
+    let fn_name = input_fn.sig.ident.clone();
+
+    // Parse definition to get input count
+    let project_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
+    let def: NodeDefinition = serde_json::from_str(
+        &fs::read_to_string(Path::new(&project_dir).join("src/input.json")).unwrap(),
+    )
+    .unwrap();
+
+    let input_count = def.inputs.as_ref().map(|i| i.len()).unwrap_or(0);
+
+    // Validate signature
+    validate_signature(&input_fn.sig, &def, input_count);
+
+    // Generate wrapper
+    generate_execute_wrapper(input_fn, &fn_name, input_count)
+}
+
+fn validate_signature(fn_sig: &syn::Signature, def: &NodeDefinition, expected_inputs: usize) {
+    let param_count = fn_sig.inputs.len();
+    if param_count != expected_inputs {
+        panic!(
+            "Execute function has {} parameters but definition has {} inputs\n\
+             Definition inputs: {:?}\n\
+             Expected signature:\n\
+             pub fn execute({}) -> Vec<i32>",
+            param_count,
+            expected_inputs,
+            def.inputs.as_ref().map(|i| i.keys().collect::<Vec<_>>()),
+            (0..expected_inputs)
+                .map(|i| format!("arg{}: *const i32", i))
+                .collect::<Vec<_>>()
+                .join(", ")
+        );
+    }
+
+    // Verify the return type is Vec<i32>
+    match &fn_sig.output {
+        syn::ReturnType::Type(_, ty) => {
+            let is_vec = matches!(
+                &**ty,
+                syn::Type::Path(tp)
+                    if tp.path.segments.last().map(|s| s.ident == "Vec").unwrap_or(false)
+            );
+            if !is_vec {
+                panic!("Execute function must return Vec<i32>");
+            }
+        }
+        syn::ReturnType::Default => {
+            panic!("Execute function must return Vec<i32>");
+        }
+    }
+}
+
+fn generate_execute_wrapper(
+    input_fn: syn::ItemFn,
+    fn_name: &syn::Ident,
+    input_count: usize,
+) -> TokenStream {
+    let arg_names: Vec<_> = (0..input_count)
+        .map(|i| syn::Ident::new(&format!("arg{}", i), proc_macro2::Span::call_site()))
+        .collect();
+
+    let expanded = quote! {
+        #input_fn
+
+        #[no_mangle]
+        pub extern "C" fn execute(
+            output_pos: i32,
+            #( #arg_names: i32 ),*
+        ) -> i32 {
+            extern "C" {
+                fn __nodarium_log(ptr: *const u8, len: usize);
+                fn __nodarium_log_panic(ptr: *const u8, len: usize);
+            }
+
+            // Setup panic hook
+            static SET_HOOK: std::sync::Once = std::sync::Once::new();
+            SET_HOOK.call_once(|| {
+                std::panic::set_hook(Box::new(|info| {
+                    let msg = info.to_string();
+                    unsafe { __nodarium_log_panic(msg.as_ptr(), msg.len()); }
+                }));
+            });
+
+            // Call user function
+            let result = #fn_name(
+                #( #arg_names as *const i32 ),*
+            );
+
+            // Copy the result directly into shared memory at output_pos
+            let len_bytes = result.len() * 4;
+            unsafe {
+                let src = result.as_ptr() as *const u8;
+                let dst = output_pos as *mut u8;
+                dst.copy_from_nonoverlapping(src, len_bytes);
+            }
+
+            // The data was copied, so the Vec can be dropped normally here
+            drop(result);
+
+            len_bytes as i32
+        }
+    };
+
+    TokenStream::from(expanded)
+}
+```
+
+### 2.3 Generated Assembly
+
+The macro generates:
+
+```asm
+; Input: output_pos in register r0, arg0 in r1, arg1 in r2, arg2 in r3
+execute:
+ ; Call user function
+ bl user_execute ; returns pointer to Vec in r0
+
+ ; Calculate byte length
+ ldr r4, [r0, #8] ; Vec::len field
+ lsl r4, r4, #2 ; len * 4 (i32 = 4 bytes)
+
+ ; Copy Vec data to shared memory at output_pos
+ ldr r5, [r0, #0] ; Vec::ptr field
+ ldr r6, [r0, #4] ; capacity (unused)
+
+ ; memcpy(dst=output_pos, src=r5, len=r4)
+ ; (implemented via copy_from_nonoverlapping)
+
+ ; Return length
+ mov r0, r4
+ bx lr
+```
+
+## Phase 3: Input Reading Helpers
+
+```rust
+// packages/utils/src/accessor.rs
+
+/// Read i32 from shared memory
+#[inline]
+pub unsafe fn read_i32(ptr: *const i32) -> i32 {
+ *ptr
+}
+
+/// Read f32 from shared memory (stored as i32 bits)
+#[inline]
+pub unsafe fn read_f32(ptr: *const i32) -> f32 {
+ f32::from_bits(*ptr as u32)
+}
+
+/// Read boolean from shared memory
+#[inline]
+pub unsafe fn read_bool(ptr: *const i32) -> bool {
+ *ptr != 0
+}
+
+/// Read vec3 (3 f32s) from shared memory
+#[inline]
+pub unsafe fn read_vec3(ptr: *const i32) -> [f32; 3] {
+ let p = ptr as *const f32;
+ [p.read(), p.add(1).read(), p.add(2).read()]
+}
+
+/// Read slice from shared memory
+#[inline]
+pub unsafe fn read_i32_slice<'a>(ptr: *const i32, len: usize) -> &'a [i32] {
+    std::slice::from_raw_parts(ptr, len)
+}
+
+/// Read f32 slice from shared memory
+#[inline]
+pub unsafe fn read_f32_slice<'a>(ptr: *const i32, len: usize) -> &'a [f32] {
+    std::slice::from_raw_parts(ptr as *const f32, len)
+}
+
+/// Read with default value
+#[inline]
+pub unsafe fn read_f32_default(ptr: *const i32, default: f32) -> f32 {
+ if ptr.is_null() { default } else { read_f32(ptr) }
+}
+
+#[inline]
+pub unsafe fn read_i32_default(ptr: *const i32, default: i32) -> i32 {
+ if ptr.is_null() { default } else { read_i32(ptr) }
+}
+```
+
+## Phase 4: Node Implementation Examples
+
+### 4.1 Math Node
+
+```rust
+// nodes/max/plantarium/math/src/lib.rs
+
+nodarium_definition_file!("src/input.json");
+
+#[nodarium_execute]
+pub fn execute(op_type: *const i32, a: *const i32, b: *const i32) -> Vec<i32> {
+    use nodarium_utils::{read_i32, read_f32};
+
+    let op = unsafe { read_i32(op_type) };
+    let a_val = unsafe { read_f32(a) };
+    let b_val = unsafe { read_f32(b) };
+
+    let result = match op {
+        0 => a_val + b_val, // add
+        1 => a_val - b_val, // subtract
+        2 => a_val * b_val, // multiply
+        3 => a_val / b_val, // divide
+        _ => 0.0,
+    };
+
+    vec![result.to_bits() as i32]
+}
+```
+
+### 4.2 Vec3 Node
+
+```rust
+// nodes/max/plantarium/vec3/src/lib.rs
+
+nodarium_definition_file!("src/input.json");
+
+#[nodarium_execute]
+pub fn execute(x: *const i32, y: *const i32, z: *const i32) -> Vec<i32> {
+    use nodarium_utils::read_f32;
+
+    let x_val = unsafe { read_f32(x) };
+    let y_val = unsafe { read_f32(y) };
+    let z_val = unsafe { read_f32(z) };
+
+    vec![x_val.to_bits() as i32, y_val.to_bits() as i32, z_val.to_bits() as i32]
+}
+```
+
+### 4.3 Box Node
+
+```rust
+// nodes/max/plantarium/box/src/lib.rs
+
+nodarium_definition_file!("src/input.json");
+
+#[nodarium_execute]
+pub fn execute(size: *const i32) -> Vec<i32> {
+ use nodarium_utils::{read_f32, encode_float, calculate_normals};
+
+ let size = unsafe { read_f32(size) };
+ let p = encode_float(size);
+ let n = encode_float(-size);
+
+ let mut cube_geometry = vec![
+ 1, // 1: geometry
+ 8, // 8 vertices
+ 12, // 12 faces
+
+ // Face indices
+ 0, 1, 2, 0, 2, 3,
+ 0, 3, 4, 4, 5, 0,
+ 6, 1, 0, 5, 6, 0,
+ 7, 2, 1, 6, 7, 1,
+ 2, 7, 3, 3, 7, 4,
+ 7, 6, 4, 4, 6, 5,
+
+ // Bottom plate
+ p, n, n, p, n, p, n, n, p, n, n, n,
+
+ // Top plate
+ n, p, n, p, p, n, p, p, p, n, p, p,
+
+ // Normals
+ 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ ];
+
+ calculate_normals(&mut cube_geometry);
+ cube_geometry
+}
+```
+
+### 4.4 Stem Node
+
+```rust
+// nodes/max/plantarium/stem/src/lib.rs
+
+nodarium_definition_file!("src/input.json");
+
+#[nodarium_execute]
+pub fn execute(
+ origin: *const i32,
+ amount: *const i32,
+ length: *const i32,
+ thickness: *const i32,
+ resolution: *const i32,
+) -> Vec<i32> {
+ use nodarium_utils::{
+ read_vec3, read_i32, read_f32,
+ geometry::{create_multiple_paths, wrap_multiple_paths},
+ };
+
+ let origin = unsafe { read_vec3(origin) };
+ let amount = unsafe { read_i32(amount) } as usize;
+ let length = unsafe { read_f32(length) };
+ let thickness = unsafe { read_f32(thickness) };
+ let resolution = unsafe { read_i32(resolution) } as usize;
+
+ let mut stem_data = create_multiple_paths(amount, resolution, 1);
+ let mut stems = wrap_multiple_paths(&mut stem_data);
+
+ for stem in stems.iter_mut() {
+ let points = stem.get_points_mut();
+ for (i, point) in points.iter_mut().enumerate() {
+ let t = i as f32 / (resolution as f32 - 1.0);
+ point.x = origin[0];
+ point.y = origin[1] + t * length;
+ point.z = origin[2];
+ point.w = thickness * (1.0 - t);
+ }
+ }
+
+ stem_data
+}
+```
+
+## Phase 5: Runtime Implementation
+
+```typescript
+// app/src/lib/runtime/memory-manager.ts
+
+export const SHARED_MEMORY = new WebAssembly.Memory({
+ initial: 1024, // 64MB initial
+ maximum: 4096, // 256MB maximum
+});
+
+export class MemoryManager {
+ private offset: number = 0;
+ private readonly start: number = 0;
+
+ reset() {
+ this.offset = this.start;
+ }
+
+ alloc(bytes: number): number {
+ const pos = this.offset;
+ this.offset += bytes;
+ return pos;
+ }
+
+ readInt32(pos: number): number {
+ return new Int32Array(SHARED_MEMORY.buffer)[pos / 4];
+ }
+
+ readFloat32(pos: number): number {
+ return new Float32Array(SHARED_MEMORY.buffer)[pos / 4];
+ }
+
+ readBytes(pos: number, length: number): Uint8Array {
+ return new Uint8Array(SHARED_MEMORY.buffer, pos, length);
+ }
+
+ getInt32View(): Int32Array {
+ return new Int32Array(SHARED_MEMORY.buffer);
+ }
+
+ getFloat32View(): Float32Array {
+ return new Float32Array(SHARED_MEMORY.buffer);
+ }
+
+ getRemaining(): number {
+ return SHARED_MEMORY.buffer.byteLength - this.offset;
+ }
+}
+```
+
+```typescript
+// app/src/lib/runtime/imports.ts
+
+import { SHARED_MEMORY } from "./memory-manager";
+
+export function createImportObject(nodeId: string): WebAssembly.Imports {
+ return {
+ env: {
+ // Import shared memory
+ memory: SHARED_MEMORY,
+
+ // Logging
+ __nodarium_log: (ptr: number, len: number) => {
+ const msg = new TextDecoder().decode(
+ new Uint8Array(SHARED_MEMORY.buffer, ptr, len),
+ );
+ console.log(`[${nodeId}] ${msg}`);
+ },
+
+ __nodarium_log_panic: (ptr: number, len: number) => {
+ const msg = new TextDecoder().decode(
+ new Uint8Array(SHARED_MEMORY.buffer, ptr, len),
+ );
+ console.error(`[${nodeId}] PANIC: ${msg}`);
+ },
+ },
+ };
+}
+```
+
+```typescript
+// app/src/lib/runtime/executor.ts
+
+import { SHARED_MEMORY } from "./memory-manager";
+import { createImportObject } from "./imports";
+
+export class SharedMemoryRuntimeExecutor implements RuntimeExecutor {
+  private memory: MemoryManager;
+  private results: Map<string, { pos: number; len: number }> = new Map();
+  private instances: Map<string, WebAssembly.Instance> = new Map();
+
+  constructor(private registry: NodeRegistry) {
+    this.memory = new MemoryManager();
+  }
+
+  async execute(graph: Graph, settings: Record<string, unknown>) {
+ this.memory.reset();
+ this.results.clear();
+
+ const [outputNode, nodes] = await this.addMetaData(graph);
+ const sortedNodes = nodes.sort((a, b) => b.depth - a.depth);
+
+ for (const node of sortedNodes) {
+ await this.executeNode(node, settings);
+ }
+
+ const result = this.results.get(outputNode.id);
+ const view = this.memory.getInt32View();
+ return view.subarray(result.pos / 4, result.pos / 4 + result.len / 4);
+ }
+
+ private async executeNode(
+ node: RuntimeNode,
+    settings: Record<string, unknown>,
+ ) {
+ const def = this.definitionMap.get(node.type)!;
+ const inputs = def.inputs || {};
+ const inputNames = Object.keys(inputs);
+
+ const outputSize = this.estimateOutputSize(def);
+ const outputPos = this.memory.alloc(outputSize);
+ const args: number[] = [outputPos];
+
+ for (const inputName of inputNames) {
+ const inputDef = inputs[inputName];
+ const inputNode = node.state.inputNodes[inputName];
+ if (inputNode) {
+ const parentResult = this.results.get(inputNode.id)!;
+ args.push(parentResult.pos);
+ continue;
+ }
+
+ const valuePos = this.memory.alloc(16);
+ this.writeValue(
+ valuePos,
+ inputDef,
+ node.props?.[inputName] ??
+ settings[inputDef.setting ?? ""] ??
+ inputDef.value,
+ );
+ args.push(valuePos);
+ }
+
+ let instance = this.instances.get(node.type);
+ if (!instance) {
+ instance = await this.instantiateNode(node.type);
+ this.instances.set(node.type, instance);
+ }
+
+ const writtenLen = instance.exports.execute(...args);
+ this.results.set(node.id, { pos: outputPos, len: writtenLen });
+ }
+
+ private writeValue(pos: number, inputDef: NodeInput, value: unknown) {
+ const view = this.memory.getFloat32View();
+ const intView = this.memory.getInt32View();
+
+ switch (inputDef.type) {
+ case "float":
+ view[pos / 4] = value as number;
+ break;
+ case "integer":
+ case "select":
+ case "seed":
+ intView[pos / 4] = value as number;
+ break;
+ case "boolean":
+ intView[pos / 4] = value ? 1 : 0;
+ break;
+ case "vec3":
+ const arr = value as number[];
+ view[pos / 4] = arr[0];
+ view[pos / 4 + 1] = arr[1];
+ view[pos / 4 + 2] = arr[2];
+ break;
+ }
+ }
+
+ private estimateOutputSize(def: NodeDefinition): number {
+    const sizes: Record<string, number> = {
+ float: 16,
+ integer: 16,
+ boolean: 16,
+ vec3: 16,
+ geometry: 8192,
+ path: 4096,
+ };
+ return sizes[def.outputs?.[0] || "float"] || 64;
+ }
+
+ private async instantiateNode(
+ nodeType: string,
+  ): Promise<WebAssembly.Instance> {
+ const wasmBytes = await this.fetchWasm(nodeType);
+ const module = await WebAssembly.compile(wasmBytes);
+ const importObject = createImportObject(nodeType);
+ return WebAssembly.instantiate(module, importObject);
+ }
+}
+```
+
+## Phase 6: Execution Flow Visualization
+
+```
+┌─────────────────────────────────────────────────────────────────────────────┐
+│ Execution Timeline │
+└─────────────────────────────────────────────────────────────────────────────┘
+
+Step 1: Setup
+ SHARED_MEMORY = new WebAssembly.Memory({ initial: 1024 })
+ memory.offset = 0
+
+Step 2: Execute Node A (math with 3 inputs)
+ outputPos = memory.alloc(16) = 0
+ args = [0, ptr_to_op_type, ptr_to_a, ptr_to_b]
+
+ Node A reads:
+ *ptr_to_op_type → op
+ *ptr_to_a → a
+ *ptr_to_b → b
+
+ Node A returns: vec![result.to_bits()]
+
+ Macro writes result directly to SHARED_MEMORY[0..4]
+ Returns: 4
+
+ results['A'] = { pos: 0, len: 4 }
+ memory.offset = 4
+
+Step 3: Execute Node B (stem with 5 inputs, input[0] from A)
+ outputPos = memory.alloc(4096) = 4
+ args = [4, results['A'].pos, ptr_to_amount, ptr_to_length, ...]
+
+ Node B reads:
+ *results['A'].pos → value from Node A
+ *ptr_to_amount → amount
+ ...
+
+ Node B returns: stem_data Vec (1000 elements = 4000 bytes)
+
+ Macro writes stem_data directly to SHARED_MEMORY[4..4004]
+ Returns: 4000
+
+ results['B'] = { pos: 4, len: 4000 }
+ memory.offset = 4004
+
+Step 4: Execute Node C (output, 1 input from B)
+ outputPos = memory.alloc(16) = 4004
+ args = [4004, results['B'].pos, results['B'].len]
+
+ Node C reads:
+ *results['B'].pos → stem geometry
+
+ Node C returns: vec![1] (identity)
+ Macro writes to SHARED_MEMORY[4004..4008]
+
+ results['C'] = { pos: 4004, len: 4 }
+
+Final: Return SHARED_MEMORY[4004..4008] as geometry result
+```
+
+## Phase 7: Memory Growth Strategy
+
+```typescript
+class MemoryManager {
+ alloc(bytes: number): number {
+ const required = this.offset + bytes;
+ const currentBytes = SHARED_MEMORY.buffer.byteLength;
+
+ if (required > currentBytes) {
+ const pagesNeeded = Math.ceil((required - currentBytes) / 65536);
+      // Memory.grow returns the previous size in pages and throws a RangeError on failure
+      try {
+        SHARED_MEMORY.grow(pagesNeeded);
+      } catch (err) {
+        throw new Error(`Out of memory: need ${bytes} bytes`);
+      }
+
+ this.int32View = new Int32Array(SHARED_MEMORY.buffer);
+ this.float32View = new Float32Array(SHARED_MEMORY.buffer);
+ }
+
+ const pos = this.offset;
+ this.offset += bytes;
+ return pos;
+ }
+}
+```
+
+## Phase 8: Migration Checklist
+
+### Build Configuration
+
+- [ ] Add `--import-memory` to Rust flags in `Cargo.toml`
+- [ ] Ensure no nodes export memory
+
+### Runtime
+
+- [ ] Create `SHARED_MEMORY` instance
+- [ ] Implement `MemoryManager` with alloc/read/write
+- [ ] Create import object factory
+- [ ] Implement `SharedMemoryRuntimeExecutor`
+
+### Macro
+
+- [ ] Parse definition JSON
+- [ ] Validate function signature (N params, `Vec<i32>` return)
+- [ ] Generate wrapper that writes return value to `output_pos`
+- [ ] Add panic hook
+
+### Utilities
+
+- [ ] `read_i32(ptr: *const i32) -> i32`
+- [ ] `read_f32(ptr: *const i32) -> f32`
+- [ ] `read_bool(ptr: *const i32) -> bool`
+- [ ] `read_vec3(ptr: *const i32) -> [f32; 3]`
+- [ ] `read_i32_slice(ptr: *const i32, len: usize) -> &[i32]`
+
+### Nodes
+
+- [ ] `float`, `integer`, `boolean` nodes
+- [ ] `vec3` node
+- [ ] `math` node
+- [ ] `random` node
+- [ ] `box` node
+- [ ] `stem` node
+- [ ] `branch` node
+- [ ] `instance` node
+- [ ] `output` node
+
+## Phase 9: Before vs After
+
+### Before (per-node memory)
+
+```rust
+#[nodarium_execute]
+pub fn execute(input: &[i32]) -> Vec<i32> {
+    let args = split_args(input);
+    let a = evaluate_float(args[0]);
+    let b = evaluate_float(args[1]);
+    vec![(a + b).to_bits() as i32]
+}
+```
+
+### After (shared memory)
+
+```rust
+#[nodarium_execute]
+pub fn execute(a: *const i32, b: *const i32) -> Vec<i32> {
+    use nodarium_utils::read_f32;
+    let a_val = unsafe { read_f32(a) };
+    let b_val = unsafe { read_f32(b) };
+    vec![(a_val + b_val).to_bits() as i32]
+}
+```
+
+**Key differences:**
+
+- Parameters are input pointers, not a slice
+- Use `read_f32` helper instead of `evaluate_float`
+- Macro writes result directly to shared memory
+- All nodes share the same memory import
+
+## Phase 10: Benefits
+
+| Aspect | Before | After |
+| ----------------- | -------------- | -------------------- |
+| Memory | N × ~1MB heaps | 1 × 64-256MB shared |
+| Cross-node access | Copy via JS | Direct read |
+| API | `&[i32]` slice | `*const i32` pointer |
+| Validation | Runtime | Compile-time |
diff --git a/SUMMARY.md b/SUMMARY.md
new file mode 100644
index 0000000..2c472b0
--- /dev/null
+++ b/SUMMARY.md
@@ -0,0 +1,227 @@
+# Nodarium - AI Coding Agent Summary
+
+## Project Overview
+
+Nodarium is a WebAssembly-based visual programming language used to build [nodes.max-richter.dev](https://nodes.max-richter.dev), a procedural 3D plant modeling tool. The system allows users to create visual node graphs where each node is a compiled WebAssembly module.
+
+## Technology Stack
+
+**Frontend (SvelteKit):**
+
+- Framework: SvelteKit with Svelte 5
+- 3D Rendering: Three.js via Threlte
+- Styling: Tailwind CSS 4
+- Build Tool: Vite
+- State Management: Custom store-client package
+- WASM Integration: vite-plugin-wasm, comlink
+
+**Backend/Core (Rust/WASM):**
+
+- Language: Rust
+- Output: WebAssembly (wasm32-unknown-unknown target)
+- Build Tool: cargo
+- Procedural Macros: custom macros package
+
+**Package Management:**
+
+- Node packages: pnpm workspace (v10.28.1)
+- Rust packages: Cargo workspace
+
+## Directory Structure
+
+```
+nodarium/
+├── app/ # SvelteKit web application
+│ ├── src/
+│ │ ├── lib/ # App-specific components and utilities
+│ │ ├── routes/ # SvelteKit routes (pages)
+│ │ ├── app.css # Global styles
+│ │ └── app.html # HTML template
+│ ├── static/
+│ │ └── nodes/ # Compiled WASM node files served statically
+│ ├── package.json # App dependencies
+│ ├── svelte.config.js # SvelteKit configuration
+│ ├── vite.config.ts # Vite configuration
+│ └── tsconfig.json # TypeScript configuration
+│
+├── packages/ # Shared workspace packages
+│ ├── ui/ # Svelte UI component library (published as @nodarium/ui)
+│ │ ├── src/ # UI components
+│ │ ├── static/ # Static assets for UI
+│ │ ├── dist/ # Built output
+│ │ └── package.json
+│ ├── registry/ # Node registry with IndexedDB persistence (@nodarium/registry)
+│ │ └── src/
+│ ├── types/ # Shared TypeScript types (@nodarium/types)
+│ │ └── src/
+│ ├── utils/ # Shared utilities (@nodarium/utils)
+│ │ └── src/
+│ └── macros/ # Rust procedural macros for node development
+│
+├── nodes/ # WebAssembly node packages (Rust)
+│ └── max/plantarium/ # Plantarium nodes namespace
+│ ├── box/ # Box geometry node
+│ ├── branch/ # Branch generation node
+│ ├── float/ # Float value node
+│ ├── gravity/ # Gravity simulation node
+│ ├── instance/ # Geometry instancing node
+│ ├── math/ # Math operations node
+│ ├── noise/ # Noise generation node
+│ ├── output/ # Output node for results
+│ ├── random/ # Random value node
+│ ├── rotate/ # Rotation transformation node
+│ ├── stem/ # Stem geometry node
+│ ├── triangle/ # Triangle geometry node
+│ ├── vec3/ # Vector3 manipulation node
+│ └── .template/ # Node template for creating new nodes
+│
+├── docs/ # Documentation
+│ ├── ARCHITECTURE.md # System architecture overview
+│ ├── DEVELOPING_NODES.md # Guide for creating new nodes
+│ ├── NODE_DEFINITION.md # Node definition schema
+│ └── PLANTARIUM.md # Plantarium-specific documentation
+│
+├── Cargo.toml # Rust workspace configuration
+├── package.json # Root npm scripts
+├── pnpm-workspace.yaml # pnpm workspace configuration
+├── pnpm-lock.yaml # Locked dependency versions
+└── README.md # Project readme
+```
+
+## Node System Architecture
+
+### What is a Node?
+
+Nodes are WebAssembly modules that:
+
+- Have a unique ID (e.g., `max/plantarium/stem`)
+- Define inputs with types and default values
+- Define outputs they produce
+- Execute logic when called with arguments
+
+### Node Definition Schema
+
+Nodes are defined via `definition.json` embedded in each WASM module:
+
+```json
+{
+ "id": "namespace/category/node-name",
+ "outputs": ["geometry"],
+ "inputs": {
+ "height": { "type": "float", "value": 1.0 },
+ "radius": { "type": "float", "value": 0.1 }
+ }
+}
+```
+
+For now, each node is limited to a single output.
+
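+The same schema expressed as a TypeScript shape, as a sketch for orientation only (the real
+types presumably live in `@nodarium/types`; optional fields like `min`, `max`, `setting` and
+`external` are inferred from other examples in this repo):
+
+```typescript
+interface NodeDefinitionSketch {
+  id: `${string}/${string}/${string}`; // e.g. "max/plantarium/stem"
+  outputs: string[]; // currently limited to a single entry
+  inputs?: Record<
+    string,
+    {
+      type: string; // "float" | "integer" | "boolean" | "vec3" | ...
+      value?: unknown; // default value
+      min?: number;
+      max?: number;
+      setting?: string; // name of a global setting to read the value from
+      external?: boolean;
+    }
+  >;
+  meta?: { description?: string };
+}
+```
+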
+### Node Execution
+
+Nodes receive serialized arguments and return serialized outputs. The `nodarium_utils` Rust crate provides helpers for (see the sketch after this list):
+
+- Parsing input arguments
+- Creating geometry data
+- Concatenating output vectors
+
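+A minimal sketch of how these helpers are used inside a node today, taken from the "before"
+example in `SHARED_MEMORY_REFACTOR_PLAN.md` (exact import paths and signatures may differ):
+
+```rust
+use nodarium_utils::{evaluate_float, split_args};
+
+#[nodarium_execute]
+pub fn execute(input: &[i32]) -> Vec<i32> {
+    // Parse the nested bracket encoding into one segment per input
+    let args = split_args(input);
+    // Decode two float inputs
+    let a = evaluate_float(args[0]);
+    let b = evaluate_float(args[1]);
+    // Re-encode the result as IEEE 754 bits stored in an i32
+    vec![(a + b).to_bits() as i32]
+}
+```
+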
+### Node Registration
+
+Nodes are:
+
+1. Compiled to WASM files in `target/wasm32-unknown-unknown/release/`
+2. Copied to `app/static/nodes/` for serving
+3. Registered in the browser via IndexedDB using the registry package
+
+## Key Dependencies
+
+**Frontend:**
+
+- `@sveltejs/kit` - Application framework
+- `@threlte/core` & `@threlte/extras` - Three.js Svelte integration
+- `three` - 3D graphics library
+- `tailwindcss` - CSS framework
+- `comlink` - WebWorker RPC
+- `idb` - IndexedDB wrapper
+- `wabt` - WebAssembly binary toolkit
+
+**Rust/WASM:**
+
+- Language: Rust (compiled with plain cargo)
+- Output: WebAssembly (wasm32-unknown-unknown target)
+- Generic WASM wrapper for language-agnostic node development
+- `glam` - Math library (Vec2, Vec3, Mat4, etc.)
+- `nodarium_macros` - Custom procedural macros
+- `nodarium_utils` - Shared node utilities
+
+## Build Commands
+
+From root directory:
+
+```bash
+# Install dependencies
+pnpm i
+
+# Build all WASM nodes (compiles Rust, copies to app/static)
+pnpm build:nodes
+
+# Build the app (builds UI library + SvelteKit app)
+pnpm build:app
+
+# Full build (nodes + app)
+pnpm build
+
+# Development
+pnpm dev # Run all dev commands in parallel
+pnpm dev:nodes # Watch nodes/, auto-rebuild on changes
+pnpm dev:app_ui # Watch app and UI package
+pnpm dev_ui # Watch UI package only
+```
+
+## Workspace Packages
+
+The project uses pnpm workspaces with the following packages:
+
+| Package | Location | Purpose |
+| ------------------ | ------------------ | ------------------------------ |
+| @nodarium/app | app/ | Main SvelteKit application |
+| @nodarium/ui | packages/ui/ | Reusable UI component library |
+| @nodarium/registry | packages/registry/ | Node registry with persistence |
+| @nodarium/types | packages/types/ | Shared TypeScript types |
+| @nodarium/utils | packages/utils/ | Shared utilities |
+| nodarium macros | packages/macros/ | Rust procedural macros |
+
+## Configuration Files
+
+- `.dprint.jsonc` - Dprint formatter configuration
+- `svelte.config.js` - SvelteKit configuration (app and ui)
+- `vite.config.ts` - Vite bundler configuration
+- `tsconfig.json` - TypeScript configuration (app and packages)
+- `Cargo.toml` - Rust workspace with member packages
+- `flake.nix` - Nix development environment
+
+## Development Workflow
+
+### Adding a New Node
+
+1. Copy the `.template` directory in `nodes/max/plantarium/` to create a new node directory
+2. Define node in `src/definition.json`
+3. Implement logic in `src/lib.rs`
+4. Build with `cargo build --release --target wasm32-unknown-unknown`
+5. Test by dragging onto the node graph
+
+### Modifying UI Components
+
+1. Changes to `packages/ui/` automatically rebuild with watch mode
+2. App imports from `@nodarium/ui`
+3. Run `pnpm dev:app_ui` for hot reload
+
+## Important Notes for AI Agents
+
+1. **WASM Compilation**: Nodes require `wasm32-unknown-unknown` target (`rustup target add wasm32-unknown-unknown`)
+2. **Cross-Compilation**: WASM build happens on host, not in containers/VMs
+3. **Static Serving**: Compiled WASM files must exist in `app/static/nodes/` before dev server runs
+4. **Workspace Dependencies**: Use `workspace:*` protocol for internal packages
+5. **Threlte Version**: Uses Threlte 8.x, not 7.x (important for 3D component APIs)
+6. **Svelte 5**: Project uses Svelte 5 with runes (`$state`, `$derived`, `$effect`)
+7. **Tailwind 4**: Uses Tailwind CSS v4 with `@tailwindcss/vite` plugin
+8. **IndexedDB**: Registry uses IDB for persistent node storage in browser
diff --git a/SUMMARY_RUNTIME.md b/SUMMARY_RUNTIME.md
new file mode 100644
index 0000000..bd8a13b
--- /dev/null
+++ b/SUMMARY_RUNTIME.md
@@ -0,0 +1,294 @@
+# Node Compilation and Runtime Execution
+
+## Overview
+
+Nodarium nodes are WebAssembly modules written in Rust. Each node is a compiled WASM binary that exposes a standardized C ABI interface. The system uses procedural macros to generate the necessary boilerplate for node definitions, memory management, and execution.
+
+## Node Compilation
+
+### 1. Node Definition (JSON)
+
+Each node has a `src/input.json` file that defines:
+
+```json
+{
+ "id": "max/plantarium/stem",
+ "meta": { "description": "Creates a stem" },
+ "outputs": ["path"],
+ "inputs": {
+ "origin": { "type": "vec3", "value": [0, 0, 0], "external": true },
+ "amount": { "type": "integer", "value": 1, "min": 1, "max": 64 },
+ "length": { "type": "float", "value": 5 },
+ "thickness": { "type": "float", "value": 0.2 }
+ }
+}
+```
+
+### 2. Procedural Macros
+
+The `nodarium_macros` crate provides two procedural macros:
+
+#### `#[nodarium_execute]`
+
+Transforms a Rust function into a WASM-compatible entry point:
+
+```rust
+#[nodarium_execute]
+pub fn execute(input: &[i32]) -> Vec<i32> {
+ // Node logic here
+}
+```
+
+The macro generates:
+- **C ABI wrapper**: Converts the WASM interface to a standard C FFI
+- **`execute` function**: Takes `(ptr: *const i32, len: usize)` and returns `*mut i32`
+- **Memory allocation**: `__alloc(len: usize) -> *mut i32` for buffer allocation
+- **Memory deallocation**: `__free(ptr: *mut i32, len: usize)` for cleanup
+- **Static output buffer**: `OUTPUT_BUFFER` for returning results
+- **Panic hook**: Routes panics through `host_log_panic` for debugging
+- **Internal logic wrapper**: Wraps the original function
+
+#### `nodarium_definition_file!("path")`
+
+Embeds the node definition JSON into the WASM binary:
+
+```rust
+nodarium_definition_file!("src/input.json");
+```
+
+Generates (roughly sketched below):
+- **`DEFINITION_DATA`**: Static byte array in `nodarium_definition` section
+- **`get_definition_ptr()`**: Returns pointer to definition data
+- **`get_definition_len()`**: Returns length of definition data
+
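+A rough sketch of the shape of this expansion; the real generated code may differ, and the
+`link_section` name and `include_bytes!` path here are assumptions based on the bullets above:
+
+```rust
+// Embed the raw JSON bytes in a dedicated custom section of the WASM binary.
+#[link_section = "nodarium_definition"]
+pub static DEFINITION_DATA: [u8; include_bytes!("input.json").len()] =
+    *include_bytes!("input.json");
+
+#[no_mangle]
+pub extern "C" fn get_definition_ptr() -> *const u8 {
+    DEFINITION_DATA.as_ptr()
+}
+
+#[no_mangle]
+pub extern "C" fn get_definition_len() -> usize {
+    DEFINITION_DATA.len()
+}
+```
+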
+### 3. Build Process
+
+Nodes are compiled with:
+```bash
+cargo build --release --target wasm32-unknown-unknown
+```
+
+The resulting `.wasm` files are copied to `app/static/nodes/` for serving.
+
+## Node Execution Runtime
+
+### Architecture
+
+```
+┌─────────────────────────────────────────────────────────────┐
+│ WebWorker Thread │
+│ ┌─────────────────────────────────────────────────────────┐│
+│ │ WorkerRuntimeExecutor ││
+│ │ ┌───────────────────────────────────────────────────┐ ││
+│ │ │ MemoryRuntimeExecutor ││
+│ │ │ ┌─────────────────────────────────────────────┐ ││
+│ │ │ │ Node Registry (WASM + Definitions) ││
+│ │ │ └─────────────────────────────────────────────┘ ││
+│ │ │ ┌─────────────────────────────────────────────┐ ││
+│ │ │ │ Execution Engine (Bottom-Up Evaluation) ││
+│ │ │ └─────────────────────────────────────────────┘ ││
+│ │ └───────────────────────────────────────────────────┘ ││
+│ └─────────────────────────────────────────────────────────┘│
+└─────────────────────────────────────────────────────────────┘
+```
+
+### 1. MemoryRuntimeExecutor
+
+The core execution engine in `runtime-executor.ts`:
+
+#### Metadata Collection (`addMetaData`)
+
+1. Load node definitions from registry
+2. Build parent/child relationships from graph edges
+3. Calculate execution depth via reverse BFS from the output node (see the sketch after this list)
+
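+A sketch of the depth calculation; type and field names are illustrative, not the real
+implementation:
+
+```typescript
+type GraphNode = { id: string; parents: GraphNode[]; depth?: number };
+
+// Walk from the output node towards the leaves, pushing each parent at least
+// one level deeper than its child, so leaves end up with the highest depth.
+function assignDepths(outputNode: GraphNode): void {
+  outputNode.depth = 0;
+  const queue: GraphNode[] = [outputNode];
+  while (queue.length > 0) {
+    const node = queue.shift()!;
+    for (const parent of node.parents) {
+      if (parent.depth === undefined || parent.depth < node.depth! + 1) {
+        parent.depth = node.depth! + 1;
+        queue.push(parent);
+      }
+    }
+  }
+}
+```
+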
+#### Node Sorting
+
+Nodes are sorted by depth (highest depth first) for bottom-up execution:
+
+```
+Depth 3: n3 n6
+Depth 2: n2 n4 n5
+Depth 1: n1
+Depth 0: Output
+Execution order: n3, n6, n2, n4, n5, n1, Output
+```
+
+#### Input Collection
+
+For each node, inputs are gathered from the following sources, in order of precedence (see the sketch after this list):
+1. **Connected nodes**: Results from parent nodes in the graph
+2. **Node props**: Values stored directly on the node instance
+3. **Settings**: Global settings mapped via `setting` property
+4. **Defaults**: Values from node definition
+
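+A sketch of that precedence; the helper name and field names are assumptions, loosely based
+on the executor sketch in `SHARED_MEMORY_REFACTOR_PLAN.md`, which applies the same order:
+
+```typescript
+function resolveInputValue(
+  node: { props?: Record<string, unknown>; state: { inputNodes: Record<string, { id: string } | undefined> } },
+  inputName: string,
+  inputDef: { value?: unknown; setting?: string },
+  settings: Record<string, unknown>,
+  results: Map<string, unknown>,
+): unknown {
+  const parent = node.state.inputNodes[inputName];
+  if (parent) return results.get(parent.id); // 1. connected node result
+  if (node.props?.[inputName] !== undefined) return node.props[inputName]; // 2. node props
+  if (inputDef.setting && settings[inputDef.setting] !== undefined) {
+    return settings[inputDef.setting]; // 3. global setting
+  }
+  return inputDef.value; // 4. definition default
+}
+```
+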
+#### Input Encoding
+
+Values are encoded as an `Int32Array` (see the encoding sketch after this list):
+- **Floats**: IEEE 754 bits cast to i32
+- **Vectors**: `[0, count, v1, v2, v3, 1, 1]` (nested bracket format)
+- **Booleans**: `0` or `1`
+- **Integers**: Direct i32 value
+
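+A sketch of the float and vec3 encoders implied by this table (`encodeFloat` is the name used
+in the examples below; the bracket layout follows the format described under Data Encoding
+Format further down):
+
+```typescript
+// Reinterpret an f32's IEEE 754 bit pattern as a signed 32-bit integer.
+export function encodeFloat(value: number): number {
+  const buffer = new ArrayBuffer(4);
+  new Float32Array(buffer)[0] = value;
+  return new Int32Array(buffer)[0];
+}
+
+// Wrap a vec3 in the nested bracket format: [0, count, ...items, 1, 1]
+export function encodeVec3([x, y, z]: [number, number, number]): number[] {
+  return [0, 4, encodeFloat(x), encodeFloat(y), encodeFloat(z), 1, 1];
+}
+
+// encodeFloat(5.0) === 1084227584, matching the example further below
+```
+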
+#### Caching
+
+Results are cached using:
+```typescript
+inputHash = `node-${node.id}-${fastHashArrayBuffer(encoded_inputs)}`
+```
+
+The cache uses LRU eviction (default size: 50 entries).
+
+### 2. Execution Flow
+
+```typescript
+async execute(graph: Graph, settings) {
+ // 1. Load definitions and build node relationships
+ const [outputNode, nodes] = await this.addMetaData(graph);
+
+ // 2. Sort nodes by depth (bottom-up)
+ const sortedNodes = nodes.sort((a, b) => b.depth - a.depth);
+
+ // 3. Execute each node
+ for (const node of sortedNodes) {
+ const inputs = this.collectInputs(node, settings);
+ const encoded = concatEncodedArrays(inputs);
+ const result = nodeType.execute(encoded);
+ this.results[node.id] = result;
+ }
+
+ // 4. Return output node result
+ return this.results[outputNode.id];
+}
+```
+
+### 3. Worker Isolation
+
+`WorkerRuntimeExecutor` runs execution in a WebWorker via Comlink:
+
+```typescript
+class WorkerRuntimeExecutor implements RuntimeExecutor {
+ private worker = new ComlinkWorker(...);
+
+ async execute(graph, settings) {
+ return this.worker.executeGraph(graph, settings);
+ }
+}
+```
+
+The worker backend (`worker-runtime-executor-backend.ts`):
+- Creates a single `MemoryRuntimeExecutor` instance
+- Manages caching state
+- Collects performance metrics
+
+### 4. Remote Execution (Optional)
+
+`RemoteRuntimeExecutor` can execute graphs on a remote server:
+
+```typescript
+class RemoteRuntimeExecutor implements RuntimeExecutor {
+ async execute(graph, settings) {
+ const res = await fetch(this.url, {
+ method: "POST",
+ body: JSON.stringify({ graph, settings })
+ });
+ return new Int32Array(await res.arrayBuffer());
+ }
+}
+```
+
+## Data Encoding Format
+
+### Bracket Notation
+
+Inputs and outputs use a nested bracket encoding:
+
+```
+[0, count, item1, item2, ..., 1, 1]
+ ^  ^      └──── items ────┘  ^  ^
+ |  |                         |  |
+ |  |                         |  +-- closing bracket (1)
+ |  |                         +-- closing bracket (1)
+ |  +-- number of items + 1
+ +-- opening bracket (0)
+```
+
+### Example Encodings
+
+**Float (5.0)**:
+```typescript
+encodeFloat(5.0) // → 1084227584 (IEEE 754 bits as i32)
+```
+
+**Vec3 ([1, 2, 3])**:
+```typescript
+[0, 4, encodeFloat(1), encodeFloat(2), encodeFloat(3), 1, 1]
+```
+
+**Nested Math Expression**:
+```
+[0, 3, 0, 2, 0, 3, 0, 0, 0, 3, 7549747, 127, 1, 1, ...]
+```
+
+### Decoding Utilities
+
+From `packages/utils/src/tree.rs`:
+- `split_args()`: Parses nested bracket arrays into segments
+- `evaluate_float()`: Recursively evaluates and decodes float expressions
+- `evaluate_int()`: Evaluates integer/math node expressions
+- `evaluate_vec3()`: Decodes vec3 arrays
+
+## Geometry Data Format
+
+### Path Data
+
+Paths represent procedural plant structures:
+
+```
+[0, count, [0, header_size, node_type, depth, x, y, z, w, ...], 1, 1]
+```
+
+Each point has 4 values: x, y, z position + thickness (w).
+
+### Geometry Data
+
+Meshes use a similar format with vertices and face indices.
+
+## Performance Tracking
+
+The runtime collects detailed performance metrics:
+- `collect-metadata`: Time to build node graph
+- `collected-inputs`: Time to gather inputs
+- `encoded-inputs`: Time to encode inputs
+- `hash-inputs`: Time to compute cache hash
+- `cache-hit`: 1 if cache hit, 0 if miss
+- `node/{node_type}`: Time per node execution
+
+## Caching Strategy
+
+### MemoryRuntimeCache
+
+LRU cache implementation:
+```typescript
+class MemoryRuntimeCache {
+ private map = new Map();
+ size: number = 50;
+
+ get(key) { /* move to front */ }
+ set(key, value) { /* evict oldest if at capacity */ }
+}
+```
+
+### IndexDBCache
+
+For persistence across sessions, the registry uses IndexedDB caching.
+
+## Summary
+
+The Nodarium node system works as follows:
+
+1. **Compilation**: Rust functions are decorated with macros that generate C ABI WASM exports
+2. **Registration**: Node definitions are embedded in WASM and loaded at runtime
+3. **Graph Analysis**: Runtime builds node relationships and execution order
+4. **Bottom-Up Execution**: Nodes execute from leaves to output
+5. **Caching**: Results are cached per-node-inputs hash for performance
+6. **Isolation**: Execution runs in a WebWorker to prevent main thread blocking
diff --git a/app/Dockerfile b/app/Dockerfile
index a059a66..3b43fbd 100644
--- a/app/Dockerfile
+++ b/app/Dockerfile
@@ -39,5 +39,6 @@ server {
EOF
COPY --from=builder /app/app/build /app
+COPY --from=builder /app/packages/ui/build /app/ui
EXPOSE 80
diff --git a/app/src/lib/graph-interface/graph-manager.svelte.ts b/app/src/lib/graph-interface/graph-manager.svelte.ts
index d0134d7..78bd296 100644
--- a/app/src/lib/graph-interface/graph-manager.svelte.ts
+++ b/app/src/lib/graph-interface/graph-manager.svelte.ts
@@ -25,14 +25,14 @@ const clone = 'structuredClone' in self
? self.structuredClone
: (args: any) => JSON.parse(JSON.stringify(args));
-function areSocketsCompatible(
+export function areSocketsCompatible(
output: string | undefined,
inputs: string | (string | undefined)[] | undefined
) {
if (Array.isArray(inputs) && output) {
- return inputs.includes(output);
+ return inputs.includes('*') || inputs.includes(output);
}
- return inputs === output;
+ return inputs === output || inputs === '*';
}
function areEdgesEqual(firstEdge: Edge, secondEdge: Edge) {
@@ -268,14 +268,7 @@ export class GraphManager extends EventEmitter<{
private _init(graph: Graph) {
const nodes = new Map(
graph.nodes.map((node) => {
- const nodeType = this.registry.getNode(node.type);
- const n = node as NodeInstance;
- if (nodeType) {
- n.state = {
- type: nodeType
- };
- }
- return [node.id, n];
+ return [node.id, node as NodeInstance];
})
);
@@ -300,6 +293,30 @@ export class GraphManager extends EventEmitter<{
this.execute();
}
+ private async loadAllCollections() {
+ // Fetch all nodes from all collections of the loaded nodes
+ const nodeIds = Array.from(new Set([...this.graph.nodes.map((n) => n.type)]));
+ const allCollections = new Set<`${string}/${string}`>();
+ for (const id of nodeIds) {
+ const [user, collection] = id.split('/');
+ allCollections.add(`${user}/${collection}`);
+ }
+
+ const allCollectionIds = await Promise
+ .all([...allCollections]
+ .map(async (collection) =>
+ remoteRegistry
+ .fetchCollection(collection)
+ .then((collection: { nodes: { id: NodeId }[] }) => {
+ return collection.nodes.map(n => n.id.replace(/\.wasm$/, '') as NodeId);
+ })
+ ));
+
+ const missingNodeIds = [...new Set(allCollectionIds.flat())];
+
+ this.registry.load(missingNodeIds);
+ }
+
async load(graph: Graph) {
const a = performance.now();
@@ -308,25 +325,16 @@ export class GraphManager extends EventEmitter<{
this.status = 'loading';
this.id = graph.id;
- logger.info('loading graph', { nodes: graph.nodes, edges: graph.edges, id: graph.id });
-
const nodeIds = Array.from(new Set([...graph.nodes.map((n) => n.type)]));
- await this.registry.load(nodeIds);
- // Fetch all nodes from all collections of the loaded nodes
- const allCollections = new Set<`${string}/${string}`>();
- for (const id of nodeIds) {
- const [user, collection] = id.split('/');
- allCollections.add(`${user}/${collection}`);
- }
- for (const collection of allCollections) {
- remoteRegistry
- .fetchCollection(collection)
- .then((collection: { nodes: { id: NodeId }[] }) => {
- const ids = collection.nodes.map((n) => n.id);
- return this.registry.load(ids);
- });
- }
+ logger.info('loading graph', {
+ nodes: graph.nodes,
+ edges: graph.edges,
+ id: graph.id,
+ ids: nodeIds
+ });
+
+ await this.registry.load(nodeIds);
logger.info('loaded node types', this.registry.getAllNodes());
@@ -384,7 +392,9 @@ export class GraphManager extends EventEmitter<{
this.loaded = true;
logger.log(`Graph loaded in ${performance.now() - a}ms`);
+
setTimeout(() => this.execute(), 100);
+ this.loadAllCollections(); // lazily load all nodes from all collections
}
getAllNodes() {
@@ -491,10 +501,10 @@ export class GraphManager extends EventEmitter<{
const inputs = Object.entries(to.state?.type?.inputs ?? {});
const outputs = from.state?.type?.outputs ?? [];
for (let i = 0; i < inputs.length; i++) {
- const [inputName, input] = inputs[0];
+ const [inputName, input] = inputs[i];
for (let o = 0; o < outputs.length; o++) {
- const output = outputs[0];
- if (input.type === output) {
+ const output = outputs[o];
+ if (input.type === output || input.type === '*') {
return this.createEdge(from, o, to, inputName);
}
}
@@ -596,11 +606,14 @@ export class GraphManager extends EventEmitter<{
return;
}
+ const fromType = from.state.type || this.registry.getNode(from.type);
+ const toType = to.state.type || this.registry.getNode(to.type);
+
// check if socket types match
- const fromSocketType = from.state?.type?.outputs?.[fromSocket];
- const toSocketType = [to.state?.type?.inputs?.[toSocket]?.type];
- if (to.state?.type?.inputs?.[toSocket]?.accepts) {
- toSocketType.push(...(to?.state?.type?.inputs?.[toSocket]?.accepts || []));
+ const fromSocketType = fromType?.outputs?.[fromSocket];
+ const toSocketType = [toType?.inputs?.[toSocket]?.type];
+ if (toType?.inputs?.[toSocket]?.accepts) {
+ toSocketType.push(...(toType?.inputs?.[toSocket]?.accepts || []));
}
if (!areSocketsCompatible(fromSocketType, toSocketType)) {
@@ -723,8 +736,9 @@ export class GraphManager extends EventEmitter<{
}
getPossibleSockets({ node, index }: Socket): [NodeInstance, string | number][] {
- const nodeType = node?.state?.type;
+ const nodeType = this.registry.getNode(node.type);
if (!nodeType) return [];
+ console.log({ index });
const sockets: [NodeInstance, string | number][] = [];
@@ -739,7 +753,7 @@ export class GraphManager extends EventEmitter<{
const ownType = nodeType?.inputs?.[index].type;
for (const node of nodes) {
- const nodeType = node?.state?.type;
+ const nodeType = this.registry.getNode(node.type);
const inputs = nodeType?.outputs;
if (!inputs) continue;
for (let index = 0; index < inputs.length; index++) {
@@ -767,7 +781,7 @@ export class GraphManager extends EventEmitter<{
const ownType = nodeType.outputs?.[index];
for (const node of nodes) {
- const inputs = node?.state?.type?.inputs;
+ const inputs = this.registry.getNode(node.type)?.inputs;
if (!inputs) continue;
for (const key in inputs) {
const otherType = [inputs[key].type];
@@ -783,6 +797,7 @@ export class GraphManager extends EventEmitter<{
}
}
+ console.log(`Found ${sockets.length} possible sockets`, sockets);
return sockets;
}
diff --git a/app/src/lib/graph-interface/graph-state.svelte.ts b/app/src/lib/graph-interface/graph-state.svelte.ts
index aaf1f7c..37931fa 100644
--- a/app/src/lib/graph-interface/graph-state.svelte.ts
+++ b/app/src/lib/graph-interface/graph-state.svelte.ts
@@ -170,11 +170,14 @@ export class GraphState {
(node?.state?.y ?? node.position[1]) + 2.5 + 10 * index
];
} else {
- const _index = Object.keys(node.state?.type?.inputs || {}).indexOf(index);
- return [
+ const inputs = node.state.type?.inputs || this.graph.registry.getNode(node.type)?.inputs
+ || {};
+ const _index = Object.keys(inputs).indexOf(index);
+ const pos = [
node?.state?.x ?? node.position[0],
(node?.state?.y ?? node.position[1]) + 10 + 10 * _index
- ];
+ ] as [number, number];
+ return pos;
}
}
@@ -250,7 +253,7 @@ export class GraphState {
let { node, index, position } = socket;
- // remove existing edge
+ // if the socket is an input socket -> remove existing edges
if (typeof index === 'string') {
const edges = this.graph.getEdgesToNode(node);
for (const edge of edges) {
diff --git a/app/src/lib/graph-interface/graph/Graph.svelte b/app/src/lib/graph-interface/graph/Graph.svelte
index d0dbd5f..84ddaea 100644
--- a/app/src/lib/graph-interface/graph/Graph.svelte
+++ b/app/src/lib/graph-interface/graph/Graph.svelte
@@ -1,23 +1,23 @@
-
+
{@render children()}
diff --git a/app/src/routes/dev/+page.svelte b/app/src/routes/dev/+page.svelte
index 6166fa2..428cc01 100644
--- a/app/src/routes/dev/+page.svelte
+++ b/app/src/routes/dev/+page.svelte
@@ -1,113 +1,226 @@
-
- {#if nodeInstance}
-
- {/if}
-
+ ev.key === "r" && handleResult()}
+/>
-
-
- {JSON.stringify(nodeInstance?.props)}
-
-
+ {#if visibleRows?.length}
+
+
+ {/if}
-
- {#if nodeWasm}
-
- {/if}
-
+ {#if isCalculating}
+
+ {/if}
+
+ handleSave(g)}
+ onresult={(res) => handleResult(res)}
+ />
+
+
+
-
- {#await nodeRegistry.fetchCollection("max/plantarium")}
-
Loading Nodes...
- {:then result}
- {#each result.nodes as n}
-
- {/each}
- {/await}
-
-
+ >