feat/arena-runtime #27

Open
max wants to merge 9 commits from feat/arena-runtime into main
22 changed files with 1668 additions and 273 deletions
Showing only changes of commit 47882a832d - Show all commits

9
.cargo/config.toml Normal file
View File

@@ -0,0 +1,9 @@
[build]
rustflags = [
"-C",
"link-arg=--import-memory",
"-C",
"link-arg=--initial-memory=67108864", # 64 MiB
"-C",
"link-arg=--max-memory=536870912", # 512 MiB
]

View File

@@ -12,7 +12,7 @@ Nodarium
</div> </div>
Currently this visual programming language is used to develop https://nodes.max-richter.dev, a procedural modelling tool for 3d-plants. Currently this visual programming language is used to develop <https://nodes.max-richter.dev>, a procedural modelling tool for 3d-plants.
# Table of contents # Table of contents
@@ -22,12 +22,11 @@ Currently this visual programming language is used to develop https://nodes.max-
# Developing # Developing
### Install prerequisites: ### Install prerequisites
- [Node.js](https://nodejs.org/en/download) - [Node.js](https://nodejs.org/en/download)
- [pnpm](https://pnpm.io/installation) - [pnpm](https://pnpm.io/installation)
- [rust](https://www.rust-lang.org/tools/install) - [rust](https://www.rust-lang.org/tools/install)
- wasm-pack
### Install dependencies ### Install dependencies

View File

@@ -0,0 +1,783 @@
# Shared Memory Refactor Plan
## Executive Summary
Migrate to a single shared `WebAssembly.Memory` instance imported by all nodes using `--import-memory`. The `#[nodarium_execute]` macro writes the function's return value directly to shared memory at the specified offset.
## Architecture Overview
```
┌─────────────────────────────────────────────────────────────────────┐
│ Shared WebAssembly.Memory │
│ ┌───────────────────────────────────────────────────────────────┐ │
│ │ [Node A output] [Node B output] [Node C output] ... │ │
│ │ ┌────────────┐ ┌────────────┐ ┌────────────┐ │ │
│ │ │ Vec<i32> │ │ Vec<i32> │ │ Vec<i32> │ │ │
│ │ │ 4 bytes │ │ 12 bytes │ │ 2KB │ │ │
│ │ └────────────┘ └────────────┘ └────────────┘ │ │
│ │ │ │
│ │ offset: 0 ────────────────────────────────────────────────► │ │
│ └───────────────────────────────────────────────────────────────┘ │
└─────────────────────────────────────────────────────────────────────┘
│ import { memory } from "env"
┌─────────────────────────┼─────────────────────────┐
│ │ │
┌────┴────┐ ┌────┴────┐ ┌────┴────┐
│ Node A │ │ Node B │ │ Node C │
│ WASM │ │ WASM │ │ WASM │
└─────────┘ └─────────┘ └─────────┘
```
## Phase 1: Compilation Configuration
### 1.1 Cargo Config
```toml
# nodes/max/plantarium/box/.cargo/config.toml
[build]
rustflags = ["-C", "link-arg=--import-memory"]
```
Or set it once for the whole workspace in the root `.cargo/config.toml` (as this PR does); note that `rustflags` is not a valid key under `[profile.release]` in `Cargo.toml` on stable toolchains:
```toml
# .cargo/config.toml (workspace root)
[build]
rustflags = ["-C", "link-arg=--import-memory"]
```
### 1.2 Import Memory Semantics
With `--import-memory`:
- Nodes **import** memory from the host (not export their own)
- All nodes receive the same `WebAssembly.Memory` instance
- Memory is read/write accessible from all modules
- No `memory.grow` needed (host manages allocation)
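From the host's perspective, sharing memory is a matter of passing the same `WebAssembly.Memory` object as the `env.memory` import to every instantiation. A minimal sketch (the node URLs are placeholders, and the logging imports from Phase 5 are omitted, so real node modules would also need those to instantiate):
```typescript
// One Memory instance, imported by every node module compiled with --import-memory.
const memory = new WebAssembly.Memory({ initial: 1024, maximum: 4096 });

async function instantiateNode(url: string): Promise<WebAssembly.Instance> {
  const bytes = await fetch(url).then((r) => r.arrayBuffer());
  const module = await WebAssembly.compile(bytes);
  // The "env.memory" key must match the import generated by --import-memory.
  return WebAssembly.instantiate(module, { env: { memory } });
}

// Both instances now read and write the same linear memory.
const nodeA = await instantiateNode("/nodes/max/plantarium/math.wasm");
const nodeB = await instantiateNode("/nodes/max/plantarium/vec3.wasm");
```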
## Phase 2: Macro Design
### 2.1 Clean Node API
```rust
// input.json has 3 inputs: op_type, a, b
nodarium_definition_file!("src/input.json");
#[nodarium_execute]
pub fn execute(op_type: *const i32, a: *const i32, b: *const i32) -> Vec<i32> {
// Read inputs directly from shared memory
let op = unsafe { *op_type };
let a_val = f32::from_bits(unsafe { *a } as u32);
let b_val = f32::from_bits(unsafe { *b } as u32);
let result = match op {
0 => a_val + b_val,
1 => a_val - b_val,
2 => a_val * b_val,
3 => a_val / b_val,
_ => 0.0,
};
// Return Vec<i32>, macro handles writing to shared memory
vec![result.to_bits() as i32]
}
```
### 2.2 Macro Implementation
```rust
// packages/macros/src/lib.rs
#[proc_macro_attribute]
pub fn nodarium_execute(_attr: TokenStream, item: TokenStream) -> TokenStream {
let input_fn = parse_macro_input!(item as syn::ItemFn);
let fn_name = input_fn.sig.ident.clone();
// Parse definition to get input count
let project_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let def: NodeDefinition = serde_json::from_str(&fs::read_to_string(
Path::new(&project_dir).join("src/input.json")
).unwrap()).unwrap();
let input_count = def.inputs.as_ref().map(|i| i.len()).unwrap_or(0);
// Validate signature
validate_signature(&input_fn.sig, input_count, &def);
// Generate wrapper
generate_execute_wrapper(input_fn, &fn_name, input_count)
}
fn validate_signature(fn_sig: &syn::Signature, expected_inputs: usize, def: &NodeDefinition) {
let param_count = fn_sig.inputs.len();
if param_count != expected_inputs {
panic!(
"Execute function has {} parameters but definition has {} inputs\n\
Definition inputs: {:?}\n\
Expected signature:\n\
pub fn execute({}) -> Vec<i32>",
param_count,
expected_inputs,
def.inputs.as_ref().map(|i| i.keys().collect::<Vec<_>>()),
(0..expected_inputs)
.map(|i| format!("arg{}: *const i32", i))
.collect::<Vec<_>>()
.join(", ")
);
}
// Verify return type is Vec<i32>
match &fn_sig.output {
syn::ReturnType::Type(_, ty) => {
if !matches!(&**ty, syn::Type::Path(tp) if tp.path.segments.last().map(|s| s.ident == "Vec").unwrap_or(false)) {
panic!("Execute function must return Vec<i32>");
}
}
syn::ReturnType::Default => {
panic!("Execute function must return Vec<i32>");
}
}
}
fn generate_execute_wrapper(
input_fn: syn::ItemFn,
fn_name: &syn::Ident,
input_count: usize,
) -> TokenStream {
let arg_names: Vec<_> = (0..input_count)
.map(|i| syn::Ident::new(&format!("arg{}", i), proc_macro2::Span::call_site()))
.collect();
let expanded = quote! {
#input_fn
#[no_mangle]
pub extern "C" fn execute(
output_pos: i32,
#( #arg_names: i32 ),*
) -> i32 {
extern "C" {
fn __nodarium_log(ptr: *const u8, len: usize);
fn __nodarium_log_panic(ptr: *const u8, len: usize);
}
// Setup panic hook
static SET_HOOK: std::sync::Once = std::sync::Once::new();
SET_HOOK.call_once(|| {
std::panic::set_hook(Box::new(|info| {
let msg = info.to_string();
unsafe { __nodarium_log_panic(msg.as_ptr(), msg.len()); }
}));
});
// Call user function
let result = #fn_name(
#( #arg_names as *const i32 ),*
);
// Write result directly to shared memory at output_pos
let len_bytes = result.len() * 4;
unsafe {
let src = result.as_ptr() as *const u8;
let dst = output_pos as *mut u8;
dst.copy_from_nonoverlapping(src, len_bytes);
}
// Forget the Vec to prevent deallocation (data is in shared memory now)
core::mem::forget(result);
len_bytes as i32
}
};
TokenStream::from(expanded)
}
```
### 2.3 Generated Assembly
The macro generates:
```asm
; Input: output_pos in register r0, arg0 in r1, arg1 in r2, arg2 in r3
execute:
; Call user function
bl user_execute ; returns pointer to Vec<i32> in r0
; Calculate byte length
ldr r4, [r0, #8] ; Vec::len field
lsl r4, r4, #2 ; len * 4 (i32 = 4 bytes)
; Copy Vec data to shared memory at output_pos
ldr r5, [r0, #0] ; Vec::ptr field
ldr r6, [r0, #4] ; capacity (unused)
; memcpy(dst=output_pos, src=r5, len=r4)
; (implemented via copy_from_nonoverlapping)
; Return length
mov r0, r4
bx lr
```
## Phase 3: Input Reading Helpers
```rust
// packages/utils/src/accessor.rs
/// Read i32 from shared memory
#[inline]
pub unsafe fn read_i32(ptr: *const i32) -> i32 {
*ptr
}
/// Read f32 from shared memory (stored as i32 bits)
#[inline]
pub unsafe fn read_f32(ptr: *const i32) -> f32 {
f32::from_bits(*ptr as u32)
}
/// Read boolean from shared memory
#[inline]
pub unsafe fn read_bool(ptr: *const i32) -> bool {
*ptr != 0
}
/// Read vec3 (3 f32s) from shared memory
#[inline]
pub unsafe fn read_vec3(ptr: *const i32) -> [f32; 3] {
let p = ptr as *const f32;
[p.read(), p.add(1).read(), p.add(2).read()]
}
/// Read slice from shared memory
#[inline]
pub unsafe fn read_i32_slice(ptr: *const i32, len: usize) -> &'static [i32] {
std::slice::from_raw_parts(ptr, len)
}
/// Read f32 slice from shared memory
#[inline]
pub unsafe fn read_f32_slice(ptr: *const i32, len: usize) -> &'static [f32] {
std::slice::from_raw_parts(ptr as *const f32, len)
}
/// Read with default value
#[inline]
pub unsafe fn read_f32_default(ptr: *const i32, default: f32) -> f32 {
if ptr.is_null() { default } else { read_f32(ptr) }
}
#[inline]
pub unsafe fn read_i32_default(ptr: *const i32, default: i32) -> i32 {
if ptr.is_null() { default } else { read_i32(ptr) }
}
```
## Phase 4: Node Implementation Examples
### 4.1 Math Node
```rust
// nodes/max/plantarium/math/src/lib.rs
nodarium_definition_file!("src/input.json");
#[nodarium_execute]
pub fn execute(op_type: *const i32, a: *const i32, b: *const i32) -> Vec<i32> {
use nodarium_utils::{read_i32, read_f32};
let op = unsafe { read_i32(op_type) };
let a_val = unsafe { read_f32(a) };
let b_val = unsafe { read_f32(b) };
let result = match op {
0 => a_val + b_val, // add
1 => a_val - b_val, // subtract
2 => a_val * b_val, // multiply
3 => a_val / b_val, // divide
_ => 0.0,
};
vec![result.to_bits() as i32]
}
```
### 4.2 Vec3 Node
```rust
// nodes/max/plantarium/vec3/src/lib.rs
nodarium_definition_file!("src/input.json");
#[nodarium_execute]
pub fn execute(x: *const i32, y: *const i32, z: *const i32) -> Vec<i32> {
use nodarium_utils::read_f32;
let x_val = unsafe { read_f32(x) };
let y_val = unsafe { read_f32(y) };
let z_val = unsafe { read_f32(z) };
vec![x_val.to_bits() as i32, y_val.to_bits() as i32, z_val.to_bits() as i32]
}
```
### 4.3 Box Node
```rust
// nodes/max/plantarium/box/src/lib.rs
nodarium_definition_file!("src/input.json");
#[nodarium_execute]
pub fn execute(size: *const i32) -> Vec<i32> {
use nodarium_utils::{read_f32, encode_float, calculate_normals};
let size = unsafe { read_f32(size) };
let p = encode_float(size);
let n = encode_float(-size);
let mut cube_geometry = vec![
1, // 1: geometry
8, // 8 vertices
12, // 12 faces
// Face indices
0, 1, 2, 0, 2, 3,
0, 3, 4, 4, 5, 0,
6, 1, 0, 5, 6, 0,
7, 2, 1, 6, 7, 1,
2, 7, 3, 3, 7, 4,
7, 6, 4, 4, 6, 5,
// Bottom plate
p, n, n, p, n, p, n, n, p, n, n, n,
// Top plate
n, p, n, p, p, n, p, p, p, n, p, p,
// Normals
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
];
calculate_normals(&mut cube_geometry);
cube_geometry
}
```
### 4.4 Stem Node
```rust
// nodes/max/plantarium/stem/src/lib.rs
nodarium_definition_file!("src/input.json");
#[nodarium_execute]
pub fn execute(
origin: *const i32,
amount: *const i32,
length: *const i32,
thickness: *const i32,
resolution: *const i32,
) -> Vec<i32> {
use nodarium_utils::{
read_vec3, read_i32, read_f32,
geometry::{create_multiple_paths, wrap_multiple_paths},
};
let origin = unsafe { read_vec3(origin) };
let amount = unsafe { read_i32(amount) } as usize;
let length = unsafe { read_f32(length) };
let thickness = unsafe { read_f32(thickness) };
let resolution = unsafe { read_i32(resolution) } as usize;
let mut stem_data = create_multiple_paths(amount, resolution, 1);
let mut stems = wrap_multiple_paths(&mut stem_data);
for stem in stems.iter_mut() {
let points = stem.get_points_mut();
for (i, point) in points.iter_mut().enumerate() {
let t = i as f32 / (resolution as f32 - 1.0);
point.x = origin[0];
point.y = origin[1] + t * length;
point.z = origin[2];
point.w = thickness * (1.0 - t);
}
}
stem_data
}
```
## Phase 5: Runtime Implementation
```typescript
// app/src/lib/runtime/memory-manager.ts
export const SHARED_MEMORY = new WebAssembly.Memory({
initial: 1024, // 64MB initial
maximum: 4096, // 256MB maximum
});
export class MemoryManager {
private offset: number = 0;
private readonly start: number = 0;
reset() {
this.offset = this.start;
}
alloc(bytes: number): number {
const pos = this.offset;
this.offset += bytes;
return pos;
}
readInt32(pos: number): number {
return new Int32Array(SHARED_MEMORY.buffer)[pos / 4];
}
readFloat32(pos: number): number {
return new Float32Array(SHARED_MEMORY.buffer)[pos / 4];
}
readBytes(pos: number, length: number): Uint8Array {
return new Uint8Array(SHARED_MEMORY.buffer, pos, length);
}
getInt32View(): Int32Array {
return new Int32Array(SHARED_MEMORY.buffer);
}
getFloat32View(): Float32Array {
return new Float32Array(SHARED_MEMORY.buffer);
}
getRemaining(): number {
return SHARED_MEMORY.buffer.byteLength - this.offset;
}
}
```
```typescript
// app/src/lib/runtime/imports.ts
import { SHARED_MEMORY } from "./memory-manager";
export function createImportObject(nodeId: string): WebAssembly.Imports {
return {
env: {
// Import shared memory
memory: SHARED_MEMORY,
// Logging
__nodarium_log: (ptr: number, len: number) => {
const msg = new TextDecoder().decode(
new Uint8Array(SHARED_MEMORY.buffer, ptr, len),
);
console.log(`[${nodeId}] ${msg}`);
},
__nodarium_log_panic: (ptr: number, len: number) => {
const msg = new TextDecoder().decode(
new Uint8Array(SHARED_MEMORY.buffer, ptr, len),
);
console.error(`[${nodeId}] PANIC: ${msg}`);
},
},
};
}
```
```typescript
// app/src/lib/runtime/executor.ts
import { SHARED_MEMORY } from "./memory-manager";
import { createImportObject } from "./imports";
export class SharedMemoryRuntimeExecutor implements RuntimeExecutor {
private memory: MemoryManager;
private definitionMap: Map<string, NodeDefinition> = new Map();
private results: Map<string, { pos: number; len: number }> = new Map();
private instances: Map<string, WebAssembly.Instance> = new Map();
constructor(private registry: NodeRegistry) {
this.memory = new MemoryManager();
}
async execute(graph: Graph, settings: Record<string, unknown>) {
this.memory.reset();
this.results.clear();
const [outputNode, nodes] = await this.addMetaData(graph);
const sortedNodes = nodes.sort((a, b) => b.depth - a.depth);
for (const node of sortedNodes) {
await this.executeNode(node, settings);
}
const result = this.results.get(outputNode.id);
const view = this.memory.getInt32View();
return view.subarray(result.pos / 4, result.pos / 4 + result.len / 4);
}
private async executeNode(
node: RuntimeNode,
settings: Record<string, unknown>,
) {
const def = this.definitionMap.get(node.type)!;
const inputs = def.inputs || {};
const inputNames = Object.keys(inputs);
const outputSize = this.estimateOutputSize(def);
const outputPos = this.memory.alloc(outputSize);
const args: number[] = [outputPos];
for (const inputName of inputNames) {
const inputDef = inputs[inputName];
const inputNode = node.state.inputNodes[inputName];
if (inputNode) {
const parentResult = this.results.get(inputNode.id)!;
args.push(parentResult.pos);
continue;
}
const valuePos = this.memory.alloc(16);
this.writeValue(
valuePos,
inputDef,
node.props?.[inputName] ??
settings[inputDef.setting ?? ""] ??
inputDef.value,
);
args.push(valuePos);
}
let instance = this.instances.get(node.type);
if (!instance) {
instance = await this.instantiateNode(node.type);
this.instances.set(node.type, instance);
}
const execute = instance.exports.execute as (...args: number[]) => number;
const writtenLen = execute(...args);
this.results.set(node.id, { pos: outputPos, len: writtenLen });
}
private writeValue(pos: number, inputDef: NodeInput, value: unknown) {
const view = this.memory.getFloat32View();
const intView = this.memory.getInt32View();
switch (inputDef.type) {
case "float":
view[pos / 4] = value as number;
break;
case "integer":
case "select":
case "seed":
intView[pos / 4] = value as number;
break;
case "boolean":
intView[pos / 4] = value ? 1 : 0;
break;
case "vec3":
const arr = value as number[];
view[pos / 4] = arr[0];
view[pos / 4 + 1] = arr[1];
view[pos / 4 + 2] = arr[2];
break;
}
}
private estimateOutputSize(def: NodeDefinition): number {
const sizes: Record<string, number> = {
float: 16,
integer: 16,
boolean: 16,
vec3: 16,
geometry: 8192,
path: 4096,
};
return sizes[def.outputs?.[0] || "float"] || 64;
}
private async instantiateNode(
nodeType: string,
): Promise<WebAssembly.Instance> {
const wasmBytes = await this.fetchWasm(nodeType);
const module = await WebAssembly.compile(wasmBytes);
const importObject = createImportObject(nodeType);
return WebAssembly.instantiate(module, importObject);
}
}
```
## Phase 6: Execution Flow Visualization
```
┌─────────────────────────────────────────────────────────────────────────────┐
│ Execution Timeline │
└─────────────────────────────────────────────────────────────────────────────┘
Step 1: Setup
SHARED_MEMORY = new WebAssembly.Memory({ initial: 1024 })
memory.offset = 0
Step 2: Execute Node A (math with 3 inputs)
outputPos = memory.alloc(16) = 0
args = [0, ptr_to_op_type, ptr_to_a, ptr_to_b]
Node A reads:
*ptr_to_op_type → op
*ptr_to_a → a
*ptr_to_b → b
Node A returns: vec![result.to_bits()]
Macro writes result directly to SHARED_MEMORY[0..4]
Returns: 4
results['A'] = { pos: 0, len: 4 }
memory.offset = 4
Step 3: Execute Node B (stem with 5 inputs, input[0] from A)
outputPos = memory.alloc(4096) = 4
args = [4, results['A'].pos, ptr_to_amount, ptr_to_length, ...]
Node B reads:
*results['A'].pos → value from Node A
*ptr_to_amount → amount
...
Node B returns: stem_data Vec<i32> (1000 elements = 4000 bytes)
Macro writes stem_data directly to SHARED_MEMORY[4..4004]
Returns: 4000
results['B'] = { pos: 4, len: 4000 }
memory.offset = 4004
Step 4: Execute Node C (output, 1 input from B)
outputPos = memory.alloc(16) = 4004
args = [4004, results['B'].pos, results['B'].len]
Node C reads:
*results['B'].pos → stem geometry
Node C returns: vec![1] (identity)
Macro writes to SHARED_MEMORY[4004..4008]
results['C'] = { pos: 4004, len: 4 }
Final: Return SHARED_MEMORY[4004..4008] as geometry result
```
## Phase 7: Memory Growth Strategy
```typescript
class MemoryManager {
alloc(bytes: number): number {
const required = this.offset + bytes;
const currentBytes = SHARED_MEMORY.buffer.byteLength;
if (required > currentBytes) {
const pagesNeeded = Math.ceil((required - currentBytes) / 65536);
// Memory.grow returns the previous size in pages and throws a RangeError on failure
try {
SHARED_MEMORY.grow(pagesNeeded);
} catch {
throw new Error(`Out of memory: need ${bytes} bytes`);
}
this.int32View = new Int32Array(SHARED_MEMORY.buffer);
this.float32View = new Float32Array(SHARED_MEMORY.buffer);
}
const pos = this.offset;
this.offset += bytes;
return pos;
}
}
```
## Phase 8: Migration Checklist
### Build Configuration
- [ ] Add `--import-memory` to `rustflags` in `.cargo/config.toml`
- [ ] Ensure no nodes export memory
### Runtime
- [ ] Create `SHARED_MEMORY` instance
- [ ] Implement `MemoryManager` with alloc/read/write
- [ ] Create import object factory
- [ ] Implement `SharedMemoryRuntimeExecutor`
### Macro
- [ ] Parse definition JSON
- [ ] Validate function signature (N params, Vec<i32> return)
- [ ] Generate wrapper that writes return value to `output_pos`
- [ ] Add panic hook
### Utilities
- [ ] `read_i32(ptr: *const i32) -> i32`
- [ ] `read_f32(ptr: *const i32) -> f32`
- [ ] `read_bool(ptr: *const i32) -> bool`
- [ ] `read_vec3(ptr: *const i32) -> [f32; 3]`
- [ ] `read_i32_slice(ptr: *const i32, len: usize) -> &[i32]`
### Nodes
- [ ] `float`, `integer`, `boolean` nodes
- [ ] `vec3` node
- [ ] `math` node
- [ ] `random` node
- [ ] `box` node
- [ ] `stem` node
- [ ] `branch` node
- [ ] `instance` node
- [ ] `output` node
## Phase 9: Before vs After
### Before (per-node memory)
```rust
#[nodarium_execute]
pub fn execute(input: &[i32]) -> Vec<i32> {
let args = split_args(input);
let a = evaluate_float(args[0]);
let b = evaluate_float(args[1]);
vec![(a + b).to_bits()]
}
```
### After (shared memory)
```rust
#[nodarium_execute]
pub fn execute(a: *const i32, b: *const i32) -> Vec<i32> {
use nodarium_utils::read_f32;
let a_val = unsafe { read_f32(a) };
let b_val = unsafe { read_f32(b) };
vec![(a_val + b_val).to_bits()]
}
```
**Key differences:**
- Parameters are input pointers, not a slice
- Use `read_f32` helper instead of `evaluate_float`
- Macro writes result directly to shared memory
- All nodes share the same memory import
## Phase 10: Benefits
| Aspect | Before | After |
| ----------------- | -------------- | -------------------- |
| Memory | N × ~1MB heaps | 1 × 64-256MB shared |
| Cross-node access | Copy via JS | Direct read |
| API | `&[i32]` slice | `*const i32` pointer |
| Validation | Runtime | Compile-time |

227
SUMMARY.md Normal file
View File

@@ -0,0 +1,227 @@
# Nodarium - AI Coding Agent Summary
## Project Overview
Nodarium is a WebAssembly-based visual programming language used to build <https://nodes.max-richter.dev>, a procedural 3D plant modeling tool. The system allows users to create visual node graphs where each node is a compiled WebAssembly module.
## Technology Stack
**Frontend (SvelteKit):**
- Framework: SvelteKit with Svelte 5
- 3D Rendering: Three.js via Threlte
- Styling: Tailwind CSS 4
- Build Tool: Vite
- State Management: Custom store-client package
- WASM Integration: vite-plugin-wasm, comlink
**Backend/Core (Rust/WASM):**
- Language: Rust
- Output: WebAssembly (wasm32-unknown-unknown target)
- Build Tool: cargo
- Procedural Macros: custom macros package
**Package Management:**
- Node packages: pnpm workspace (v10.28.1)
- Rust packages: Cargo workspace
## Directory Structure
```
nodarium/
├── app/ # SvelteKit web application
│ ├── src/
│ │ ├── lib/ # App-specific components and utilities
│ │ ├── routes/ # SvelteKit routes (pages)
│ │ ├── app.css # Global styles
│ │ └── app.html # HTML template
│ ├── static/
│ │ └── nodes/ # Compiled WASM node files served statically
│ ├── package.json # App dependencies
│ ├── svelte.config.js # SvelteKit configuration
│ ├── vite.config.ts # Vite configuration
│ └── tsconfig.json # TypeScript configuration
├── packages/ # Shared workspace packages
│ ├── ui/ # Svelte UI component library (published as @nodarium/ui)
│ │ ├── src/ # UI components
│ │ ├── static/ # Static assets for UI
│ │ ├── dist/ # Built output
│ │ └── package.json
│ ├── registry/ # Node registry with IndexedDB persistence (@nodarium/registry)
│ │ └── src/
│ ├── types/ # Shared TypeScript types (@nodarium/types)
│ │ └── src/
│ ├── utils/ # Shared utilities (@nodarium/utils)
│ │ └── src/
│ └── macros/ # Rust procedural macros for node development
├── nodes/ # WebAssembly node packages (Rust)
│ └── max/plantarium/ # Plantarium nodes namespace
│ ├── box/ # Box geometry node
│ ├── branch/ # Branch generation node
│ ├── float/ # Float value node
│ ├── gravity/ # Gravity simulation node
│ ├── instance/ # Geometry instancing node
│ ├── math/ # Math operations node
│ ├── noise/ # Noise generation node
│ ├── output/ # Output node for results
│ ├── random/ # Random value node
│ ├── rotate/ # Rotation transformation node
│ ├── stem/ # Stem geometry node
│ ├── triangle/ # Triangle geometry node
│ ├── vec3/ # Vector3 manipulation node
│ └── .template/ # Node template for creating new nodes
├── docs/ # Documentation
│ ├── ARCHITECTURE.md # System architecture overview
│ ├── DEVELOPING_NODES.md # Guide for creating new nodes
│ ├── NODE_DEFINITION.md # Node definition schema
│ └── PLANTARIUM.md # Plantarium-specific documentation
├── Cargo.toml # Rust workspace configuration
├── package.json # Root npm scripts
├── pnpm-workspace.yaml # pnpm workspace configuration
├── pnpm-lock.yaml # Locked dependency versions
└── README.md # Project readme
```
## Node System Architecture
### What is a Node?
Nodes are WebAssembly modules that:
- Have a unique ID (e.g., `max/plantarium/stem`)
- Define inputs with types and default values
- Define outputs they produce
- Execute logic when called with arguments
### Node Definition Schema
Nodes are defined via `definition.json` embedded in each WASM module:
```json
{
"id": "namespace/category/node-name",
"outputs": ["geometry"],
"inputs": {
"height": { "type": "float", "value": 1.0 },
"radius": { "type": "float", "value": 0.1 }
}
}
```
For now, nodes are limited to a single output.
### Node Execution
Nodes receive serialized arguments and return serialized outputs. The `nodarium_utils` Rust crate provides helpers for:
- Parsing input arguments
- Creating geometry data
- Concatenating output vectors
### Node Registration
Nodes are:
1. Compiled to WASM files in `target/wasm32-unknown-unknown/release/`
2. Copied to `app/static/nodes/` for serving
3. Registered in the browser via IndexedDB using the registry package
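A rough sketch of that registration flow, assuming the `idb` wrapper and hypothetical database/store names (the actual logic lives in `packages/registry`):
```typescript
import { openDB } from "idb";

// Database and store names here are illustrative; the registry package defines its own.
async function registerNode(id: string) {
  // 1. Fetch the compiled WASM that build:nodes copied into app/static/nodes/
  const buffer = await fetch(`/nodes/${id}.wasm`).then((r) => r.arrayBuffer());
  // 2. Persist the buffer in IndexedDB so later sessions can skip the network
  const db = await openDB("node-registry", 1, {
    upgrade(db) {
      db.createObjectStore("nodes");
    },
  });
  await db.put("nodes", buffer, id);
}
```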
## Key Dependencies
**Frontend:**
- `@sveltejs/kit` - Application framework
- `@threlte/core` & `@threlte/extras` - Three.js Svelte integration
- `three` - 3D graphics library
- `tailwindcss` - CSS framework
- `comlink` - WebWorker RPC
- `idb` - IndexedDB wrapper
- `wabt` - WebAssembly binary toolkit
**Rust/WASM:**
- Language: Rust (compiled with plain cargo)
- Output: WebAssembly (wasm32-unknown-unknown target)
- Generic WASM wrapper for language-agnostic node development
- `glam` - Math library (Vec2, Vec3, Mat4, etc.)
- `nodarium_macros` - Custom procedural macros
- `nodarium_utils` - Shared node utilities
## Build Commands
From root directory:
```bash
# Install dependencies
pnpm i
# Build all WASM nodes (compiles Rust, copies to app/static)
pnpm build:nodes
# Build the app (builds UI library + SvelteKit app)
pnpm build:app
# Full build (nodes + app)
pnpm build
# Development
pnpm dev # Run all dev commands in parallel
pnpm dev:nodes # Watch nodes/, auto-rebuild on changes
pnpm dev:app_ui # Watch app and UI package
pnpm dev_ui # Watch UI package only
```
## Workspace Packages
The project uses pnpm workspaces with the following packages:
| Package | Location | Purpose |
| ------------------ | ------------------ | ------------------------------ |
| @nodarium/app | app/ | Main SvelteKit application |
| @nodarium/ui | packages/ui/ | Reusable UI component library |
| @nodarium/registry | packages/registry/ | Node registry with persistence |
| @nodarium/types | packages/types/ | Shared TypeScript types |
| @nodarium/utils | packages/utils/ | Shared utilities |
| nodarium macros | packages/macros/ | Rust procedural macros |
## Configuration Files
- `.dprint.jsonc` - Dprint formatter configuration
- `svelte.config.js` - SvelteKit configuration (app and ui)
- `vite.config.ts` - Vite bundler configuration
- `tsconfig.json` - TypeScript configuration (app and packages)
- `Cargo.toml` - Rust workspace with member packages
- `flake.nix` - Nix development environment
## Development Workflow
### Adding a New Node
1. Copy the `.template` directory in `nodes/max/plantarium/` to create a new node directory
2. Define node in `src/definition.json`
3. Implement logic in `src/lib.rs`
4. Build with `cargo build --release --target wasm32-unknown-unknown`
5. Test by dragging onto the node graph
### Modifying UI Components
1. Changes to `packages/ui/` automatically rebuild with watch mode
2. App imports from `@nodarium/ui`
3. Run `pnpm dev:app_ui` for hot reload
## Important Notes for AI Agents
1. **WASM Compilation**: Nodes require `wasm32-unknown-unknown` target (`rustup target add wasm32-unknown-unknown`)
2. **Cross-Compilation**: WASM build happens on host, not in containers/VMs
3. **Static Serving**: Compiled WASM files must exist in `app/static/nodes/` before dev server runs
4. **Workspace Dependencies**: Use `workspace:*` protocol for internal packages
5. **Threlte Version**: Uses Threlte 8.x, not 7.x (important for 3D component APIs)
6. **Svelte 5**: Project uses Svelte 5 with runes (`$state`, `$derived`, `$effect`)
7. **Tailwind 4**: Uses Tailwind CSS v4 with `@tailwindcss/vite` plugin
8. **IndexedDB**: Registry uses IDB for persistent node storage in browser

294
SUMMARY_RUNTIME.md Normal file
View File

@@ -0,0 +1,294 @@
# Node Compilation and Runtime Execution
## Overview
Nodarium nodes are WebAssembly modules written in Rust. Each node is a compiled WASM binary that exposes a standardized C ABI interface. The system uses procedural macros to generate the necessary boilerplate for node definitions, memory management, and execution.
## Node Compilation
### 1. Node Definition (JSON)
Each node has a `src/input.json` file that defines:
```json
{
"id": "max/plantarium/stem",
"meta": { "description": "Creates a stem" },
"outputs": ["path"],
"inputs": {
"origin": { "type": "vec3", "value": [0, 0, 0], "external": true },
"amount": { "type": "integer", "value": 1, "min": 1, "max": 64 },
"length": { "type": "float", "value": 5 },
"thickness": { "type": "float", "value": 0.2 }
}
}
```
### 2. Procedural Macros
The `nodarium_macros` crate provides two procedural macros:
#### `#[nodarium_execute]`
Transforms a Rust function into a WASM-compatible entry point:
```rust
#[nodarium_execute]
pub fn execute(input: &[i32]) -> Vec<i32> {
// Node logic here
}
```
The macro generates:
- **C ABI wrapper**: Converts the WASM interface to a standard C FFI
- **`execute` function**: Takes `(ptr: *const i32, len: usize)` and returns `*mut i32`
- **Memory allocation**: `__alloc(len: usize) -> *mut i32` for buffer allocation
- **Memory deallocation**: `__free(ptr: *mut i32, len: usize)` for cleanup
- **Static output buffer**: `OUTPUT_BUFFER` for returning results
- **Panic hook**: Routes panics through `host_log_panic` for debugging
- **Internal logic wrapper**: Wraps the original function
#### `nodarium_definition_file!("path")`
Embeds the node definition JSON into the WASM binary:
```rust
nodarium_definition_file!("src/input.json");
```
Generates:
- **`DEFINITION_DATA`**: Static byte array in `nodarium_definition` section
- **`get_definition_ptr()`**: Returns pointer to definition data
- **`get_definition_len()`**: Returns length of definition data
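A hedged host-side sketch of how those exports can be used to read the definition back out of a compiled node (assuming the module also exports its own memory, as nodes currently do, and that the required host imports are supplied):
```typescript
async function readDefinition(wasmBytes: ArrayBuffer, imports: WebAssembly.Imports) {
  const { instance } = await WebAssembly.instantiate(wasmBytes, imports);
  const exports = instance.exports as {
    memory: WebAssembly.Memory;
    get_definition_ptr: () => number;
    get_definition_len: () => number;
  };
  // The macro embedded the JSON bytes in the binary; read them out of linear memory.
  const ptr = exports.get_definition_ptr();
  const len = exports.get_definition_len();
  const json = new TextDecoder().decode(new Uint8Array(exports.memory.buffer, ptr, len));
  return JSON.parse(json);
}
```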
### 3. Build Process
Nodes are compiled with:
```bash
cargo build --release --target wasm32-unknown-unknown
```
The resulting `.wasm` files are copied to `app/static/nodes/` for serving.
## Node Execution Runtime
### Architecture
```
WebWorker Thread
└─ WorkerRuntimeExecutor
   └─ MemoryRuntimeExecutor
      ├─ Node Registry (WASM + Definitions)
      └─ Execution Engine (Bottom-Up Evaluation)
```
### 1. MemoryRuntimeExecutor
The core execution engine in `runtime-executor.ts`:
#### Metadata Collection (`addMetaData`)
1. Load node definitions from registry
2. Build parent/child relationships from graph edges
3. Calculate execution depth via reverse BFS from output node
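The depth pass can be pictured as a reverse breadth-first search; a minimal sketch (the node shape is a simplified assumption):
```typescript
type RuntimeNodeSketch = { id: number; parents: RuntimeNodeSketch[]; depth?: number };

function assignDepths(outputNode: RuntimeNodeSketch) {
  // Walk from the output towards the leaves, one level at a time.
  let frontier = [outputNode];
  let depth = 0;
  while (frontier.length > 0) {
    const next: RuntimeNodeSketch[] = [];
    for (const node of frontier) {
      // A node reachable over several paths keeps its largest depth,
      // so it still runs before every node that consumes its output.
      node.depth = Math.max(node.depth ?? 0, depth);
      next.push(...node.parents);
    }
    frontier = next;
    depth++;
  }
}
```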
#### Node Sorting
Nodes are sorted by depth (highest depth first) for bottom-up execution:
```
Depth 3: n3 n6
Depth 2: n2 n4 n5
Depth 1: n1
Depth 0: Output
Execution order: n3, n6, n2, n4, n5, n1, Output
```
#### Input Collection
For each node, inputs are gathered from:
1. **Connected nodes**: Results from parent nodes in the graph
2. **Node props**: Values stored directly on the node instance
3. **Settings**: Global settings mapped via `setting` property
4. **Defaults**: Values from node definition
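In code, that priority chain looks roughly like this (a simplified sketch; the types follow `@nodarium/types` and the runtime's own `RuntimeNode`):
```typescript
import type { NodeInput } from "@nodarium/types";
import type { RuntimeNode } from "./types";

function resolveInput(
  key: string,
  input: NodeInput,
  node: RuntimeNode,
  settings: Record<string, unknown>,
  results: Record<string, Int32Array>,
) {
  const parent = node.state.inputNodes[key];
  if (parent) return results[parent.id];                        // 1. connected node
  if (node.props?.[key] !== undefined) return node.props[key];  // 2. node props
  if (input.setting) return settings[input.setting];            // 3. global setting
  return input.value;                                           // 4. definition default
}
```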
#### Input Encoding
Values are encoded as `Int32Array`:
- **Floats**: IEEE 754 bits cast to i32
- **Vectors**: `[0, count, v1, v2, v3, 1, 1]` (nested bracket format)
- **Booleans**: `0` or `1`
- **Integers**: Direct i32 value
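For example, the float path can be sketched like this (`encodeFloat` is shown as an illustrative reimplementation, not the actual `@nodarium/utils` source):
```typescript
function encodeFloat(value: number): number {
  const view = new DataView(new ArrayBuffer(4));
  view.setFloat32(0, value, true);
  return view.getInt32(0, true); // IEEE 754 bits reinterpreted as i32
}

const floatInput = [encodeFloat(5.0)];                                          // → [1084227584]
const boolInput = [1];                                                          // true
const vec3Input = [0, 4, encodeFloat(1), encodeFloat(2), encodeFloat(3), 1, 1]; // bracket format
```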
#### Caching
Results are cached using:
```typescript
inputHash = `node-${node.id}-${fastHashArrayBuffer(encoded_inputs)}`
```
The cache uses LRU eviction (default size: 50 entries).
### 2. Execution Flow
```typescript
async execute(graph: Graph, settings) {
// 1. Load definitions and build node relationships
const [outputNode, nodes] = await this.addMetaData(graph);
// 2. Sort nodes by depth (bottom-up)
const sortedNodes = nodes.sort((a, b) => b.depth - a.depth);
// 3. Execute each node
for (const node of sortedNodes) {
const inputs = this.collectInputs(node, settings);
const encoded = concatEncodedArrays(inputs);
const result = nodeType.execute(encoded);
this.results[node.id] = result;
}
// 4. Return output node result
return this.results[outputNode.id];
}
```
### 3. Worker Isolation
`WorkerRuntimeExecutor` runs execution in a WebWorker via Comlink:
```typescript
class WorkerRuntimeExecutor implements RuntimeExecutor {
private worker = new ComlinkWorker(...);
async execute(graph, settings) {
return this.worker.executeGraph(graph, settings);
}
}
```
The worker backend (`worker-runtime-executor-backend.ts`):
- Creates a single `MemoryRuntimeExecutor` instance
- Manages caching state
- Collects performance metrics
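A hedged sketch of what that backend looks like (import paths and the cache constructor are assumptions; `executeGraph` matches the call made by `WorkerRuntimeExecutor` above):
```typescript
import { expose } from "comlink";
import type { Graph } from "@nodarium/types";
import { RemoteNodeRegistry } from "@nodarium/registry";
import { MemoryRuntimeExecutor } from "./runtime-executor";
import { MemoryRuntimeCache } from "./runtime-cache";

// One long-lived executor per worker keeps the cache warm between runs.
const executor = new MemoryRuntimeExecutor(new RemoteNodeRegistry(""), new MemoryRuntimeCache());

expose({
  async executeGraph(graph: Graph, settings: Record<string, unknown>) {
    return executor.execute(graph, settings);
  },
  getPerformanceData() {
    return executor.getPerformanceData();
  },
});
```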
### 4. Remote Execution (Optional)
`RemoteRuntimeExecutor` can execute graphs on a remote server:
```typescript
class RemoteRuntimeExecutor implements RuntimeExecutor {
async execute(graph, settings) {
const res = await fetch(this.url, {
method: "POST",
body: JSON.stringify({ graph, settings })
});
return new Int32Array(await res.arrayBuffer());
}
}
```
## Data Encoding Format
### Bracket Notation
Inputs and outputs use a nested bracket encoding:
```
[0, count, item1, item2, ..., 1, 1]
 ^  ^      ^---- items ----^  ^  ^
 |  |                         |  |
 |  |                         |  +-- closing bracket (1)
 |  |                         +----- closing bracket (1)
 |  +-- number of items + 1
 +-- opening bracket (0)
```
### Example Encodings
**Float (5.0)**:
```typescript
encodeFloat(5.0) // → 1084227584 (IEEE 754 bits as i32)
```
**Vec3 ([1, 2, 3])**:
```typescript
[0, 4, encodeFloat(1), encodeFloat(2), encodeFloat(3), 1, 1]
```
**Nested Math Expression**:
```
[0, 3, 0, 2, 0, 3, 0, 0, 0, 3, 7549747, 127, 1, 1, ...]
```
### Decoding Utilities
From `packages/utils/src/tree.rs`:
- `split_args()`: Parses nested bracket arrays into segments
- `evaluate_float()`: Recursively evaluates and decodes float expressions
- `evaluate_int()`: Evaluates integer/math node expressions
- `evaluate_vec3()`: Decodes vec3 arrays
## Geometry Data Format
### Path Data
Paths represent procedural plant structures:
```
[0, count, [0, header_size, node_type, depth, x, y, z, w, ...], 1, 1]
```
Each point has 4 values: x, y, z position + thickness (w).
### Geometry Data
Meshes use a similar format with vertices and face indices.
## Performance Tracking
The runtime collects detailed performance metrics:
- `collect-metadata`: Time to build node graph
- `collected-inputs`: Time to gather inputs
- `encoded-inputs`: Time to encode inputs
- `hash-inputs`: Time to compute cache hash
- `cache-hit`: 1 if cache hit, 0 if miss
- `node/{node_type}`: Time per node execution
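Each point is recorded from `performance.now()` deltas around the measured step; roughly (a sketch of the pattern used in `runtime-executor.ts`, with a hypothetical `timed` helper):
```typescript
import type { PerformanceStore } from "@nodarium/utils";

function timed<T>(perf: PerformanceStore | undefined, name: string, run: () => T): T {
  const start = performance.now();
  const result = run();
  perf?.addPoint(name, performance.now() - start);
  return result;
}

// e.g. const result = timed(this.perf, "node/" + nodeType.id, () => nodeType.execute(encoded));
```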
## Caching Strategy
### MemoryRuntimeCache
LRU cache implementation:
```typescript
class MemoryRuntimeCache {
  private map = new Map<string, unknown>();
  size: number = 50;
  get(key: string) {
    const value = this.map.get(key);
    if (value !== undefined) { this.map.delete(key); this.map.set(key, value); } // move to front
    return value;
  }
  set(key: string, value: unknown) {
    if (this.map.size >= this.size) this.map.delete(this.map.keys().next().value as string); // evict oldest if at capacity
    this.map.set(key, value);
  }
}
```
### IndexDBCache
For persistence across sessions, the registry uses IndexedDB caching.
## Summary
The Nodarium node system works as follows:
1. **Compilation**: Rust functions are decorated with macros that generate C ABI WASM exports
2. **Registration**: Node definitions are embedded in WASM and loaded at runtime
3. **Graph Analysis**: Runtime builds node relationships and execution order
4. **Bottom-Up Execution**: Nodes execute from leaves to output
5. **Caching**: Results are cached per-node-inputs hash for performance
6. **Isolation**: Execution runs in a WebWorker to prevent main thread blocking

View File

@@ -1,3 +1,4 @@
import { RemoteNodeRegistry } from '@nodarium/registry';
import type { import type {
Graph, Graph,
NodeDefinition, NodeDefinition,
@@ -7,10 +8,9 @@ import type {
SyncCache SyncCache
} from '@nodarium/types'; } from '@nodarium/types';
import { import {
concatEncodedArrays,
createLogger, createLogger,
createWasmWrapper,
encodeFloat, encodeFloat,
fastHashArrayBuffer,
type PerformanceStore type PerformanceStore
} from '@nodarium/utils'; } from '@nodarium/utils';
import type { RuntimeNode } from './types'; import type { RuntimeNode } from './types';
@@ -18,6 +18,8 @@ import type { RuntimeNode } from './types';
const log = createLogger('runtime-executor'); const log = createLogger('runtime-executor');
log.mute(); log.mute();
const remoteRegistry = new RemoteNodeRegistry('');
function getValue(input: NodeInput, value?: unknown) { function getValue(input: NodeInput, value?: unknown) {
if (value === undefined && 'value' in input) { if (value === undefined && 'value' in input) {
value = input.value; value = input.value;
@@ -52,19 +54,30 @@ function getValue(input: NodeInput, value?: unknown) {
return value; return value;
} }
console.error({ input, value });
throw new Error(`Unknown input type ${input.type}`); throw new Error(`Unknown input type ${input.type}`);
} }
export class MemoryRuntimeExecutor implements RuntimeExecutor { type Pointer = {
private definitionMap: Map<string, NodeDefinition> = new Map(); start: number;
end: number;
};
private seed = Math.floor(Math.random() * 100000000); export class MemoryRuntimeExecutor implements RuntimeExecutor {
private nodes: Map<
string,
{ definition: NodeDefinition; execute: (outputPos: number, args: number[]) => number }
> = new Map();
private offset = 0;
private memory = new WebAssembly.Memory({
initial: 1024,
maximum: 8192
});
seed = 123123;
perf?: PerformanceStore; perf?: PerformanceStore;
private results: Record<string, Int32Array> = {};
constructor( constructor(
private registry: NodeRegistry, private registry: NodeRegistry,
public cache?: SyncCache<Int32Array> public cache?: SyncCache<Int32Array>
@@ -79,12 +92,20 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
await this.registry.load(graph.nodes.map((node) => node.type)); await this.registry.load(graph.nodes.map((node) => node.type));
const typeMap = new Map<string, NodeDefinition>(); const typeMap = new Map<string, {
definition: NodeDefinition;
execute: (outputPos: number, args: number[]) => number;
}>();
for (const node of graph.nodes) { for (const node of graph.nodes) {
if (!typeMap.has(node.type)) { if (!typeMap.has(node.type)) {
const type = this.registry.getNode(node.type); const type = this.registry.getNode(node.type);
const buffer = await remoteRegistry.fetchArrayBuffer('nodes/' + node.type + '.wasm');
const wrapper = createWasmWrapper(buffer, this.memory);
if (type) { if (type) {
typeMap.set(node.type, type); typeMap.set(node.type, {
definition: type,
execute: wrapper.execute
});
} }
} }
} }
@@ -93,7 +114,7 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
private async addMetaData(graph: Graph) { private async addMetaData(graph: Graph) {
// First, lets check if all nodes have a definition // First, lets check if all nodes have a definition
this.definitionMap = await this.getNodeDefinitions(graph); this.nodes = await this.getNodeDefinitions(graph);
const graphNodes = graph.nodes.map(node => { const graphNodes = graph.nodes.map(node => {
const n = node as RuntimeNode; const n = node as RuntimeNode;
@@ -145,16 +166,33 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
return [outputNode, nodes] as const; return [outputNode, nodes] as const;
} }
async execute(graph: Graph, settings: Record<string, unknown>) { private writeToMemory(v: number | number[] | Int32Array) {
this.perf?.addPoint('runtime'); let length = 1;
const view = new Int32Array(this.memory.buffer);
if (typeof v === 'number') {
view[this.offset] = v;
length = 1;
} else {
view.set(v, this.offset);
length = v.length;
}
let a = performance.now(); const start = this.offset;
const end = this.offset + length;
this.offset += length;
return {
start,
end
};
}
async execute(graph: Graph, _settings: Record<string, unknown>) {
this.offset = 0;
// Then we add some metadata to the graph // Then we add some metadata to the graph
const [outputNode, nodes] = await this.addMetaData(graph); const [outputNode, nodes] = await this.addMetaData(graph);
let b = performance.now();
this.perf?.addPoint('collect-metadata', b - a);
/* /*
* Here we sort the nodes into buckets, which we then execute one by one * Here we sort the nodes into buckets, which we then execute one by one
@@ -173,113 +211,81 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
); );
// here we store the intermediate results of the nodes // here we store the intermediate results of the nodes
this.results = {}; const results: Record<number, Pointer> = {};
if (settings['randomSeed']) {
this.seed = Math.floor(Math.random() * 100000000);
}
for (const node of sortedNodes) { for (const node of sortedNodes) {
const node_type = this.definitionMap.get(node.type)!; const node_type = this.nodes.get(node.type)!;
console.log('EXECUTING NODE', node_type.definition.id);
console.log(node_type.definition.inputs);
const inputs = Object.entries(node_type.definition.inputs || {}).map(
([key, input]) => {
// We should probably initially write this to memory
if (input.type === 'seed') {
return this.writeToMemory(this.seed);
}
// We should probably initially write this to memory
// If the input is linked to a setting, we use that value
// if (input.setting) {
// return getValue(input, settings[input.setting]);
// }
// check if the input is connected to another node
const inputNode = node.state.inputNodes[key];
if (inputNode) {
if (results[inputNode.id] === undefined) {
throw new Error(
`Node ${node.type} is missing input from node ${inputNode.type}`
);
}
return results[inputNode.id];
}
// If the value is stored in the node itself, we use that value
if (node.props?.[key] !== undefined) {
return this.writeToMemory(getValue(input, node.props[key]));
}
return this.writeToMemory(getValue(input));
}
);
if (!node_type || !node.state || !node_type.execute) { if (!node_type || !node.state || !node_type.execute) {
log.warn(`Node ${node.id} has no definition`); log.warn(`Node ${node.id} has no definition`);
continue; continue;
} }
a = performance.now(); const args = inputs.map(s => [s.start, s.end]).flat();
console.log('ARGS', args);
// Collect the inputs for the node
const inputs = Object.entries(node_type.inputs || {}).map(
([key, input]) => {
if (input.type === 'seed') {
return this.seed;
}
// If the input is linked to a setting, we use that value
if (input.setting) {
return getValue(input, settings[input.setting]);
}
// check if the input is connected to another node
const inputNode = node.state.inputNodes[key];
if (inputNode) {
if (this.results[inputNode.id] === undefined) {
throw new Error(
`Node ${node.type} is missing input from node ${inputNode.type}`
);
}
return this.results[inputNode.id];
}
// If the value is stored in the node itself, we use that value
if (node.props?.[key] !== undefined) {
return getValue(input, node.props[key]);
}
return getValue(input);
}
);
b = performance.now();
this.perf?.addPoint('collected-inputs', b - a);
try { try {
a = performance.now(); const bytesWritten = node_type.execute(this.offset, args);
const encoded_inputs = concatEncodedArrays(inputs); results[node.id] = {
b = performance.now(); start: this.offset,
this.perf?.addPoint('encoded-inputs', b - a); end: this.offset + bytesWritten
};
a = performance.now(); this.offset += bytesWritten;
let inputHash = `node-${node.id}-${fastHashArrayBuffer(encoded_inputs)}`; console.log('FINISHED EXECUTION', {
b = performance.now(); bytesWritten,
this.perf?.addPoint('hash-inputs', b - a); offset: this.offset
});
let cachedValue = this.cache?.get(inputHash);
if (cachedValue !== undefined) {
log.log(`Using cached value for ${node_type.id || node.id}`);
this.perf?.addPoint('cache-hit', 1);
this.results[node.id] = cachedValue as Int32Array;
continue;
}
this.perf?.addPoint('cache-hit', 0);
log.group(`executing ${node_type.id}-${node.id}`);
log.log(`Inputs:`, inputs);
a = performance.now();
this.results[node.id] = node_type.execute(encoded_inputs);
log.log('Executed', node.type, node.id);
b = performance.now();
if (this.cache && node.id !== outputNode.id) {
this.cache.set(inputHash, this.results[node.id]);
}
this.perf?.addPoint('node/' + node_type.id, b - a);
log.log('Result:', this.results[node.id]);
log.groupEnd();
} catch (e) { } catch (e) {
log.groupEnd(); console.error(e);
log.error(`Error executing node ${node_type.id || node.id}`, e);
} }
} }
const mem = new Int32Array(this.memory.buffer);
console.log('OUT', mem.slice(0, 10));
// return the result of the parent of the output node // return the result of the parent of the output node
const res = this.results[outputNode.id]; const res = results[outputNode.id];
if (this.cache) {
this.cache.size = sortedNodes.length * 2;
}
this.perf?.endPoint('runtime'); this.perf?.endPoint('runtime');
return res as unknown as Int32Array; return res as unknown as Int32Array;
} }
getIntermediateResults() {
return this.results;
}
getPerformanceData() { getPerformanceData() {
return this.perf?.get(); return this.perf?.get();
} }

View File

@@ -4,20 +4,19 @@ This guide will help you developing your first Nodarium Node written in Rust. As
## Prerequisites ## Prerequisites
You need to have [Rust](https://www.rust-lang.org/tools/install) and [wasm-pack](https://rustwasm.github.io/docs/wasm-pack/) installed. Rust is the language we are going to develop our node in and wasm-pack helps us compile our rust code into a webassembly file. You need to have [Rust](https://www.rust-lang.org/tools/install) installed. Rust is the language we are going to develop our node in and cargo compiles our rust code into webassembly.
```bash ```bash
# install rust # install rust
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
# install wasm-pack
cargo install wasm-pack
``` ```
## Clone Template ## Clone Template
```bash ```bash
wasm-pack new my-new-node --template https://github.com/jim-fx/nodarium_template # copy the template directory
cd my-new-node cp -r nodes/max/plantarium/.template nodes/max/plantarium/my-new-node
cd nodes/max/plantarium/my-new-node
``` ```
## Setup Definition ## Setup Definition

View File

@@ -8,5 +8,5 @@ edition = "2018"
crate-type = ["cdylib", "rlib"] crate-type = ["cdylib", "rlib"]
[dependencies] [dependencies]
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }
nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" } nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" }
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }

View File

@@ -1,9 +1,11 @@
use nodarium_macros::nodarium_definition_file; use nodarium_macros::nodarium_definition_file;
use nodarium_macros::nodarium_execute; use nodarium_macros::nodarium_execute;
use nodarium_utils::log;
nodarium_definition_file!("src/input.json"); nodarium_definition_file!("src/input.json");
#[nodarium_execute] #[nodarium_execute]
pub fn execute(args: &[i32]) -> Vec<i32> { pub fn execute(_value: *const i32) -> Vec<i32> {
args.into() log!("Duuuude");
vec![32]
} }

View File

@@ -8,5 +8,5 @@ edition = "2018"
crate-type = ["cdylib", "rlib"] crate-type = ["cdylib", "rlib"]
[dependencies] [dependencies]
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }
nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" } nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" }
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }

View File

@@ -1,13 +1,24 @@
use nodarium_macros::nodarium_definition_file; use nodarium_macros::nodarium_definition_file;
use nodarium_macros::nodarium_execute; use nodarium_macros::nodarium_execute;
use nodarium_utils::{ use nodarium_utils::{read_f32, read_i32, log};
concat_args, split_args
};
#[nodarium_execute]
pub fn execute(args: &[i32]) -> Vec<i32> {
let args = split_args(args);
concat_args(vec![&[0], args[0], args[1], args[2]])
}
nodarium_definition_file!("src/input.json"); nodarium_definition_file!("src/input.json");
#[nodarium_execute]
pub fn execute(op_type: *const i32, a: *const i32, b: *const i32) -> Vec<i32> {
let op = unsafe { read_i32(op_type) };
let a_val = unsafe { read_f32(a) };
let b_val = unsafe { read_f32(b) };
log!("op_type: {:?}", op);
let result = match op {
0 => a_val + b_val,
1 => a_val - b_val,
2 => a_val * b_val,
3 => a_val / b_val,
_ => 0.0,
};
vec![result.to_bits() as i32]
}

View File

@@ -8,5 +8,5 @@ edition = "2018"
crate-type = ["cdylib", "rlib"] crate-type = ["cdylib", "rlib"]
[dependencies] [dependencies]
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }
nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" } nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" }
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }

View File

@@ -1,44 +1,9 @@
use nodarium_macros::nodarium_definition_file; use nodarium_macros::nodarium_definition_file;
use nodarium_macros::nodarium_execute; use nodarium_macros::nodarium_execute;
use nodarium_utils::{
concat_args, evaluate_int,
geometry::{extrude_path, wrap_path},
log, split_args,
};
nodarium_definition_file!("src/inputs.json"); nodarium_definition_file!("src/input.json");
#[nodarium_execute] #[nodarium_execute]
pub fn execute(input: &[i32]) -> Vec<i32> { pub fn execute(_input: *const i32, _res: *const i32) -> Vec<i32> {
log!("WASM(output): input: {:?}", input); return vec![0];
let args = split_args(input);
log!("WASM(output) args: {:?}", args);
assert_eq!(args.len(), 2, "Expected 2 arguments, got {}", args.len());
let inputs = split_args(args[0]);
let resolution = evaluate_int(args[1]) as usize;
log!("inputs: {}, resolution: {}", inputs.len(), resolution);
let mut output: Vec<Vec<i32>> = Vec::new();
for arg in inputs {
let arg_type = arg[2];
log!("arg_type: {}, \n {:?}", arg_type, arg,);
if arg_type == 0 {
// if this is path we need to extrude it
output.push(arg.to_vec());
let path_data = wrap_path(arg);
let geometry = extrude_path(path_data, resolution);
output.push(geometry);
continue;
}
output.push(arg.to_vec());
}
concat_args(output.iter().map(|v| v.as_slice()).collect())
} }

View File

@@ -5,6 +5,7 @@
"build:story": "pnpm -r --filter 'ui' story:build", "build:story": "pnpm -r --filter 'ui' story:build",
"build:app": "BASE_PATH=/ui pnpm -r --filter 'ui' build && pnpm -r --filter 'app' build", "build:app": "BASE_PATH=/ui pnpm -r --filter 'ui' build && pnpm -r --filter 'app' build",
"build:nodes": "cargo build --workspace --target wasm32-unknown-unknown --release && rm -rf ./app/static/nodes/max/plantarium/ && mkdir -p ./app/static/nodes/max/plantarium/ && cp -R ./target/wasm32-unknown-unknown/release/*.wasm ./app/static/nodes/max/plantarium/", "build:nodes": "cargo build --workspace --target wasm32-unknown-unknown --release && rm -rf ./app/static/nodes/max/plantarium/ && mkdir -p ./app/static/nodes/max/plantarium/ && cp -R ./target/wasm32-unknown-unknown/release/*.wasm ./app/static/nodes/max/plantarium/",
"build:node": "cargo build --package float --package output --package math --package nodarium_macros --package nodarium_utils --target wasm32-unknown-unknown --release && rm -rf ./app/static/nodes/max/plantarium/ && mkdir -p ./app/static/nodes/max/plantarium/ && cp -R ./target/wasm32-unknown-unknown/release/*.wasm ./app/static/nodes/max/plantarium/",
"build:deploy": "pnpm build", "build:deploy": "pnpm build",
"dev:nodes": "chokidar './nodes/**' --initial -i '/pkg/' -c 'pnpm build:nodes'", "dev:nodes": "chokidar './nodes/**' --initial -i '/pkg/' -c 'pnpm build:nodes'",
"dev:app_ui": "pnpm -r --parallel --filter 'app' --filter './packages/ui' dev", "dev:app_ui": "pnpm -r --parallel --filter 'app' --filter './packages/ui' dev",

View File

@@ -6,6 +6,7 @@ use std::env;
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use syn::parse_macro_input; use syn::parse_macro_input;
use syn::spanned::Spanned;
fn add_line_numbers(input: String) -> String { fn add_line_numbers(input: String) -> String {
return input return input
@@ -16,86 +17,145 @@ fn add_line_numbers(input: String) -> String {
.join("\n"); .join("\n");
} }
fn read_node_definition(file_path: &Path) -> NodeDefinition {
let project_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let full_path = Path::new(&project_dir).join(file_path);
let json_content = fs::read_to_string(&full_path).unwrap_or_else(|err| {
panic!(
"Failed to read JSON file at '{}/{}': {}",
project_dir,
file_path.to_string_lossy(),
err
)
});
serde_json::from_str(&json_content).unwrap_or_else(|err| {
panic!(
"JSON file contains invalid JSON: \n{} \n{}",
err,
add_line_numbers(json_content.clone())
)
})
}
#[proc_macro_attribute] #[proc_macro_attribute]
pub fn nodarium_execute(_attr: TokenStream, item: TokenStream) -> TokenStream { pub fn nodarium_execute(_attr: TokenStream, item: TokenStream) -> TokenStream {
let input_fn = parse_macro_input!(item as syn::ItemFn); let input_fn = parse_macro_input!(item as syn::ItemFn);
let _fn_name = &input_fn.sig.ident; let fn_name = &input_fn.sig.ident;
let _fn_vis = &input_fn.vis; let fn_vis = &input_fn.vis;
let fn_body = &input_fn.block; let fn_body = &input_fn.block;
let inner_fn_name = syn::Ident::new(&format!("__nodarium_inner_{}", fn_name), fn_name.span());
let first_arg_ident = if let Some(syn::FnArg::Typed(pat_type)) = input_fn.sig.inputs.first() { let def: NodeDefinition = read_node_definition(Path::new("src/input.json"));
let input_count = def.inputs.as_ref().map(|i| i.len()).unwrap_or(0);
validate_signature(&input_fn.sig, input_count, &def);
let input_param_names: Vec<_> = input_fn
.sig
.inputs
.iter()
.filter_map(|arg| {
if let syn::FnArg::Typed(pat_type) = arg {
if let syn::Pat::Ident(pat_ident) = &*pat_type.pat { if let syn::Pat::Ident(pat_ident) = &*pat_type.pat {
&pat_ident.ident Some(pat_ident.ident.clone())
} else { } else {
panic!("Expected a simple identifier for the first argument"); None
} }
} else { } else {
panic!("The execute function must have at least one argument (the input slice)"); None
}; }
})
.collect();
let arg_names: Vec<_> = (0..input_count)
.map(|i| syn::Ident::new(&format!("arg{}", i), input_fn.sig.span()))
.collect();
// We create a wrapper that handles the C ABI and pointer math
let expanded = quote! { let expanded = quote! {
extern "C" { extern "C" {
fn host_log_panic(ptr: *const u8, len: usize); fn __nodarium_log(ptr: *const u8, len: usize);
fn host_log(ptr: *const u8, len: usize); fn __nodarium_log_panic(ptr: *const u8, len: usize);
} }
fn setup_panic_hook() { #fn_vis fn #inner_fn_name(#( #input_param_names: *const i32 ),*) -> Vec<i32> {
static SET_HOOK: std::sync::Once = std::sync::Once::new(); #fn_body
SET_HOOK.call_once(|| { }
#[no_mangle]
#fn_vis extern "C" fn execute(output_pos: i32, #( #arg_names: i32 ),*) -> i32 {
static PANIC_HOOK_SET: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(false);
if !PANIC_HOOK_SET.load(std::sync::atomic::Ordering::SeqCst) {
std::panic::set_hook(Box::new(|info| { std::panic::set_hook(Box::new(|info| {
let msg = info.to_string(); let msg = info.to_string();
unsafe { host_log_panic(msg.as_ptr(), msg.len()); } unsafe { __nodarium_log_panic(msg.as_ptr(), msg.len()); }
})); }));
}); PANIC_HOOK_SET.store(true, std::sync::atomic::Ordering::SeqCst);
} }
#[no_mangle] let result = #inner_fn_name(
pub extern "C" fn __alloc(len: usize) -> *mut i32 { #( #arg_names as *const i32 ),*
let mut buf = Vec::with_capacity(len); );
let ptr = buf.as_mut_ptr();
std::mem::forget(buf);
ptr
}
#[no_mangle] let len_bytes = result.len() * 4;
pub extern "C" fn __free(ptr: *mut i32, len: usize) {
unsafe { unsafe {
let _ = Vec::from_raw_parts(ptr, 0, len); let src = result.as_ptr() as *const u8;
} let dst = output_pos as *mut u8;
dst.copy_from_nonoverlapping(src, len_bytes);
} }
static mut OUTPUT_BUFFER: Vec<i32> = Vec::new(); core::mem::forget(result);
#[no_mangle] len_bytes as i32
pub extern "C" fn execute(ptr: *const i32, len: usize) -> *mut i32 {
setup_panic_hook();
// 1. Convert raw pointer to slice
let input = unsafe { core::slice::from_raw_parts(ptr, len) };
// 2. Call the logic (which we define below)
let result_data: Vec<i32> = internal_logic(input);
// 3. Use the static buffer for the result
let result_len = result_data.len();
unsafe {
OUTPUT_BUFFER.clear();
OUTPUT_BUFFER.reserve(result_len + 1);
OUTPUT_BUFFER.push(result_len as i32);
OUTPUT_BUFFER.extend(result_data);
OUTPUT_BUFFER.as_mut_ptr()
}
}
fn internal_logic(#first_arg_ident: &[i32]) -> Vec<i32> {
#fn_body
} }
}; };
TokenStream::from(expanded) TokenStream::from(expanded)
} }
fn validate_signature(fn_sig: &syn::Signature, expected_inputs: usize, def: &NodeDefinition) {
let param_count = fn_sig.inputs.len();
if param_count != expected_inputs {
panic!(
"Execute function has {} parameters but definition has {} inputs\n\
Definition inputs: {:?}\n\
Expected signature:\n\
pub fn execute({}) -> Vec<i32>",
param_count,
expected_inputs,
def.inputs
.as_ref()
.map(|i| i.keys().collect::<Vec<_>>())
.unwrap_or_default(),
(0..expected_inputs)
.map(|i| format!("arg{}: *const i32", i))
.collect::<Vec<_>>()
.join(", ")
);
}
match &fn_sig.output {
syn::ReturnType::Type(_, ty) => {
let is_vec = match &**ty {
syn::Type::Path(tp) => tp
.path
.segments
.first()
.map(|seg| seg.ident == "Vec")
.unwrap_or(false),
_ => false,
};
if !is_vec {
panic!("Execute function must return Vec<i32>");
}
}
syn::ReturnType::Default => {
panic!("Execute function must return Vec<i32>");
}
}
}
#[proc_macro]
pub fn nodarium_definition_file(input: TokenStream) -> TokenStream {
    let path_lit = syn::parse_macro_input!(input as syn::LitStr);
@@ -105,30 +165,26 @@ pub fn nodarium_definition_file(input: TokenStream) -> TokenStream {
    let full_path = Path::new(&project_dir).join(&file_path);
    let json_content = fs::read_to_string(&full_path).unwrap_or_else(|err| {
-       panic!("Failed to read JSON file at '{}/{}': {}", project_dir, file_path, err)
+       panic!(
+           "Failed to read JSON file at '{}/{}': {}",
+           project_dir, file_path, err
+       )
    });
    let _: NodeDefinition = serde_json::from_str(&json_content).unwrap_or_else(|err| {
-       panic!("JSON file contains invalid JSON: \n{} \n{}", err, add_line_numbers(json_content.clone()))
+       panic!(
+           "JSON file contains invalid JSON: \n{} \n{}",
+           err,
+           add_line_numbers(json_content.clone())
+       )
    });
    // We use the span from the input path literal
    let bytes = syn::LitByteStr::new(json_content.as_bytes(), path_lit.span());
    let len = json_content.len();
    let expanded = quote! {
        #[link_section = "nodarium_definition"]
        static DEFINITION_DATA: [u8; #len] = *#bytes;
-       #[no_mangle]
-       pub extern "C" fn get_definition_ptr() -> *const u8 {
-           DEFINITION_DATA.as_ptr()
-       }
-       #[no_mangle]
-       pub extern "C" fn get_definition_len() -> usize {
-           DEFINITION_DATA.len()
-       }
    };
    TokenStream::from(expanded)
View File

@@ -13,6 +13,7 @@ log.mute();
export class RemoteNodeRegistry implements NodeRegistry {
    status: 'loading' | 'ready' | 'error' = 'loading';
    private nodes: Map<string, NodeDefinition> = new Map();
+   private memory = new WebAssembly.Memory({ initial: 1024, maximum: 8192 });
    constructor(
        private url: string,
@@ -127,7 +128,10 @@ export class RemoteNodeRegistry implements NodeRegistry {
    }
    async register(wasmBuffer: ArrayBuffer) {
-       const wrapper = createWasmWrapper(wasmBuffer);
+       const wrapper = createWasmWrapper(
+           wasmBuffer,
+           this.memory
+       );
        const definition = NodeDefinitionSchema.safeParse(wrapper.get_definition());
@@ -139,10 +143,7 @@ export class RemoteNodeRegistry implements NodeRegistry {
            this.cache.set(definition.data.id, wasmBuffer);
        }
-       let node = {
-           ...definition.data,
-           execute: wrapper.execute
-       };
+       let node = { ...definition.data, execute: wrapper.execute };
        this.nodes.set(definition.data.id, node);
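The single `memory` field is what makes node-to-node data flow cheap: every module registered here is instantiated against the same linear memory, so one node's output slot can be handed to the next node as an input pointer without copying. A minimal sketch, with illustrative buffers, offsets, and import path:

```ts
import { createWasmWrapper } from './wasm-wrapper'; // path is illustrative

declare const bufferA: ArrayBuffer; // compiled node modules, assumed available
declare const bufferB: ArrayBuffer;

const memory = new WebAssembly.Memory({ initial: 1024, maximum: 8192 });
const nodeA = createWasmWrapper(bufferA, memory);
const nodeB = createWasmWrapper(bufferB, memory);

// Node A writes its result at aOut; node B reads it from the same address.
const aOut = 1024;
nodeA.execute(aOut, [/* node A's input offsets */]);

const bOut = 64 * 1024;
nodeB.execute(bOut, [aOut]); // A's output offset becomes B's input pointer
```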
View File

@@ -65,7 +65,7 @@ export const NodeSchema = z.object({
export type SerializedNode = z.infer<typeof NodeSchema>;
export type NodeDefinition = z.infer<typeof NodeDefinitionSchema> & {
-   execute(input: Int32Array): Int32Array;
+   execute(outputPos: number, args: number[]): number;
};
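The new signature moves output placement into the runtime: it has to hand each node an `outputPos` and keep track of how many bytes came back. This commit does not prescribe an allocation strategy; a simple bump pointer over the shared memory is one possible sketch (the `node`, `memory`, and input offsets below are assumed, not defined in this diff):

```ts
declare const node: NodeDefinition;       // a registered node
declare const memory: WebAssembly.Memory; // the registry's shared memory
declare const aPtr: number, bPtr: number; // input offsets written by upstream nodes

let arenaTop = 4096;                      // keep the low bytes free (illustrative)
function allocOutput(maxBytes: number): number {
  const pos = arenaTop;
  arenaTop += (maxBytes + 7) & ~7;        // 8-byte align the next slot
  return pos;
}

const outputPos = allocOutput(4 * 1024);                // reserve a slot for this node
const lenBytes = node.execute(outputPos, [aPtr, bPtr]); // args are input byte offsets
const output = new Int32Array(memory.buffer, outputPos, lenBytes / 4);
```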
export type Socket = {
View File

@@ -9,6 +9,10 @@ description = "A collection of utilities for Nodarium"
[lib]
crate-type = ["rlib"]
+[features]
+default = ["std"]
+std = []
[dependencies]
glam = "0.30.10"
noise = "0.9.0"
View File

@@ -10,6 +10,55 @@ pub fn decode_float(bits: i32) -> f32 {
    f32::from_bits(bits)
}
#[inline]
pub unsafe fn read_i32(ptr: *const i32) -> i32 {
*ptr
}
#[inline]
pub unsafe fn read_f32(ptr: *const i32) -> f32 {
f32::from_bits(*ptr as u32)
}
#[inline]
pub unsafe fn read_bool(ptr: *const i32) -> bool {
*ptr != 0
}
#[inline]
pub unsafe fn read_vec3(ptr: *const i32) -> [f32; 3] {
let p = ptr as *const f32;
[p.read(), p.add(1).read(), p.add(2).read()]
}
#[inline]
pub unsafe fn read_i32_slice(ptr: *const i32, len: usize) -> Vec<i32> {
std::slice::from_raw_parts(ptr, len).to_vec()
}
#[inline]
pub unsafe fn read_f32_slice(ptr: *const i32, len: usize) -> Vec<f32> {
std::slice::from_raw_parts(ptr as *const f32, len).to_vec()
}
#[inline]
pub unsafe fn read_f32_default(ptr: *const i32, default: f32) -> f32 {
if ptr.is_null() {
default
} else {
read_f32(ptr)
}
}
#[inline]
pub unsafe fn read_i32_default(ptr: *const i32, default: i32) -> i32 {
if ptr.is_null() {
default
} else {
read_i32(ptr)
}
}
#[cfg(test)]
mod tests {
    use super::*;
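These helpers pin down the wire format of each input slot: a plain `i32`, `f32` bits stored in an `i32`-sized slot, three consecutive `f32`s for a vec3, and a null (zero) pointer meaning "fall back to the default". A host-side counterpart might look like this; the `memory` binding and the offsets are assumptions, not part of the diff:

```ts
declare const memory: WebAssembly.Memory; // the shared memory the nodes import

const i32 = new Int32Array(memory.buffer);
const f32 = new Float32Array(memory.buffer);

// read_f32 does f32::from_bits(*ptr as u32), so writing through a Float32Array view
// puts exactly the bits it expects at that address.
function writeF32(byteOffset: number, value: number) {
  f32[byteOffset / 4] = value;
}

// read_bool checks *ptr != 0.
function writeBool(byteOffset: number, value: boolean) {
  i32[byteOffset / 4] = value ? 1 : 0;
}

// read_vec3 reads three consecutive f32s starting at the pointer.
function writeVec3(byteOffset: number, v: [number, number, number]) {
  f32.set(v, byteOffset / 4);
}

// read_f32_default / read_i32_default treat a null pointer as "use the default",
// so passing 0 as an argument offset leaves that input unset.
```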
View File

@@ -8,30 +8,30 @@ pub mod geometry;
extern "C" { extern "C" {
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
pub fn host_log(ptr: *const u8, len: usize); pub fn __nodarium_log(ptr: *const u8, len: usize);
} }
#[cfg(debug_assertions)] // #[cfg(debug_assertions)]
#[macro_export] #[macro_export]
macro_rules! log { macro_rules! log {
($($t:tt)*) => {{ ($($t:tt)*) => {{
let msg = std::format!($($t)*); let msg = std::format!($($t)*);
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
unsafe { unsafe {
$crate::host_log(msg.as_ptr(), msg.len()); $crate::__nodarium_log(msg.as_ptr(), msg.len());
} }
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
println!("{}", msg); println!("{}", msg);
}} }}
} }
#[cfg(not(debug_assertions))] // #[cfg(not(debug_assertions))]
#[macro_export] // #[macro_export]
macro_rules! log { // macro_rules! log {
($($arg:tt)*) => {{ // ($($arg:tt)*) => {{
// This will expand to nothing in release builds // // This will expand to nothing in release builds
}}; // }};
} // }
#[allow(dead_code)] #[allow(dead_code)]
#[rustfmt::skip] #[rustfmt::skip]

View File

@@ -1,24 +1,23 @@
interface NodariumExports extends WebAssembly.Exports {
    memory: WebAssembly.Memory;
-   execute: (ptr: number, len: number) => number;
-   __free: (ptr: number, len: number) => void;
-   __alloc: (len: number) => number;
+   execute: (outputPos: number, ...args: number[]) => number;
}
-export function createWasmWrapper(buffer: ArrayBuffer) {
+export function createWasmWrapper(buffer: ArrayBuffer, memory: WebAssembly.Memory) {
    let exports: NodariumExports;
    const importObject = {
        env: {
+           memory: memory,
-           host_log_panic: (ptr: number, len: number) => {
+           __nodarium_log_panic: (ptr: number, len: number) => {
                if (!exports) return;
-               const view = new Uint8Array(exports.memory.buffer, ptr, len);
-               console.error("RUST PANIC:", new TextDecoder().decode(view));
+               const view = new Uint8Array(memory.buffer, ptr, len);
+               console.error('WASM PANIC:', new TextDecoder().decode(view));
            },
-           host_log: (ptr: number, len: number) => {
+           __nodarium_log: (ptr: number, len: number) => {
                if (!exports) return;
-               const view = new Uint8Array(exports.memory.buffer, ptr, len);
-               console.log("RUST:", new TextDecoder().decode(view));
+               const view = new Uint8Array(memory.buffer, ptr, len);
+               console.log('WASM:', new TextDecoder().decode(view));
            }
        }
    };
@@ -27,23 +26,13 @@ export function createWasmWrapper(buffer: ArrayBuffer) {
    const instance = new WebAssembly.Instance(module, importObject);
    exports = instance.exports as NodariumExports;
-   function execute(args: Int32Array) {
-       const inPtr = exports.__alloc(args.length);
-       new Int32Array(exports.memory.buffer).set(args, inPtr / 4);
-       const outPtr = exports.execute(inPtr, args.length);
-       const i32Result = new Int32Array(exports.memory.buffer);
-       const outLen = i32Result[outPtr / 4];
-       const out = i32Result.slice(outPtr / 4 + 1, outPtr / 4 + 1 + outLen);
-       exports.__free(inPtr, args.length);
-       return out;
-   }
+   function execute(outputPos: number, args: number[]): number {
+       console.log('WASM_WRAPPER', { outputPos, args });
+       return exports.execute(outputPos, ...args);
+   }
    function get_definition() {
-       const sections = WebAssembly.Module.customSections(module, "nodarium_definition");
+       const sections = WebAssembly.Module.customSections(module, 'nodarium_definition');
        if (sections.length > 0) {
            const decoder = new TextDecoder();
            const jsonString = decoder.decode(sections[0]);