Compare commits
3 Commits
fdd9785fc7
...
v0.0.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
fdc4802d68
|
|||
|
5a30fce4ec
|
|||
|
0d0eb65ddd
|
@@ -1,9 +0,0 @@
|
||||
[target.wasm32-unknown-unknown]
|
||||
rustflags = [
|
||||
"-C",
|
||||
"link-arg=--import-memory",
|
||||
"-C",
|
||||
"link-arg=--initial-memory=67108864", # 64 MiB
|
||||
"-C",
|
||||
"link-arg=--max-memory=536870912", # 512 MiB
|
||||
]
|
||||
@@ -13,12 +13,16 @@
|
||||
"markdown": {},
|
||||
"toml": {},
|
||||
"dockerfile": {},
|
||||
"ruff": {},
|
||||
"jupyter": {},
|
||||
"malva": {},
|
||||
"markup": {
|
||||
// https://dprint.dev/plugins/markup_fmt/config/
|
||||
"scriptIndent": true,
|
||||
"styleIndent": true,
|
||||
},
|
||||
"yaml": {},
|
||||
"graphql": {},
|
||||
"exec": {
|
||||
"cwd": "${configDir}",
|
||||
"commands": [
|
||||
@@ -42,18 +46,20 @@
|
||||
"**/*-lock.yaml",
|
||||
"**/yaml.lock",
|
||||
"**/.DS_Store",
|
||||
"**/.pnpm-store",
|
||||
"**/.cargo",
|
||||
"**/target",
|
||||
],
|
||||
"plugins": [
|
||||
"https://plugins.dprint.dev/typescript-0.95.15.wasm",
|
||||
"https://plugins.dprint.dev/typescript-0.95.13.wasm",
|
||||
"https://plugins.dprint.dev/json-0.21.1.wasm",
|
||||
"https://plugins.dprint.dev/markdown-0.21.1.wasm",
|
||||
"https://plugins.dprint.dev/markdown-0.20.0.wasm",
|
||||
"https://plugins.dprint.dev/toml-0.7.0.wasm",
|
||||
"https://plugins.dprint.dev/dockerfile-0.3.3.wasm",
|
||||
"https://plugins.dprint.dev/ruff-0.6.11.wasm",
|
||||
"https://plugins.dprint.dev/jupyter-0.2.1.wasm",
|
||||
"https://plugins.dprint.dev/g-plane/malva-v0.15.1.wasm",
|
||||
"https://plugins.dprint.dev/g-plane/markup_fmt-v0.25.3.wasm",
|
||||
"https://plugins.dprint.dev/g-plane/pretty_yaml-v0.6.0.wasm",
|
||||
"https://plugins.dprint.dev/g-plane/pretty_yaml-v0.5.1.wasm",
|
||||
"https://plugins.dprint.dev/g-plane/pretty_graphql-v0.2.3.wasm",
|
||||
"https://plugins.dprint.dev/exec-0.6.0.json@a054130d458f124f9b5c91484833828950723a5af3f8ff2bd1523bd47b83b364",
|
||||
"https://plugins.dprint.dev/biome-0.11.10.wasm",
|
||||
],
|
||||
}
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
mkdir -p app/static
|
||||
|
||||
cp CHANGELOG.md app/static/CHANGELOG.md
|
||||
|
||||
# Derive branch/tag info
|
||||
REF_TYPE="${GITHUB_REF_TYPE:-branch}"
|
||||
REF_NAME="${GITHUB_REF_NAME:-$(basename "$GITHUB_REF")}"
|
||||
BRANCH="${GITHUB_HEAD_REF:-}"
|
||||
if [[ -z "$BRANCH" && "$REF_TYPE" == "branch" ]]; then
|
||||
BRANCH="$REF_NAME"
|
||||
fi
|
||||
|
||||
# Determine last tag and commits since
|
||||
LAST_TAG="$(git describe --tags --abbrev=0 2>/dev/null || true)"
|
||||
if [[ -n "$LAST_TAG" ]]; then
|
||||
COMMITS_SINCE_LAST_RELEASE="$(git rev-list --count "${LAST_TAG}..HEAD")"
|
||||
else
|
||||
COMMITS_SINCE_LAST_RELEASE="0"
|
||||
fi
|
||||
|
||||
commit_message=$(git log -1 --pretty=%B | tr -d '\n' | sed 's/"/\\"/g')
|
||||
|
||||
cat >app/static/git.json <<EOF
|
||||
{
|
||||
"ref": "${GITHUB_REF:-}",
|
||||
"ref_name": "${REF_NAME}",
|
||||
"ref_type": "${REF_TYPE}",
|
||||
"sha": "${GITHUB_SHA:-}",
|
||||
"run_number": "${GITHUB_RUN_NUMBER:-}",
|
||||
"event_name": "${GITHUB_EVENT_NAME:-}",
|
||||
"workflow": "${GITHUB_WORKFLOW:-}",
|
||||
"job": "${GITHUB_JOB:-}",
|
||||
"commit_message": "${commit_message}",
|
||||
"commit_timestamp": "$(git log -1 --pretty=%cI)",
|
||||
"branch": "${BRANCH}",
|
||||
"commits_since_last_release": "${COMMITS_SINCE_LAST_RELEASE}"
|
||||
}
|
||||
EOF
|
||||
|
||||
pnpm build
|
||||
|
||||
cp -R packages/ui/build app/build/ui
|
||||
@@ -5,21 +5,16 @@ TAG="$GITHUB_REF_NAME"
|
||||
VERSION=$(echo "$TAG" | sed 's/^v//')
|
||||
DATE=$(date +%Y-%m-%d)
|
||||
|
||||
echo "🚀 Creating release for $TAG"
|
||||
echo "🚀 Creating release for $TAG (safe mode)"
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# 1. Extract release notes from annotated tag
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
# Ensure the local git knows this is an annotated tag with metadata
|
||||
git fetch origin "refs/tags/$TAG:refs/tags/$TAG" --force
|
||||
NOTES=$(git tag -l "$TAG" --format='%(contents)')
|
||||
|
||||
# %(contents) gets the whole message.
|
||||
# If you want ONLY what you typed after the first line, use %(contents:body)
|
||||
NOTES=$(git tag -l "$TAG" --format='%(contents)' | sed '/-----BEGIN PGP SIGNATURE-----/,/-----END PGP SIGNATURE-----/d')
|
||||
|
||||
if [ -z "$(echo "$NOTES" | tr -d '[:space:]')" ]; then
|
||||
echo "❌ Tag message is empty or tag is not annotated"
|
||||
if [ -z "$NOTES" ]; then
|
||||
echo "❌ Tag message is empty"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -28,39 +23,26 @@ git checkout main
|
||||
# -------------------------------------------------------------------
|
||||
# 2. Update all package.json versions
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
echo "🔧 Updating package.json versions to $VERSION"
|
||||
find . -name package.json ! -path "*/node_modules/*" | while read -r file; do
|
||||
|
||||
find . -name package.json ! -path "*/node_modules/*" | while read file; do
|
||||
tmp_file="$file.tmp"
|
||||
jq --arg v "$VERSION" '.version = $v' "$file" >"$tmp_file"
|
||||
mv "$tmp_file" "$file"
|
||||
done
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# 3. Generate commit list since last release
|
||||
# 3. Update CHANGELOG.md (prepend)
|
||||
# -------------------------------------------------------------------
|
||||
LAST_TAG=$(git tag --sort=-creatordate | grep -v "^$TAG$" | head -n 1 || echo "")
|
||||
|
||||
if [ -n "$LAST_TAG" ]; then
|
||||
# Filter out previous 'chore(release)' commits so the list stays clean
|
||||
COMMITS=$(git log "$LAST_TAG"..HEAD --pretty=format:'* [%h](https://git.max-richter.dev/max/nodarium/commit/%H) %s' | grep -v "chore(release)")
|
||||
else
|
||||
COMMITS=$(git log HEAD --pretty=format:'* [%h](https://git.max-richter.dev/max/nodarium/commit/%H) %s' | grep -v "chore(release)")
|
||||
fi
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# 4. Update CHANGELOG.md (prepend)
|
||||
# -------------------------------------------------------------------
|
||||
tmp_changelog="CHANGELOG.tmp"
|
||||
{
|
||||
echo "# $TAG ($DATE)"
|
||||
echo "## $TAG ($DATE)"
|
||||
echo ""
|
||||
echo "$NOTES"
|
||||
echo ""
|
||||
if [ -n "$COMMITS" ]; then
|
||||
echo "---"
|
||||
echo "$COMMITS"
|
||||
echo ""
|
||||
fi
|
||||
echo "---"
|
||||
echo ""
|
||||
if [ -f CHANGELOG.md ]; then
|
||||
cat CHANGELOG.md
|
||||
@@ -69,23 +51,27 @@ tmp_changelog="CHANGELOG.tmp"
|
||||
|
||||
mv "$tmp_changelog" CHANGELOG.md
|
||||
|
||||
pnpm exec dprint fmt CHANGELOG.md
|
||||
# -------------------------------------------------------------------
|
||||
# 4. Create release commit
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# 5. Create release commit
|
||||
# -------------------------------------------------------------------
|
||||
git config user.name "release-bot"
|
||||
git config user.email "release-bot@ci"
|
||||
|
||||
git add CHANGELOG.md $(find . -name package.json ! -path "*/node_modules/*")
|
||||
|
||||
# Skip commit if nothing changed
|
||||
if git diff --cached --quiet; then
|
||||
echo "No changes to commit for release $TAG"
|
||||
else
|
||||
git commit -m "chore(release): $TAG"
|
||||
git push origin main
|
||||
exit 0
|
||||
fi
|
||||
|
||||
rm app/static/CHANGELOG.md
|
||||
cp CHANGELOG.md app/static/CHANGELOG.md
|
||||
echo "✅ Release process for $TAG complete"
|
||||
git commit -m "chore(release): $TAG"
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# 5. Push changes
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
git push origin main
|
||||
|
||||
echo "✅ Release commit for $TAG created successfully (tag untouched)"
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Configuring rclone"
|
||||
|
||||
KEY_FILE="$(mktemp)"
|
||||
echo "${SSH_PRIVATE_KEY}" >"${KEY_FILE}"
|
||||
chmod 600 "${KEY_FILE}"
|
||||
|
||||
mkdir -p ~/.config/rclone
|
||||
cat >~/.config/rclone/rclone.conf <<EOF
|
||||
[sftp-remote]
|
||||
type = sftp
|
||||
host = ${SSH_HOST}
|
||||
user = ${SSH_USER}
|
||||
port = ${SSH_PORT}
|
||||
key_file = ${KEY_FILE}
|
||||
EOF
|
||||
|
||||
if [[ "${GITHUB_REF_TYPE:-}" == "tag" ]]; then
|
||||
TARGET_DIR="${REMOTE_DIR}"
|
||||
elif [[ "${GITHUB_EVENT_NAME:-}" == "pull_request" ]]; then
|
||||
SAFE_PR_NAME="${GITHUB_HEAD_REF//\//-}"
|
||||
TARGET_DIR="${REMOTE_DIR}_${SAFE_PR_NAME}"
|
||||
elif [[ "${GITHUB_REF_NAME:-}" == "main" ]]; then
|
||||
TARGET_DIR="${REMOTE_DIR}_main"
|
||||
else
|
||||
SAFE_REF="${GITHUB_REF_NAME//\//-}"
|
||||
TARGET_DIR="${REMOTE_DIR}_${SAFE_REF}"
|
||||
fi
|
||||
|
||||
echo "Deploying to ${TARGET_DIR}"
|
||||
|
||||
rclone sync \
|
||||
--update \
|
||||
--verbose \
|
||||
--progress \
|
||||
--exclude _astro/** \
|
||||
--stats 2s \
|
||||
--stats-one-line \
|
||||
--transfers 4 \
|
||||
./app/build/ \
|
||||
"sftp-remote:${TARGET_DIR}"
|
||||
@@ -1,41 +0,0 @@
|
||||
name: Build & Push CI Image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "Dockerfile.ci"
|
||||
- ".gitea/workflows/build-ci-image.yaml"
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker BuildX
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
config-inline: |
|
||||
[registry."git.max-richter.dev"]
|
||||
https = true
|
||||
insecure = false
|
||||
|
||||
- name: Login to Gitea Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: git.max-richter.dev
|
||||
username: ${{ gitea.actor }}
|
||||
password: ${{ secrets.REGISTRY_TOKEN }}
|
||||
|
||||
- name: Build and Push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile.ci
|
||||
push: true
|
||||
tags: |
|
||||
git.max-richter.dev/${{ gitea.repository }}-ci:latest
|
||||
git.max-richter.dev/${{ gitea.repository }}-ci:${{ gitea.sha }}
|
||||
@@ -1,28 +1,24 @@
|
||||
name: 🚀 Lint & Test & Deploy
|
||||
name: 🚀 Release
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*"]
|
||||
tags: ["*"]
|
||||
pull_request:
|
||||
branches: ["*"]
|
||||
|
||||
env:
|
||||
PNPM_CACHE_FOLDER: .pnpm-store
|
||||
CARGO_HOME: .cargo
|
||||
CARGO_TARGET_DIR: target
|
||||
PNPM_CACHE_FOLDER: ~/.pnpm-store
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
container: git.max-richter.dev/max/nodarium-ci:fd7268d6208aede435e1685817ae6b271c68bd83
|
||||
container: jimfx/nodes:latest
|
||||
|
||||
steps:
|
||||
- name: 📑 Checkout Code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITEA_TOKEN }}
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: 💾 Setup pnpm Cache
|
||||
uses: actions/cache@v4
|
||||
@@ -32,50 +28,31 @@ jobs:
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-
|
||||
|
||||
- name: 🦀 Cache Cargo
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-cargo-
|
||||
|
||||
- name: 📦 Install Dependencies
|
||||
run: pnpm install --frozen-lockfile --store-dir ${{ env.PNPM_CACHE_FOLDER }}
|
||||
|
||||
- name: 🧹 Quality Control
|
||||
run: |
|
||||
pnpm build
|
||||
pnpm lint
|
||||
pnpm format:check
|
||||
pnpm check
|
||||
xvfb-run --auto-servernum --server-args="-screen 0 1280x1024x24" pnpm test
|
||||
- name: 🧹 Lint
|
||||
run: pnpm lint
|
||||
|
||||
- name: 🎨 Format Check
|
||||
run: pnpm format:check
|
||||
|
||||
- name: 🧬 Type Check
|
||||
run: pnpm check
|
||||
|
||||
- name: 🛠️ Build
|
||||
run: ./.gitea/scripts/build.sh
|
||||
run: pnpm build:deploy
|
||||
|
||||
- name: 🚀 Create Release Commit
|
||||
if: gitea.ref_type == 'tag'
|
||||
if: github.ref_type == 'tag'
|
||||
run: ./.gitea/scripts/create-release.sh
|
||||
|
||||
- name: 🏷️ Create Gitea Release
|
||||
if: gitea.ref_type == 'tag'
|
||||
if: github.ref_type == 'tag'
|
||||
uses: akkuman/gitea-release-action@v1
|
||||
with:
|
||||
tag_name: ${{ gitea.ref_name }}
|
||||
release_name: Release ${{ gitea.ref_name }}
|
||||
tag_name: ${{ github.ref_name }}
|
||||
release_name: Release ${{ github.ref_name }}
|
||||
body_path: CHANGELOG.md
|
||||
draft: false
|
||||
prerelease: false
|
||||
|
||||
- name: 🚀 Deploy Changed Files via rclone
|
||||
run: ./.gitea/scripts/deploy-files.sh
|
||||
env:
|
||||
REMOTE_DIR: ${{ vars.REMOTE_DIR }}
|
||||
SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
|
||||
SSH_HOST: ${{ vars.SSH_HOST }}
|
||||
SSH_PORT: ${{ vars.SSH_PORT }}
|
||||
SSH_USER: ${{ vars.SSH_USER }}
|
||||
|
||||
70
CHANGELOG.md
70
CHANGELOG.md
@@ -1,70 +0,0 @@
|
||||
# v0.0.3 (2026-02-07)
|
||||
|
||||
## Features
|
||||
|
||||
- Edge dragging now highlights valid connection sockets, improving graph editing clarity.
|
||||
- InputNumber supports snapping to predefined values while holding Alt.
|
||||
- Changelog is accessible directly from the sidebar and now includes git metadata and a list of commits.
|
||||
|
||||
## Fixes
|
||||
|
||||
- Fixed incorrect socket highlighting when an edge already existed.
|
||||
- Corrected initialization of `InputNumber` values outside min/max bounds.
|
||||
- Fixed initialization of nested vec3 inputs.
|
||||
- Multiple CI fixes to ensure reliable builds, correct environment variables, and proper image handling.
|
||||
|
||||
## Maintenance / CI
|
||||
|
||||
- Significant CI and Dockerfile cleanup and optimization.
|
||||
- Improved git metadata generation during builds.
|
||||
- Dependency updates, formatting, and test snapshot updates.
|
||||
|
||||
---
|
||||
|
||||
- [f8a2a95](https://git.max-richter.dev/max/nodarium/commit/f8a2a95bc18fa3c8c1db67dc0c2b66db1ff0d866) chore: clean CHANGELOG.md
|
||||
- [c9dd143](https://git.max-richter.dev/max/nodarium/commit/c9dd143916d758991f3ba30723a32c18b6f98bb5) fix(ci): correctly add release notes from tag to changelog
|
||||
- [898dd49](https://git.max-richter.dev/max/nodarium/commit/898dd49aee930350af8645382ef5042765a1fac7) fix(ci): correctly copy changelog to build output
|
||||
- [9fb69d7](https://git.max-richter.dev/max/nodarium/commit/9fb69d760fdf92ecc2448e468242970ec48443b0) feat: show commits since last release in changelog
|
||||
- [bafbcca](https://git.max-richter.dev/max/nodarium/commit/bafbcca2b8a7cd9f76e961349f11ec84d1e4da63) fix: wrong socket was highlighted when dragging node
|
||||
- [8ad9e55](https://git.max-richter.dev/max/nodarium/commit/8ad9e5535cd752ef111504226b4dac57b5adcf3d) feat: highlight possible sockets when dragging edge
|
||||
- [11eaeb7](https://git.max-richter.dev/max/nodarium/commit/11eaeb719be7f34af8db8b7908008a15308c0cac) feat(app): display some git metadata in changelog
|
||||
- [74c2978](https://git.max-richter.dev/max/nodarium/commit/74c2978cd16d2dd95ce1ae8019dfb9098e52b4b6) chore: cleanup git.json a bit
|
||||
- [4fdc247](https://git.max-richter.dev/max/nodarium/commit/4fdc24790490d3f13ee94a557159617f4077a2f9) ci: update build.sh to correct git.json
|
||||
- [c3f8b4b](https://git.max-richter.dev/max/nodarium/commit/c3f8b4b5aad7a525fb11ab14c9236374cb60442d) ci: debug available env vars
|
||||
- [67591c0](https://git.max-richter.dev/max/nodarium/commit/67591c0572b873d8c7cd00db8efb7dac2d6d4de2) chore: pnpm format
|
||||
- [de1f9d6](https://git.max-richter.dev/max/nodarium/commit/de1f9d6ab669b8e699d98b8855e125e21030b5b3) feat(ui): change inputnumber to snap to values when alt is pressed
|
||||
- [6acce72](https://git.max-richter.dev/max/nodarium/commit/6acce72fb8c416cc7f6eec99c2ae94d6529e960c) fix(ui): correctly initialize InputNumber
|
||||
- [cf8943b](https://git.max-richter.dev/max/nodarium/commit/cf8943b2059aa286e41865caf75058d35498daf7) chore: pnpm update
|
||||
- [9e03d36](https://git.max-richter.dev/max/nodarium/commit/9e03d36482bb4f972c384b66b2dcf258f0cd18be) chore: use newest ci image
|
||||
- [fd7268d](https://git.max-richter.dev/max/nodarium/commit/fd7268d6208aede435e1685817ae6b271c68bd83) ci: make dockerfile work
|
||||
- [6358c22](https://git.max-richter.dev/max/nodarium/commit/6358c22a853ec340be5223fabb8289092e4f4afe) ci: use tagged own image for ci
|
||||
- [655b6a1](https://git.max-richter.dev/max/nodarium/commit/655b6a18b282f0cddcc750892e575ee6c311036b) ci: make dockerfile work
|
||||
- [37b2bdc](https://git.max-richter.dev/max/nodarium/commit/37b2bdc8bdbd8ded6b22b89214b49de46f788351) ci: update ci Dockerfile to work
|
||||
- [94e01d4](https://git.max-richter.dev/max/nodarium/commit/94e01d4ea865f15ce06b52827a1ae6906de5be5e) ci: correctly build and push ci image
|
||||
- [35f5177](https://git.max-richter.dev/max/nodarium/commit/35f5177884b62bbf119af1bbf4df61dd0291effb) feat: try to optimize the Dockerfile
|
||||
- [ac2c61f](https://git.max-richter.dev/max/nodarium/commit/ac2c61f2211ba96bbdbb542179905ca776537cec) ci: use actual git url in ci
|
||||
- [ef3d462](https://git.max-richter.dev/max/nodarium/commit/ef3d46279f4ff9c04d80bb2d9a9e7cfec63b224e) fix(ci): build before testing
|
||||
- [703da32](https://git.max-richter.dev/max/nodarium/commit/703da324fabbef0e2c017f0f7a925209fa26bd03) ci: automatically build ci image and store locally
|
||||
- [1dae472](https://git.max-richter.dev/max/nodarium/commit/1dae472253ccb5e3766f2270adc053b922f46738) ci: add a git.json metadata file during build
|
||||
- [09fdfb8](https://git.max-richter.dev/max/nodarium/commit/09fdfb88cd203ace0e36663ebdb2c8c7ba53f190) chore: update test screenshots
|
||||
- [04b63cc](https://git.max-richter.dev/max/nodarium/commit/04b63cc7e2fc4fcfa0973cf40592d11457179db3) feat: add changelog to sidebar
|
||||
- [cb6a356](https://git.max-richter.dev/max/nodarium/commit/cb6a35606dfda50b0c81b04902d7a6c8e59458d2) feat(ci): also cache cargo stuff
|
||||
- [9c9f3ba](https://git.max-richter.dev/max/nodarium/commit/9c9f3ba3b7c94215a86b0a338a5cecdd87b96b28) fix(ci): use GITHUB_instead of GITEA_ for env vars
|
||||
- [08dda2b](https://git.max-richter.dev/max/nodarium/commit/08dda2b2cb4d276846abe30bc260127626bb508a) chore: pnpm format
|
||||
- [059129a](https://git.max-richter.dev/max/nodarium/commit/059129a738d02b8b313bb301a515697c7c4315ac) fix(ci): deploy prs and main
|
||||
- [437c9f4](https://git.max-richter.dev/max/nodarium/commit/437c9f4a252125e1724686edace0f5f006f58439) feat(ci): add list of all commits to changelog entry
|
||||
- [48bf447](https://git.max-richter.dev/max/nodarium/commit/48bf447ce12949d7c29a230806d160840b7847e1) docs: straighten up changelog a bit
|
||||
- [548fa4f](https://git.max-richter.dev/max/nodarium/commit/548fa4f0a1a14adc40a74da1182fa6da81eab3df) fix(app): correctly initialize vec3 inputs in nestedsettings
|
||||
|
||||
# v0.0.2 (2026-02-04)
|
||||
|
||||
## Fixes
|
||||
|
||||
---
|
||||
|
||||
- []() fix(ci): actually deploy on tags
|
||||
- []() fix(app): correctly handle false value in settings
|
||||
|
||||
# v0.0.1 (2026-02-03)
|
||||
|
||||
chore: format
|
||||
24
Cargo.lock
generated
24
Cargo.lock
generated
@@ -24,14 +24,6 @@ dependencies = [
|
||||
"nodarium_utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "debug"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"nodarium_macros",
|
||||
"nodarium_utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "float"
|
||||
version = "0.1.0"
|
||||
@@ -70,14 +62,6 @@ version = "1.0.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
|
||||
|
||||
[[package]]
|
||||
name = "leaf"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"nodarium_macros",
|
||||
"nodarium_utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "math"
|
||||
version = "0.1.0"
|
||||
@@ -261,14 +245,6 @@ dependencies = [
|
||||
"zmij",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shape"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"nodarium_macros",
|
||||
"nodarium_utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stem"
|
||||
version = "0.1.0"
|
||||
|
||||
19
Dockerfile
Normal file
19
Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM node:24-alpine
|
||||
|
||||
# Install all required packages in one layer
|
||||
RUN apk add --no-cache curl git jq g++ make
|
||||
|
||||
# Set Rust paths
|
||||
ENV RUSTUP_HOME=/usr/local/rustup \
|
||||
CARGO_HOME=/usr/local/cargo \
|
||||
PATH=/usr/local/cargo/bin:$PATH
|
||||
|
||||
# Install Rust, wasm target, and pnpm
|
||||
RUN curl --silent --show-error --location --fail --retry 3 \
|
||||
--proto '=https' --tlsv1.2 \
|
||||
--output /tmp/rustup-init.sh https://sh.rustup.rs \
|
||||
&& sh /tmp/rustup-init.sh -y --no-modify-path --profile minimal \
|
||||
&& rm /tmp/rustup-init.sh \
|
||||
&& rustup target add wasm32-unknown-unknown \
|
||||
&& rm -rf /usr/local/rustup/toolchains/*/share/doc \
|
||||
&& npm i -g pnpm
|
||||
@@ -1,30 +0,0 @@
|
||||
FROM node:25-bookworm-slim
|
||||
|
||||
ENV RUSTUP_HOME=/usr/local/rustup \
|
||||
CARGO_HOME=/usr/local/cargo \
|
||||
PATH=/usr/local/cargo/bin:$PATH
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
ca-certificates=20230311+deb12u1 \
|
||||
curl=7.88.1-10+deb12u14 \
|
||||
git=1:2.39.5-0+deb12u3 \
|
||||
jq=1.6-2.1+deb12u1 \
|
||||
g++=4:12.2.0-3 \
|
||||
rclone=1.60.1+dfsg-2+b5 \
|
||||
xvfb=2:21.1.7-3+deb12u11 \
|
||||
xauth=1:1.1.2-1 \
|
||||
--no-install-recommends \
|
||||
# Install Rust
|
||||
&& curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
--default-toolchain stable \
|
||||
--profile minimal \
|
||||
&& rustup target add wasm32-unknown-unknown \
|
||||
# Setup Playwright
|
||||
&& npm i -g pnpm \
|
||||
&& pnpm dlx playwright install --with-deps firefox \
|
||||
# Final Cleanup
|
||||
&& rm -rf /usr/local/rustup/downloads /usr/local/rustup/tmp \
|
||||
&& rm -rf /usr/local/cargo/registry/index /usr/local/cargo/registry/cache \
|
||||
&& rm -rf /usr/local/rustup/toolchains/*/share/doc \
|
||||
&& apt-get purge -y --auto-remove \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
@@ -27,6 +27,7 @@ Currently this visual programming language is used to develop <https://nodes.max
|
||||
- [Node.js](https://nodejs.org/en/download)
|
||||
- [pnpm](https://pnpm.io/installation)
|
||||
- [rust](https://www.rust-lang.org/tools/install)
|
||||
- wasm-pack
|
||||
|
||||
### Install dependencies
|
||||
|
||||
|
||||
@@ -1,783 +0,0 @@
|
||||
# Shared Memory Refactor Plan
|
||||
|
||||
## Executive Summary
|
||||
|
||||
Migrate to a single shared `WebAssembly.Memory` instance imported by all nodes using `--import-memory`. The `#[nodarium_execute]` macro writes the function's return value directly to shared memory at the specified offset.
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────────┐
|
||||
│ Shared WebAssembly.Memory │
|
||||
│ ┌───────────────────────────────────────────────────────────────┐ │
|
||||
│ │ [Node A output] [Node B output] [Node C output] ... │ │
|
||||
│ │ ┌────────────┐ ┌────────────┐ ┌────────────┐ │ │
|
||||
│ │ │ Vec<i32> │ │ Vec<i32> │ │ Vec<i32> │ │ │
|
||||
│ │ │ 4 bytes │ │ 12 bytes │ │ 2KB │ │ │
|
||||
│ │ └────────────┘ └────────────┘ └────────────┘ │ │
|
||||
│ │ │ │
|
||||
│ │ offset: 0 ────────────────────────────────────────────────► │ │
|
||||
│ └───────────────────────────────────────────────────────────────┘ │
|
||||
└─────────────────────────────────────────────────────────────────────┘
|
||||
▲
|
||||
│
|
||||
│ import { memory } from "env"
|
||||
┌─────────────────────────┼─────────────────────────┐
|
||||
│ │ │
|
||||
┌────┴────┐ ┌────┴────┐ ┌────┴────┐
|
||||
│ Node A │ │ Node B │ │ Node C │
|
||||
│ WASM │ │ WASM │ │ WASM │
|
||||
└─────────┘ └─────────┘ └─────────┘
|
||||
```
|
||||
|
||||
## Phase 1: Compilation Configuration
|
||||
|
||||
### 1.1 Cargo Config
|
||||
|
||||
```toml
|
||||
# nodes/max/plantarium/box/.cargo/config.toml
|
||||
[build]
|
||||
rustflags = ["-C", "link-arg=--import-memory"]
|
||||
```
|
||||
|
||||
Or globally in `Cargo.toml`:
|
||||
|
||||
```toml
|
||||
[profile.release]
|
||||
rustflags = ["-C", "link-arg=--import-memory"]
|
||||
```
|
||||
|
||||
### 1.2 Import Memory Semantics
|
||||
|
||||
With `--import-memory`:
|
||||
|
||||
- Nodes **import** memory from the host (not export their own)
|
||||
- All nodes receive the same `WebAssembly.Memory` instance
|
||||
- Memory is read/write accessible from all modules
|
||||
- No `memory.grow` needed (host manages allocation)
|
||||
|
||||
## Phase 2: Macro Design
|
||||
|
||||
### 2.1 Clean Node API
|
||||
|
||||
```rust
|
||||
// input.json has 3 inputs: op_type, a, b
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(op_type: *i32, a: *i32, b: *i32) -> Vec<i32> {
|
||||
// Read inputs directly from shared memory
|
||||
let op = unsafe { *op_type };
|
||||
let a_val = f32::from_bits(unsafe { *a } as u32);
|
||||
let b_val = f32::from_bits(unsafe { *b } as u32);
|
||||
|
||||
let result = match op {
|
||||
0 => a_val + b_val,
|
||||
1 => a_val - b_val,
|
||||
2 => a_val * b_val,
|
||||
3 => a_val / b_val,
|
||||
_ => 0.0,
|
||||
};
|
||||
|
||||
// Return Vec<i32>, macro handles writing to shared memory
|
||||
vec![result.to_bits()]
|
||||
}
|
||||
```
|
||||
|
||||
### 2.2 Macro Implementation
|
||||
|
||||
```rust
|
||||
// packages/macros/src/lib.rs
|
||||
|
||||
#[proc_macro_attribute]
|
||||
pub fn nodarium_execute(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
||||
let input_fn = parse_macro_input!(item as syn::ItemFn);
|
||||
let fn_name = &input_fn.sig.ident;
|
||||
|
||||
// Parse definition to get input count
|
||||
let project_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
|
||||
let def: NodeDefinition = serde_json::from_str(&fs::read_to_string(
|
||||
Path::new(&project_dir).join("src/input.json")
|
||||
).unwrap()).unwrap();
|
||||
|
||||
let input_count = def.inputs.as_ref().map(|i| i.len()).unwrap_or(0);
|
||||
|
||||
// Validate signature
|
||||
validate_signature(&input_fn, input_count);
|
||||
|
||||
// Generate wrapper
|
||||
generate_execute_wrapper(input_fn, fn_name, input_count)
|
||||
}
|
||||
|
||||
fn validate_signature(fn_sig: &syn::Signature, expected_inputs: usize) {
|
||||
let param_count = fn_sig.inputs.len();
|
||||
if param_count != expected_inputs {
|
||||
panic!(
|
||||
"Execute function has {} parameters but definition has {} inputs\n\
|
||||
Definition inputs: {:?}\n\
|
||||
Expected signature:\n\
|
||||
pub fn execute({}) -> Vec<i32>",
|
||||
param_count,
|
||||
expected_inputs,
|
||||
def.inputs.as_ref().map(|i| i.keys().collect::<Vec<_>>()),
|
||||
(0..expected_inputs)
|
||||
.map(|i| format!("arg{}: *const i32", i))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
);
|
||||
}
|
||||
|
||||
// Verify return type is Vec<i32>
|
||||
match &fn_sig.output {
|
||||
syn::ReturnType::Type(_, ty) => {
|
||||
if !matches!(&**ty, syn::Type::Path(tp) if tp.path.is_ident("Vec")) {
|
||||
panic!("Execute function must return Vec<i32>");
|
||||
}
|
||||
}
|
||||
syn::ReturnType::Default => {
|
||||
panic!("Execute function must return Vec<i32>");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn generate_execute_wrapper(
|
||||
input_fn: syn::ItemFn,
|
||||
fn_name: &syn::Ident,
|
||||
input_count: usize,
|
||||
) -> TokenStream {
|
||||
let arg_names: Vec<_> = (0..input_count)
|
||||
.map(|i| syn::Ident::new(&format!("arg{}", i), proc_macro2::Span::call_site()))
|
||||
.collect();
|
||||
|
||||
let expanded = quote! {
|
||||
#input_fn
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn execute(
|
||||
output_pos: i32,
|
||||
#( #arg_names: i32 ),*
|
||||
) -> i32 {
|
||||
extern "C" {
|
||||
fn __nodarium_log(ptr: *const u8, len: usize);
|
||||
fn __nodarium_log_panic(ptr: *const u8, len: usize);
|
||||
}
|
||||
|
||||
// Setup panic hook
|
||||
static SET_HOOK: std::sync::Once = std::sync::Once::new();
|
||||
SET_HOOK.call_once(|| {
|
||||
std::panic::set_hook(Box::new(|info| {
|
||||
let msg = info.to_string();
|
||||
unsafe { __nodarium_log_panic(msg.as_ptr(), msg.len()); }
|
||||
}));
|
||||
});
|
||||
|
||||
// Call user function
|
||||
let result = #fn_name(
|
||||
#( #arg_names as *const i32 ),*
|
||||
);
|
||||
|
||||
// Write result directly to shared memory at output_pos
|
||||
let len_bytes = result.len() * 4;
|
||||
unsafe {
|
||||
let src = result.as_ptr() as *const u8;
|
||||
let dst = output_pos as *mut u8;
|
||||
dst.copy_from_nonoverlapping(src, len_bytes);
|
||||
}
|
||||
|
||||
// Forget the Vec to prevent deallocation (data is in shared memory now)
|
||||
core::mem::forget(result);
|
||||
|
||||
len_bytes as i32
|
||||
}
|
||||
};
|
||||
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
```
|
||||
|
||||
### 2.3 Generated Assembly
|
||||
|
||||
The macro generates:
|
||||
|
||||
```asm
|
||||
; Input: output_pos in register r0, arg0 in r1, arg1 in r2, arg2 in r3
|
||||
execute:
|
||||
; Call user function
|
||||
bl user_execute ; returns pointer to Vec<i32> in r0
|
||||
|
||||
; Calculate byte length
|
||||
ldr r4, [r0, #8] ; Vec::len field
|
||||
lsl r4, r4, #2 ; len * 4 (i32 = 4 bytes)
|
||||
|
||||
; Copy Vec data to shared memory at output_pos
|
||||
ldr r5, [r0, #0] ; Vec::ptr field
|
||||
ldr r6, [r0, #4] ; capacity (unused)
|
||||
|
||||
; memcpy(dst=output_pos, src=r5, len=r4)
|
||||
; (implemented via copy_from_nonoverlapping)
|
||||
|
||||
; Return length
|
||||
mov r0, r4
|
||||
bx lr
|
||||
```
|
||||
|
||||
## Phase 3: Input Reading Helpers
|
||||
|
||||
```rust
|
||||
// packages/utils/src/accessor.rs
|
||||
|
||||
/// Read i32 from shared memory
|
||||
#[inline]
|
||||
pub unsafe fn read_i32(ptr: *const i32) -> i32 {
|
||||
*ptr
|
||||
}
|
||||
|
||||
/// Read f32 from shared memory (stored as i32 bits)
|
||||
#[inline]
|
||||
pub unsafe fn read_f32(ptr: *const i32) -> f32 {
|
||||
f32::from_bits(*ptr as u32)
|
||||
}
|
||||
|
||||
/// Read boolean from shared memory
|
||||
#[inline]
|
||||
pub unsafe fn read_bool(ptr: *const i32) -> bool {
|
||||
*ptr != 0
|
||||
}
|
||||
|
||||
/// Read vec3 (3 f32s) from shared memory
|
||||
#[inline]
|
||||
pub unsafe fn read_vec3(ptr: *const i32) -> [f32; 3] {
|
||||
let p = ptr as *const f32;
|
||||
[p.read(), p.add(1).read(), p.add(2).read()]
|
||||
}
|
||||
|
||||
/// Read slice from shared memory
|
||||
#[inline]
|
||||
pub unsafe fn read_i32_slice(ptr: *const i32, len: usize) -> &[i32] {
|
||||
std::slice::from_raw_parts(ptr, len)
|
||||
}
|
||||
|
||||
/// Read f32 slice from shared memory
|
||||
#[inline]
|
||||
pub unsafe fn read_f32_slice(ptr: *const i32, len: usize) -> &[f32] {
|
||||
std::slice::from_raw_parts(ptr as *const f32, len)
|
||||
}
|
||||
|
||||
/// Read with default value
|
||||
#[inline]
|
||||
pub unsafe fn read_f32_default(ptr: *const i32, default: f32) -> f32 {
|
||||
if ptr.is_null() { default } else { read_f32(ptr) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub unsafe fn read_i32_default(ptr: *const i32, default: i32) -> i32 {
|
||||
if ptr.is_null() { default } else { read_i32(ptr) }
|
||||
}
|
||||
```
|
||||
|
||||
## Phase 4: Node Implementation Examples
|
||||
|
||||
### 4.1 Math Node
|
||||
|
||||
```rust
|
||||
// nodes/max/plantarium/math/src/lib.rs
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(op_type: *const i32, a: *const i32, b: *const i32) -> Vec<i32> {
|
||||
use nodarium_utils::{read_i32, read_f32};
|
||||
|
||||
let op = unsafe { read_i32(op_type) };
|
||||
let a_val = unsafe { read_f32(a) };
|
||||
let b_val = unsafe { read_f32(b) };
|
||||
|
||||
let result = match op {
|
||||
0 => a_val + b_val, // add
|
||||
1 => a_val - b_val, // subtract
|
||||
2 => a_val * b_val, // multiply
|
||||
3 => a_val / b_val, // divide
|
||||
_ => 0.0,
|
||||
};
|
||||
|
||||
vec![result.to_bits()]
|
||||
}
|
||||
```
|
||||
|
||||
### 4.2 Vec3 Node
|
||||
|
||||
```rust
|
||||
// nodes/max/plantarium/vec3/src/lib.rs
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(x: *const i32, y: *const i32, z: *const i32) -> Vec<i32> {
|
||||
use nodarium_utils::read_f32;
|
||||
|
||||
let x_val = unsafe { read_f32(x) };
|
||||
let y_val = unsafe { read_f32(y) };
|
||||
let z_val = unsafe { read_f32(z) };
|
||||
|
||||
vec![x_val.to_bits(), y_val.to_bits(), z_val.to_bits()]
|
||||
}
|
||||
```
|
||||
|
||||
### 4.3 Box Node
|
||||
|
||||
```rust
|
||||
// nodes/max/plantarium/box/src/lib.rs
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(size: *const i32) -> Vec<i32> {
|
||||
use nodarium_utils::{read_f32, encode_float, calculate_normals};
|
||||
|
||||
let size = unsafe { read_f32(size) };
|
||||
let p = encode_float(size);
|
||||
let n = encode_float(-size);
|
||||
|
||||
let mut cube_geometry = vec![
|
||||
1, // 1: geometry
|
||||
8, // 8 vertices
|
||||
12, // 12 faces
|
||||
|
||||
// Face indices
|
||||
0, 1, 2, 0, 2, 3,
|
||||
0, 3, 4, 4, 5, 0,
|
||||
6, 1, 0, 5, 6, 0,
|
||||
7, 2, 1, 6, 7, 1,
|
||||
2, 7, 3, 3, 7, 4,
|
||||
7, 6, 4, 4, 6, 5,
|
||||
|
||||
// Bottom plate
|
||||
p, n, n, p, n, p, n, n, p, n, n, n,
|
||||
|
||||
// Top plate
|
||||
n, p, n, p, p, n, p, p, p, n, p, p,
|
||||
|
||||
// Normals
|
||||
// 8 vertices × 3 components = 24 placeholder values
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
];
|
||||
|
||||
calculate_normals(&mut cube_geometry);
|
||||
cube_geometry
|
||||
}
|
||||
```
|
||||
|
||||
### 4.4 Stem Node
|
||||
|
||||
```rust
|
||||
// nodes/max/plantarium/stem/src/lib.rs
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(
|
||||
origin: *const i32,
|
||||
amount: *const i32,
|
||||
length: *const i32,
|
||||
thickness: *const i32,
|
||||
resolution: *const i32,
|
||||
) -> Vec<i32> {
|
||||
use nodarium_utils::{
|
||||
read_vec3, read_i32, read_f32,
|
||||
geometry::{create_multiple_paths, wrap_multiple_paths},
|
||||
};
|
||||
|
||||
let origin = unsafe { read_vec3(origin) };
|
||||
let amount = unsafe { read_i32(amount) } as usize;
|
||||
let length = unsafe { read_f32(length) };
|
||||
let thickness = unsafe { read_f32(thickness) };
|
||||
let resolution = unsafe { read_i32(resolution) } as usize;
|
||||
|
||||
let mut stem_data = create_multiple_paths(amount, resolution, 1);
|
||||
let mut stems = wrap_multiple_paths(&mut stem_data);
|
||||
|
||||
for stem in stems.iter_mut() {
|
||||
let points = stem.get_points_mut();
|
||||
for (i, point) in points.iter_mut().enumerate() {
|
||||
let t = i as f32 / (resolution as f32 - 1.0);
|
||||
point.x = origin[0];
|
||||
point.y = origin[1] + t * length;
|
||||
point.z = origin[2];
|
||||
point.w = thickness * (1.0 - t);
|
||||
}
|
||||
}
|
||||
|
||||
stem_data
|
||||
}
|
||||
```
|
||||
|
||||
## Phase 5: Runtime Implementation
|
||||
|
||||
```typescript
|
||||
// app/src/lib/runtime/memory-manager.ts
|
||||
|
||||
export const SHARED_MEMORY = new WebAssembly.Memory({
|
||||
initial: 1024, // 64MB initial
|
||||
maximum: 4096, // 256MB maximum
|
||||
});
|
||||
|
||||
export class MemoryManager {
|
||||
private offset: number = 0;
|
||||
private readonly start: number = 0;
|
||||
|
||||
reset() {
|
||||
this.offset = this.start;
|
||||
}
|
||||
|
||||
alloc(bytes: number): number {
|
||||
const pos = this.offset;
|
||||
this.offset += bytes;
|
||||
return pos;
|
||||
}
|
||||
|
||||
readInt32(pos: number): number {
|
||||
return new Int32Array(SHARED_MEMORY.buffer)[pos / 4];
|
||||
}
|
||||
|
||||
readFloat32(pos: number): number {
|
||||
return new Float32Array(SHARED_MEMORY.buffer)[pos / 4];
|
||||
}
|
||||
|
||||
readBytes(pos: number, length: number): Uint8Array {
|
||||
return new Uint8Array(SHARED_MEMORY.buffer, pos, length);
|
||||
}
|
||||
|
||||
getInt32View(): Int32Array {
|
||||
return new Int32Array(SHARED_MEMORY.buffer);
|
||||
}
|
||||
|
||||
getFloat32View(): Float32Array {
|
||||
return new Float32Array(SHARED_MEMORY.buffer);
|
||||
}
|
||||
|
||||
getRemaining(): number {
|
||||
return SHARED_MEMORY.buffer.byteLength - this.offset;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```typescript
|
||||
// app/src/lib/runtime/imports.ts
|
||||
|
||||
import { SHARED_MEMORY } from "./memory-manager";
|
||||
|
||||
export function createImportObject(nodeId: string): WebAssembly.Imports {
|
||||
return {
|
||||
env: {
|
||||
// Import shared memory
|
||||
memory: SHARED_MEMORY,
|
||||
|
||||
// Logging
|
||||
__nodarium_log: (ptr: number, len: number) => {
|
||||
const msg = new TextDecoder().decode(
|
||||
new Uint8Array(SHARED_MEMORY.buffer, ptr, len),
|
||||
);
|
||||
console.log(`[${nodeId}] ${msg}`);
|
||||
},
|
||||
|
||||
__nodarium_log_panic: (ptr: number, len: number) => {
|
||||
const msg = new TextDecoder().decode(
|
||||
new Uint8Array(SHARED_MEMORY.buffer, ptr, len),
|
||||
);
|
||||
console.error(`[${nodeId}] PANIC: ${msg}`);
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
```typescript
|
||||
// app/src/lib/runtime/executor.ts
|
||||
|
||||
import { SHARED_MEMORY } from "./memory-manager";
|
||||
import { createImportObject } from "./imports";
|
||||
|
||||
export class SharedMemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
private memory: MemoryManager;
|
||||
private results: Map<string, { pos: number; len: number }> = new Map();
|
||||
private instances: Map<string, WebAssembly.Instance> = new Map();
|
||||
|
||||
constructor(private registry: NodeRegistry) {
|
||||
this.memory = new MemoryManager();
|
||||
}
|
||||
|
||||
async execute(graph: Graph, settings: Record<string, unknown>) {
|
||||
this.memory.reset();
|
||||
this.results.clear();
|
||||
|
||||
const [outputNode, nodes] = await this.addMetaData(graph);
|
||||
const sortedNodes = nodes.sort((a, b) => b.depth - a.depth);
|
||||
|
||||
for (const node of sortedNodes) {
|
||||
await this.executeNode(node, settings);
|
||||
}
|
||||
|
||||
const result = this.results.get(outputNode.id);
|
||||
const view = this.memory.getInt32View();
|
||||
return view.subarray(result.pos / 4, result.pos / 4 + result.len / 4);
|
||||
}
|
||||
|
||||
private async executeNode(
|
||||
node: RuntimeNode,
|
||||
settings: Record<string, unknown>,
|
||||
) {
|
||||
const def = this.definitionMap.get(node.type)!;
|
||||
const inputs = def.inputs || {};
|
||||
const inputNames = Object.keys(inputs);
|
||||
|
||||
const outputSize = this.estimateOutputSize(def);
|
||||
const outputPos = this.memory.alloc(outputSize);
|
||||
const args: number[] = [outputPos];
|
||||
|
||||
for (const inputName of inputNames) {
|
||||
const inputDef = inputs[inputName];
|
||||
const inputNode = node.state.inputNodes[inputName];
|
||||
if (inputNode) {
|
||||
const parentResult = this.results.get(inputNode.id)!;
|
||||
args.push(parentResult.pos);
|
||||
continue;
|
||||
}
|
||||
|
||||
const valuePos = this.memory.alloc(16);
|
||||
this.writeValue(
|
||||
valuePos,
|
||||
inputDef,
|
||||
node.props?.[inputName] ??
|
||||
settings[inputDef.setting ?? ""] ??
|
||||
inputDef.value,
|
||||
);
|
||||
args.push(valuePos);
|
||||
}
|
||||
|
||||
let instance = this.instances.get(node.type);
|
||||
if (!instance) {
|
||||
instance = await this.instantiateNode(node.type);
|
||||
this.instances.set(node.type, instance);
|
||||
}
|
||||
|
||||
const writtenLen = instance.exports.execute(...args);
|
||||
this.results.set(node.id, { pos: outputPos, len: writtenLen });
|
||||
}
|
||||
|
||||
private writeValue(pos: number, inputDef: NodeInput, value: unknown) {
|
||||
const view = this.memory.getFloat32View();
|
||||
const intView = this.memory.getInt32View();
|
||||
|
||||
switch (inputDef.type) {
|
||||
case "float":
|
||||
view[pos / 4] = value as number;
|
||||
break;
|
||||
case "integer":
|
||||
case "select":
|
||||
case "seed":
|
||||
intView[pos / 4] = value as number;
|
||||
break;
|
||||
case "boolean":
|
||||
intView[pos / 4] = value ? 1 : 0;
|
||||
break;
|
||||
case "vec3":
|
||||
const arr = value as number[];
|
||||
view[pos / 4] = arr[0];
|
||||
view[pos / 4 + 1] = arr[1];
|
||||
view[pos / 4 + 2] = arr[2];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private estimateOutputSize(def: NodeDefinition): number {
|
||||
const sizes: Record<string, number> = {
|
||||
float: 16,
|
||||
integer: 16,
|
||||
boolean: 16,
|
||||
vec3: 16,
|
||||
geometry: 8192,
|
||||
path: 4096,
|
||||
};
|
||||
return sizes[def.outputs?.[0] || "float"] || 64;
|
||||
}
|
||||
|
||||
private async instantiateNode(
|
||||
nodeType: string,
|
||||
): Promise<WebAssembly.Instance> {
|
||||
const wasmBytes = await this.fetchWasm(nodeType);
|
||||
const module = await WebAssembly.compile(wasmBytes);
|
||||
const importObject = createImportObject(nodeType);
|
||||
return WebAssembly.instantiate(module, importObject);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Phase 7: Execution Flow Visualization
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────────────────┐
|
||||
│ Execution Timeline │
|
||||
└─────────────────────────────────────────────────────────────────────────────┘
|
||||
|
||||
Step 1: Setup
|
||||
SHARED_MEMORY = new WebAssembly.Memory({ initial: 1024 })
|
||||
memory.offset = 0
|
||||
|
||||
Step 2: Execute Node A (math with 3 inputs)
|
||||
outputPos = memory.alloc(16) = 0
|
||||
args = [0, ptr_to_op_type, ptr_to_a, ptr_to_b]
|
||||
|
||||
Node A reads:
|
||||
*ptr_to_op_type → op
|
||||
*ptr_to_a → a
|
||||
*ptr_to_b → b
|
||||
|
||||
Node A returns: vec![result.to_bits()]
|
||||
|
||||
Macro writes result directly to SHARED_MEMORY[0..4]
|
||||
Returns: 4
|
||||
|
||||
results['A'] = { pos: 0, len: 4 }
|
||||
memory.offset = 4
|
||||
|
||||
Step 3: Execute Node B (stem with 5 inputs, input[0] from A)
|
||||
outputPos = memory.alloc(4096) = 4
|
||||
args = [4, results['A'].pos, ptr_to_amount, ptr_to_length, ...]
|
||||
|
||||
Node B reads:
|
||||
*results['A'].pos → value from Node A
|
||||
*ptr_to_amount → amount
|
||||
...
|
||||
|
||||
Node B returns: stem_data Vec<i32> (1000 elements = 4000 bytes)
|
||||
|
||||
Macro writes stem_data directly to SHARED_MEMORY[4..4004]
|
||||
Returns: 4000
|
||||
|
||||
results['B'] = { pos: 4, len: 4000 }
|
||||
memory.offset = 4004
|
||||
|
||||
Step 4: Execute Node C (output, 1 input from B)
|
||||
outputPos = memory.alloc(16) = 4004
|
||||
args = [4004, results['B'].pos, results['B'].len]
|
||||
|
||||
Node C reads:
|
||||
*results['B'].pos → stem geometry
|
||||
|
||||
Node C returns: vec![1] (identity)
|
||||
Macro writes to SHARED_MEMORY[4004..4008]
|
||||
|
||||
results['C'] = { pos: 4004, len: 4 }
|
||||
|
||||
Final: Return SHARED_MEMORY[4004..4008] as geometry result
|
||||
```
|
||||
|
||||
## Phase 6: Memory Growth Strategy
|
||||
|
||||
```typescript
|
||||
class MemoryManager {
|
||||
alloc(bytes: number): number {
|
||||
const required = this.offset + bytes;
|
||||
const currentBytes = SHARED_MEMORY.buffer.byteLength;
|
||||
|
||||
if (required > currentBytes) {
|
||||
const pagesNeeded = Math.ceil((required - currentBytes) / 65536);

// Memory.grow() returns the *previous* size in pages and throws a
// RangeError on failure — it does not return a success flag.
try {
  SHARED_MEMORY.grow(pagesNeeded);
} catch {
  throw new Error(`Out of memory: need ${bytes} bytes`);
}
|
||||
|
||||
this.int32View = new Int32Array(SHARED_MEMORY.buffer);
|
||||
this.float32View = new Float32Array(SHARED_MEMORY.buffer);
|
||||
}
|
||||
|
||||
const pos = this.offset;
|
||||
this.offset += bytes;
|
||||
return pos;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Phase 8: Migration Checklist
|
||||
|
||||
### Build Configuration
|
||||
|
||||
- [ ] Add `--import-memory` to `rustflags` in `.cargo/config.toml` (rustflags cannot be set in `Cargo.toml`)
|
||||
- [ ] Ensure no nodes export memory
|
||||
|
||||
### Runtime
|
||||
|
||||
- [ ] Create `SHARED_MEMORY` instance
|
||||
- [ ] Implement `MemoryManager` with alloc/read/write
|
||||
- [ ] Create import object factory
|
||||
- [ ] Implement `SharedMemoryRuntimeExecutor`
|
||||
|
||||
### Macro
|
||||
|
||||
- [ ] Parse definition JSON
|
||||
- [ ] Validate function signature (N params, Vec<i32> return)
|
||||
- [ ] Generate wrapper that writes return value to `output_pos`
|
||||
- [ ] Add panic hook
|
||||
|
||||
### Utilities
|
||||
|
||||
- [ ] `read_i32(ptr: *const i32) -> i32`
|
||||
- [ ] `read_f32(ptr: *const i32) -> f32`
|
||||
- [ ] `read_bool(ptr: *const i32) -> bool`
|
||||
- [ ] `read_vec3(ptr: *const i32) -> [f32; 3]`
|
||||
- [ ] `read_i32_slice(ptr: *const i32, len: usize) -> &[i32]`
|
||||
|
||||
### Nodes
|
||||
|
||||
- [ ] `float`, `integer`, `boolean` nodes
|
||||
- [ ] `vec3` node
|
||||
- [ ] `math` node
|
||||
- [ ] `random` node
|
||||
- [ ] `box` node
|
||||
- [ ] `stem` node
|
||||
- [ ] `branch` node
|
||||
- [ ] `instance` node
|
||||
- [ ] `output` node
|
||||
|
||||
## Phase 9: Before vs After
|
||||
|
||||
### Before (per-node memory)
|
||||
|
||||
```rust
|
||||
#[nodarium_execute]
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
let args = split_args(input);
|
||||
let a = evaluate_float(args[0]);
|
||||
let b = evaluate_float(args[1]);
|
||||
vec![(a + b).to_bits()]
|
||||
}
|
||||
```
|
||||
|
||||
### After (shared memory)
|
||||
|
||||
```rust
|
||||
#[nodarium_execute]
|
||||
pub fn execute(a: *const i32, b: *const i32) -> Vec<i32> {
|
||||
use nodarium_utils::read_f32;
|
||||
let a_val = unsafe { read_f32(a) };
|
||||
let b_val = unsafe { read_f32(b) };
|
||||
vec![(a_val + b_val).to_bits()]
|
||||
}
|
||||
```
|
||||
|
||||
**Key differences:**
|
||||
|
||||
- Parameters are input pointers, not a slice
|
||||
- Use `read_f32` helper instead of `evaluate_float`
|
||||
- Macro writes result directly to shared memory
|
||||
- All nodes share the same memory import
|
||||
|
||||
## Phase 10: Benefits
|
||||
|
||||
| Aspect | Before | After |
|
||||
| ----------------- | -------------- | -------------------- |
|
||||
| Memory | N × ~1MB heaps | 1 × 64-256MB shared |
|
||||
| Cross-node access | Copy via JS | Direct read |
|
||||
| API | `&[i32]` slice | `*const i32` pointer |
|
||||
| Validation | Runtime | Compile-time |
|
||||
227
SUMMARY.md
227
SUMMARY.md
@@ -1,227 +0,0 @@
|
||||
# Nodarium - AI Coding Agent Summary
|
||||
|
||||
## Project Overview
|
||||
|
||||
Nodarium is a WebAssembly-based visual programming language used to build <https://nodes.max-richter.dev>, a procedural 3D plant modeling tool. The system allows users to create visual node graphs where each node is a compiled WebAssembly module.
|
||||
|
||||
## Technology Stack
|
||||
|
||||
**Frontend (SvelteKit):**
|
||||
|
||||
- Framework: SvelteKit with Svelte 5
|
||||
- 3D Rendering: Three.js via Threlte
|
||||
- Styling: Tailwind CSS 4
|
||||
- Build Tool: Vite
|
||||
- State Management: Custom store-client package
|
||||
- WASM Integration: vite-plugin-wasm, comlink
|
||||
|
||||
**Backend/Core (Rust/WASM):**
|
||||
|
||||
- Language: Rust
|
||||
- Output: WebAssembly (wasm32-unknown-unknown target)
|
||||
- Build Tool: cargo
|
||||
- Procedural Macros: custom macros package
|
||||
|
||||
**Package Management:**
|
||||
|
||||
- Node packages: pnpm workspace (v10.28.1)
|
||||
- Rust packages: Cargo workspace
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
nodarium/
|
||||
├── app/ # SvelteKit web application
|
||||
│ ├── src/
|
||||
│ │ ├── lib/ # App-specific components and utilities
|
||||
│ │ ├── routes/ # SvelteKit routes (pages)
|
||||
│ │ ├── app.css # Global styles
|
||||
│ │ └── app.html # HTML template
|
||||
│ ├── static/
|
||||
│ │ └── nodes/ # Compiled WASM node files served statically
|
||||
│ ├── package.json # App dependencies
|
||||
│ ├── svelte.config.js # SvelteKit configuration
|
||||
│ ├── vite.config.ts # Vite configuration
|
||||
│ └── tsconfig.json # TypeScript configuration
|
||||
│
|
||||
├── packages/ # Shared workspace packages
|
||||
│ ├── ui/ # Svelte UI component library (published as @nodarium/ui)
|
||||
│ │ ├── src/ # UI components
|
||||
│ │ ├── static/ # Static assets for UI
|
||||
│ │ ├── dist/ # Built output
|
||||
│ │ └── package.json
|
||||
│ ├── registry/ # Node registry with IndexedDB persistence (@nodarium/registry)
|
||||
│ │ └── src/
|
||||
│ ├── types/ # Shared TypeScript types (@nodarium/types)
|
||||
│ │ └── src/
|
||||
│ ├── utils/ # Shared utilities (@nodarium/utils)
|
||||
│ │ └── src/
|
||||
│ └── macros/ # Rust procedural macros for node development
|
||||
│
|
||||
├── nodes/ # WebAssembly node packages (Rust)
|
||||
│ └── max/plantarium/ # Plantarium nodes namespace
|
||||
│ ├── box/ # Box geometry node
|
||||
│ ├── branch/ # Branch generation node
|
||||
│ ├── float/ # Float value node
|
||||
│ ├── gravity/ # Gravity simulation node
|
||||
│ ├── instance/ # Geometry instancing node
|
||||
│ ├── math/ # Math operations node
|
||||
│ ├── noise/ # Noise generation node
|
||||
│ ├── output/ # Output node for results
|
||||
│ ├── random/ # Random value node
|
||||
│ ├── rotate/ # Rotation transformation node
|
||||
│ ├── stem/ # Stem geometry node
|
||||
│ ├── triangle/ # Triangle geometry node
|
||||
│ ├── vec3/ # Vector3 manipulation node
|
||||
│ └── .template/ # Node template for creating new nodes
|
||||
│
|
||||
├── docs/ # Documentation
|
||||
│ ├── ARCHITECTURE.md # System architecture overview
|
||||
│ ├── DEVELOPING_NODES.md # Guide for creating new nodes
|
||||
│ ├── NODE_DEFINITION.md # Node definition schema
|
||||
│ └── PLANTARIUM.md # Plantarium-specific documentation
|
||||
│
|
||||
├── Cargo.toml # Rust workspace configuration
|
||||
├── package.json # Root npm scripts
|
||||
├── pnpm-workspace.yaml # pnpm workspace configuration
|
||||
├── pnpm-lock.yaml # Locked dependency versions
|
||||
└── README.md # Project readme
|
||||
```
|
||||
|
||||
## Node System Architecture
|
||||
|
||||
### What is a Node?
|
||||
|
||||
Nodes are WebAssembly modules that:
|
||||
|
||||
- Have a unique ID (e.g., `max/plantarium/stem`)
|
||||
- Define inputs with types and default values
|
||||
- Define outputs they produce
|
||||
- Execute logic when called with arguments
|
||||
|
||||
### Node Definition Schema
|
||||
|
||||
Nodes are defined via `definition.json` embedded in each WASM module:
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "namespace/category/node-name",
|
||||
"outputs": ["geometry"],
|
||||
"inputs": {
|
||||
"height": { "type": "float", "value": 1.0 },
|
||||
"radius": { "type": "float", "value": 0.1 }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Currently, each node is limited to a single output.
|
||||
|
||||
### Node Execution
|
||||
|
||||
Nodes receive serialized arguments and return serialized outputs. The `nodarium_utils` Rust crate provides helpers for:
|
||||
|
||||
- Parsing input arguments
|
||||
- Creating geometry data
|
||||
- Concatenating output vectors
|
||||
|
||||
### Node Registration
|
||||
|
||||
Nodes are:
|
||||
|
||||
1. Compiled to WASM files in `target/wasm32-unknown-unknown/release/`
|
||||
2. Copied to `app/static/nodes/` for serving
|
||||
3. Registered in the browser via IndexedDB using the registry package
|
||||
|
||||
## Key Dependencies
|
||||
|
||||
**Frontend:**
|
||||
|
||||
- `@sveltejs/kit` - Application framework
|
||||
- `@threlte/core` & `@threlte/extras` - Three.js Svelte integration
|
||||
- `three` - 3D graphics library
|
||||
- `tailwindcss` - CSS framework
|
||||
- `comlink` - WebWorker RPC
|
||||
- `idb` - IndexedDB wrapper
|
||||
- `wabt` - WebAssembly binary toolkit
|
||||
|
||||
**Rust/WASM:**
|
||||
|
||||
- Language: Rust (compiled with plain cargo)
|
||||
- Output: WebAssembly (wasm32-unknown-unknown target)
|
||||
- Generic WASM wrapper for language-agnostic node development
|
||||
- `glam` - Math library (Vec2, Vec3, Mat4, etc.)
|
||||
- `nodarium_macros` - Custom procedural macros
|
||||
- `nodarium_utils` - Shared node utilities
|
||||
|
||||
## Build Commands
|
||||
|
||||
From root directory:
|
||||
|
||||
```bash
|
||||
# Install dependencies
|
||||
pnpm i
|
||||
|
||||
# Build all WASM nodes (compiles Rust, copies to app/static)
|
||||
pnpm build:nodes
|
||||
|
||||
# Build the app (builds UI library + SvelteKit app)
|
||||
pnpm build:app
|
||||
|
||||
# Full build (nodes + app)
|
||||
pnpm build
|
||||
|
||||
# Development
|
||||
pnpm dev # Run all dev commands in parallel
|
||||
pnpm dev:nodes # Watch nodes/, auto-rebuild on changes
|
||||
pnpm dev:app_ui # Watch app and UI package
|
||||
pnpm dev_ui # Watch UI package only
|
||||
```
|
||||
|
||||
## Workspace Packages
|
||||
|
||||
The project uses pnpm workspaces with the following packages:
|
||||
|
||||
| Package | Location | Purpose |
|
||||
| ------------------ | ------------------ | ------------------------------ |
|
||||
| @nodarium/app | app/ | Main SvelteKit application |
|
||||
| @nodarium/ui | packages/ui/ | Reusable UI component library |
|
||||
| @nodarium/registry | packages/registry/ | Node registry with persistence |
|
||||
| @nodarium/types | packages/types/ | Shared TypeScript types |
|
||||
| @nodarium/utils | packages/utils/ | Shared utilities |
|
||||
| nodarium macros | packages/macros/ | Rust procedural macros |
|
||||
|
||||
## Configuration Files
|
||||
|
||||
- `.dprint.jsonc` - Dprint formatter configuration
|
||||
- `svelte.config.js` - SvelteKit configuration (app and ui)
|
||||
- `vite.config.ts` - Vite bundler configuration
|
||||
- `tsconfig.json` - TypeScript configuration (app and packages)
|
||||
- `Cargo.toml` - Rust workspace with member packages
|
||||
- `flake.nix` - Nix development environment
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Adding a New Node
|
||||
|
||||
1. Copy the `.template` directory in `nodes/max/plantarium/` to create a new node directory
|
||||
2. Define node in `src/definition.json`
|
||||
3. Implement logic in `src/lib.rs`
|
||||
4. Build with `cargo build --release --target wasm32-unknown-unknown`
|
||||
5. Test by dragging onto the node graph
|
||||
|
||||
### Modifying UI Components
|
||||
|
||||
1. Changes to `packages/ui/` automatically rebuild with watch mode
|
||||
2. App imports from `@nodarium/ui`
|
||||
3. Run `pnpm dev:app_ui` for hot reload
|
||||
|
||||
## Important Notes for AI Agents
|
||||
|
||||
1. **WASM Compilation**: Nodes require `wasm32-unknown-unknown` target (`rustup target add wasm32-unknown-unknown`)
|
||||
2. **Cross-Compilation**: WASM build happens on host, not in containers/VMs
|
||||
3. **Static Serving**: Compiled WASM files must exist in `app/static/nodes/` before dev server runs
|
||||
4. **Workspace Dependencies**: Use `workspace:*` protocol for internal packages
|
||||
5. **Threlte Version**: Uses Threlte 8.x, not 7.x (important for 3D component APIs)
|
||||
6. **Svelte 5**: Project uses Svelte 5 with runes (`$state`, `$derived`, `$effect`)
|
||||
7. **Tailwind 4**: Uses Tailwind CSS v4 with `@tailwindcss/vite` plugin
|
||||
8. **IndexedDB**: Registry uses IDB for persistent node storage in browser
|
||||
@@ -1,294 +0,0 @@
|
||||
# Node Compilation and Runtime Execution
|
||||
|
||||
## Overview
|
||||
|
||||
Nodarium nodes are WebAssembly modules written in Rust. Each node is a compiled WASM binary that exposes a standardized C ABI interface. The system uses procedural macros to generate the necessary boilerplate for node definitions, memory management, and execution.
|
||||
|
||||
## Node Compilation
|
||||
|
||||
### 1. Node Definition (JSON)
|
||||
|
||||
Each node has a `src/input.json` file that defines:
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "max/plantarium/stem",
|
||||
"meta": { "description": "Creates a stem" },
|
||||
"outputs": ["path"],
|
||||
"inputs": {
|
||||
"origin": { "type": "vec3", "value": [0, 0, 0], "external": true },
|
||||
"amount": { "type": "integer", "value": 1, "min": 1, "max": 64 },
|
||||
"length": { "type": "float", "value": 5 },
|
||||
"thickness": { "type": "float", "value": 0.2 }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Procedural Macros
|
||||
|
||||
The `nodarium_macros` crate provides two procedural macros:
|
||||
|
||||
#### `#[nodarium_execute]`
|
||||
|
||||
Transforms a Rust function into a WASM-compatible entry point:
|
||||
|
||||
```rust
|
||||
#[nodarium_execute]
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
// Node logic here
|
||||
}
|
||||
```
|
||||
|
||||
The macro generates:
|
||||
- **C ABI wrapper**: Converts the WASM interface to a standard C FFI
|
||||
- **`execute` function**: Takes `(ptr: *const i32, len: usize)` and returns `*mut i32`
|
||||
- **Memory allocation**: `__alloc(len: usize) -> *mut i32` for buffer allocation
|
||||
- **Memory deallocation**: `__free(ptr: *mut i32, len: usize)` for cleanup
|
||||
- **Static output buffer**: `OUTPUT_BUFFER` for returning results
|
||||
- **Panic hook**: Routes panics through `host_log_panic` for debugging
|
||||
- **Internal logic wrapper**: Wraps the original function
|
||||
|
||||
#### `nodarium_definition_file!("path")`
|
||||
|
||||
Embeds the node definition JSON into the WASM binary:
|
||||
|
||||
```rust
|
||||
nodarium_definition_file!("src/input.json");
|
||||
```
|
||||
|
||||
Generates:
|
||||
- **`DEFINITION_DATA`**: Static byte array in `nodarium_definition` section
|
||||
- **`get_definition_ptr()`**: Returns pointer to definition data
|
||||
- **`get_definition_len()`**: Returns length of definition data
|
||||
|
||||
### 3. Build Process
|
||||
|
||||
Nodes are compiled with:
|
||||
```bash
|
||||
cargo build --release --target wasm32-unknown-unknown
|
||||
```
|
||||
|
||||
The resulting `.wasm` files are copied to `app/static/nodes/` for serving.
|
||||
|
||||
## Node Execution Runtime
|
||||
|
||||
### Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────┐
|
||||
│ WebWorker Thread │
|
||||
│ ┌─────────────────────────────────────────────────────────┐│
|
||||
│ │ WorkerRuntimeExecutor ││
|
||||
│ │ ┌───────────────────────────────────────────────────┐ ││
|
||||
│ │ │ MemoryRuntimeExecutor ││
|
||||
│ │ │ ┌─────────────────────────────────────────────┐ ││
|
||||
│ │ │ │ Node Registry (WASM + Definitions) ││
|
||||
│ │ │ └─────────────────────────────────────────────┘ ││
|
||||
│ │ │ ┌─────────────────────────────────────────────┐ ││
|
||||
│ │ │ │ Execution Engine (Bottom-Up Evaluation) ││
|
||||
│ │ │ └─────────────────────────────────────────────┘ ││
|
||||
│ │ └───────────────────────────────────────────────────┘ ││
|
||||
│ └─────────────────────────────────────────────────────────┘│
|
||||
└─────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### 1. MemoryRuntimeExecutor
|
||||
|
||||
The core execution engine in `runtime-executor.ts`:
|
||||
|
||||
#### Metadata Collection (`addMetaData`)
|
||||
|
||||
1. Load node definitions from registry
|
||||
2. Build parent/child relationships from graph edges
|
||||
3. Calculate execution depth via reverse BFS from output node
|
||||
|
||||
#### Node Sorting
|
||||
|
||||
Nodes are sorted by depth (highest depth first) for bottom-up execution:
|
||||
|
||||
```
|
||||
Depth 3: n3 n6
|
||||
Depth 2: n2 n4 n5
|
||||
Depth 1: n1
|
||||
Depth 0: Output
|
||||
Execution order: n3, n6, n2, n4, n5, n1, Output
|
||||
```
|
||||
|
||||
#### Input Collection
|
||||
|
||||
For each node, inputs are gathered from:
|
||||
1. **Connected nodes**: Results from parent nodes in the graph
|
||||
2. **Node props**: Values stored directly on the node instance
|
||||
3. **Settings**: Global settings mapped via `setting` property
|
||||
4. **Defaults**: Values from node definition
|
||||
|
||||
#### Input Encoding
|
||||
|
||||
Values are encoded as `Int32Array`:
|
||||
- **Floats**: IEEE 754 bits cast to i32
|
||||
- **Vectors**: `[0, count, v1, v2, v3, 1, 1]` (nested bracket format)
|
||||
- **Booleans**: `0` or `1`
|
||||
- **Integers**: Direct i32 value
|
||||
|
||||
#### Caching
|
||||
|
||||
Results are cached using:
|
||||
```typescript
|
||||
inputHash = `node-${node.id}-${fastHashArrayBuffer(encoded_inputs)}`
|
||||
```
|
||||
|
||||
The cache uses LRU eviction (default size: 50 entries).
|
||||
|
||||
### 2. Execution Flow
|
||||
|
||||
```typescript
|
||||
async execute(graph: Graph, settings) {
|
||||
// 1. Load definitions and build node relationships
|
||||
const [outputNode, nodes] = await this.addMetaData(graph);
|
||||
|
||||
// 2. Sort nodes by depth (bottom-up)
|
||||
const sortedNodes = nodes.sort((a, b) => b.depth - a.depth);
|
||||
|
||||
// 3. Execute each node
|
||||
for (const node of sortedNodes) {
|
||||
const inputs = this.collectInputs(node, settings);
|
||||
const encoded = concatEncodedArrays(inputs);
|
||||
const result = nodeType.execute(encoded);
|
||||
this.results[node.id] = result;
|
||||
}
|
||||
|
||||
// 4. Return output node result
|
||||
return this.results[outputNode.id];
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Worker Isolation
|
||||
|
||||
`WorkerRuntimeExecutor` runs execution in a WebWorker via Comlink:
|
||||
|
||||
```typescript
|
||||
class WorkerRuntimeExecutor implements RuntimeExecutor {
|
||||
private worker = new ComlinkWorker(...);
|
||||
|
||||
async execute(graph, settings) {
|
||||
return this.worker.executeGraph(graph, settings);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The worker backend (`worker-runtime-executor-backend.ts`):
|
||||
- Creates a single `MemoryRuntimeExecutor` instance
|
||||
- Manages caching state
|
||||
- Collects performance metrics
|
||||
|
||||
### 4. Remote Execution (Optional)
|
||||
|
||||
`RemoteRuntimeExecutor` can execute graphs on a remote server:
|
||||
|
||||
```typescript
|
||||
class RemoteRuntimeExecutor implements RuntimeExecutor {
|
||||
async execute(graph, settings) {
|
||||
const res = await fetch(this.url, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ graph, settings })
|
||||
});
|
||||
return new Int32Array(await res.arrayBuffer());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Data Encoding Format
|
||||
|
||||
### Bracket Notation
|
||||
|
||||
Inputs and outputs use a nested bracket encoding:
|
||||
|
||||
```
|
||||
[0, count, item1, item2, ..., 1, 1]
|
||||
^ ^ items ^ ^
|
||||
| | | |
|
||||
| | | +-- closing bracket
|
||||
| +-- number of items + 1 |
|
||||
+-- opening bracket (0) +-- closing bracket (1)
|
||||
```
|
||||
|
||||
### Example Encodings
|
||||
|
||||
**Float (5.0)**:
|
||||
```typescript
|
||||
encodeFloat(5.0) // → 1084227584 (IEEE 754 bits as i32)
|
||||
```
|
||||
|
||||
**Vec3 ([1, 2, 3])**:
|
||||
```typescript
|
||||
[0, 4, encodeFloat(1), encodeFloat(2), encodeFloat(3), 1, 1]
|
||||
```
|
||||
|
||||
**Nested Math Expression**:
|
||||
```
|
||||
[0, 3, 0, 2, 0, 3, 0, 0, 0, 3, 7549747, 127, 1, 1, ...]
|
||||
```
|
||||
|
||||
### Decoding Utilities
|
||||
|
||||
From `packages/utils/src/tree.rs`:
|
||||
- `split_args()`: Parses nested bracket arrays into segments
|
||||
- `evaluate_float()`: Recursively evaluates and decodes float expressions
|
||||
- `evaluate_int()`: Evaluates integer/math node expressions
|
||||
- `evaluate_vec3()`: Decodes vec3 arrays
|
||||
|
||||
## Geometry Data Format
|
||||
|
||||
### Path Data
|
||||
|
||||
Paths represent procedural plant structures:
|
||||
|
||||
```
|
||||
[0, count, [0, header_size, node_type, depth, x, y, z, w, ...], 1, 1]
|
||||
```
|
||||
|
||||
Each point has 4 values: x, y, z position + thickness (w).
|
||||
|
||||
### Geometry Data
|
||||
|
||||
Meshes use a similar format with vertices and face indices.
|
||||
|
||||
## Performance Tracking
|
||||
|
||||
The runtime collects detailed performance metrics:
|
||||
- `collect-metadata`: Time to build node graph
|
||||
- `collected-inputs`: Time to gather inputs
|
||||
- `encoded-inputs`: Time to encode inputs
|
||||
- `hash-inputs`: Time to compute cache hash
|
||||
- `cache-hit`: 1 if cache hit, 0 if miss
|
||||
- `node/{node_type}`: Time per node execution
|
||||
|
||||
## Caching Strategy
|
||||
|
||||
### MemoryRuntimeCache
|
||||
|
||||
LRU cache implementation:
|
||||
```typescript
|
||||
class MemoryRuntimeCache {
|
||||
private map = new Map<string, unknown>();
|
||||
size: number = 50;
|
||||
|
||||
get(key) { /* move to front */ }
|
||||
set(key, value) { /* evict oldest if at capacity */ }
|
||||
}
|
||||
```
|
||||
|
||||
### IndexDBCache
|
||||
|
||||
For persistence across sessions, the registry uses IndexedDB caching.
|
||||
|
||||
## Summary
|
||||
|
||||
The Nodarium node system works as follows:
|
||||
|
||||
1. **Compilation**: Rust functions are decorated with macros that generate C ABI WASM exports
|
||||
2. **Registration**: Node definitions are embedded in WASM and loaded at runtime
|
||||
3. **Graph Analysis**: Runtime builds node relationships and execution order
|
||||
4. **Bottom-Up Execution**: Nodes execute from leaves to output
|
||||
5. **Caching**: Results are cached per-node-inputs hash for performance
|
||||
6. **Isolation**: Execution runs in a WebWorker to prevent main thread blocking
|
||||
2
app/.gitignore
vendored
2
app/.gitignore
vendored
@@ -27,5 +27,3 @@ dist-ssr
|
||||
*.sln
|
||||
*.sw?
|
||||
build/
|
||||
|
||||
test-results/
|
||||
|
||||
@@ -28,6 +28,5 @@ RUN rm /etc/nginx/conf.d/default.conf
|
||||
COPY app/docker/app.conf /etc/nginx/conf.d/app.conf
|
||||
|
||||
COPY --from=builder /app/app/build /app
|
||||
COPY --from=builder /app/packages/ui/build /app/ui
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
@@ -1,62 +0,0 @@
|
||||
import { expect, test } from '@playwright/test';
|
||||
|
||||
test('test', async ({ page }) => {
|
||||
// Listen for console messages
|
||||
page.on('console', msg => {
|
||||
console.log(`[Browser Console] ${msg.type()}: ${msg.text()}`);
|
||||
});
|
||||
|
||||
await page.goto('http://localhost:4173', { waitUntil: 'load' });
|
||||
|
||||
// await expect(page).toHaveScreenshot();
|
||||
await expect(page.locator('.graph-wrapper')).toHaveScreenshot();
|
||||
|
||||
await page.getByRole('button', { name: 'projects' }).click();
|
||||
await page.getByRole('button', { name: 'New', exact: true }).click();
|
||||
await page.getByRole('combobox').selectOption('2');
|
||||
await page.getByRole('textbox', { name: 'Project name' }).click();
|
||||
await page.getByRole('textbox', { name: 'Project name' }).fill('Test Project');
|
||||
await page.getByRole('button', { name: 'Create' }).click();
|
||||
|
||||
const expectedNodes = [
|
||||
{
|
||||
id: '10',
|
||||
type: 'max/plantarium/stem',
|
||||
props: {
|
||||
amount: 50,
|
||||
length: 4,
|
||||
thickness: 1
|
||||
}
|
||||
},
|
||||
{
|
||||
id: '11',
|
||||
type: 'max/plantarium/noise',
|
||||
props: {
|
||||
scale: 0.5,
|
||||
strength: 5
|
||||
}
|
||||
},
|
||||
{
|
||||
id: '9',
|
||||
type: 'max/plantarium/output'
|
||||
}
|
||||
];
|
||||
|
||||
for (const node of expectedNodes) {
|
||||
const wrapper = page.locator(
|
||||
`div.wrapper[data-node-id="${node.id}"][data-node-type="${node.type}"]`
|
||||
);
|
||||
await expect(wrapper).toBeVisible();
|
||||
if ('props' in node) {
|
||||
const props = node.props as unknown as Record<string, number>;
|
||||
for (const propId in node.props) {
|
||||
const expectedValue = props[propId];
|
||||
const inputElement = page.locator(
|
||||
`div.wrapper[data-node-type="${node.type}"][data-node-input="${propId}"] input[type="number"]`
|
||||
);
|
||||
const value = parseFloat(await inputElement.inputValue());
|
||||
expect(value).toBe(expectedValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 42 KiB |
@@ -1,15 +1,12 @@
|
||||
{
|
||||
"name": "@nodarium/app",
|
||||
"private": true,
|
||||
"version": "0.0.3",
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite dev",
|
||||
"predev": "rm static/CHANGELOG.md && ln -s ../../CHANGELOG.md static/CHANGELOG.md",
|
||||
"build": "svelte-kit sync && vite build",
|
||||
"test:unit": "vitest",
|
||||
"test": "npm run test:unit -- --run && npm run test:e2e",
|
||||
"test:e2e": "playwright test",
|
||||
"test": "vitest",
|
||||
"preview": "vite preview",
|
||||
"format": "dprint fmt -c '../.dprint.jsonc' .",
|
||||
"format:check": "dprint check -c '../.dprint.jsonc' .",
|
||||
@@ -19,7 +16,7 @@
|
||||
"dependencies": {
|
||||
"@nodarium/ui": "workspace:*",
|
||||
"@nodarium/utils": "workspace:*",
|
||||
"@sveltejs/kit": "^2.50.2",
|
||||
"@sveltejs/kit": "^2.50.0",
|
||||
"@tailwindcss/vite": "^4.1.18",
|
||||
"@threlte/core": "8.3.1",
|
||||
"@threlte/extras": "9.7.1",
|
||||
@@ -27,29 +24,27 @@
|
||||
"file-saver": "^2.0.5",
|
||||
"idb": "^8.0.3",
|
||||
"jsondiffpatch": "^0.7.3",
|
||||
"micromark": "^4.0.2",
|
||||
"tailwindcss": "^4.1.18",
|
||||
"three": "^0.182.0"
|
||||
"three": "^0.182.0",
|
||||
"wabt": "^1.0.39"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/compat": "^2.0.2",
|
||||
"@eslint/js": "^9.39.2",
|
||||
"@iconify-json/tabler": "^1.2.26",
|
||||
"@iconify/tailwind4": "^1.2.1",
|
||||
"@nodarium/types": "workspace:^",
|
||||
"@playwright/test": "^1.58.1",
|
||||
"@nodarium/types": "workspace:",
|
||||
"@sveltejs/adapter-static": "^3.0.10",
|
||||
"@sveltejs/vite-plugin-svelte": "^6.2.4",
|
||||
"@tsconfig/svelte": "^5.0.7",
|
||||
"@tsconfig/svelte": "^5.0.6",
|
||||
"@types/file-saver": "^2.0.7",
|
||||
"@types/three": "^0.182.0",
|
||||
"@vitest/browser-playwright": "^4.0.18",
|
||||
"dprint": "^0.51.1",
|
||||
"eslint": "^9.39.2",
|
||||
"eslint-plugin-svelte": "^3.14.0",
|
||||
"globals": "^17.3.0",
|
||||
"svelte": "^5.49.2",
|
||||
"svelte-check": "^4.3.6",
|
||||
"svelte": "^5.46.4",
|
||||
"svelte-check": "^4.3.5",
|
||||
"tslib": "^2.8.1",
|
||||
"typescript": "^5.9.3",
|
||||
"typescript-eslint": "^8.54.0",
|
||||
@@ -57,7 +52,6 @@
|
||||
"vite-plugin-comlink": "^5.3.0",
|
||||
"vite-plugin-glsl": "^1.5.5",
|
||||
"vite-plugin-wasm": "^3.5.0",
|
||||
"vitest": "^4.0.18",
|
||||
"vitest-browser-svelte": "^2.0.2"
|
||||
"vitest": "^4.0.17"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
import { defineConfig } from '@playwright/test';
|
||||
|
||||
export default defineConfig({
|
||||
webServer: { command: 'pnpm build && pnpm preview', port: 4173 },
|
||||
testDir: 'e2e',
|
||||
use: {
|
||||
browserName: 'firefox',
|
||||
launchOptions: {
|
||||
firefoxUserPrefs: {
|
||||
// Force WebGL even without a GPU
|
||||
'webgl.force-enabled': true,
|
||||
'webgl.disabled': false,
|
||||
// Use software rendering (Mesa) instead of hardware
|
||||
'layers.acceleration.disabled': true,
|
||||
'gfx.webrender.software': true,
|
||||
'webgl.enable-webgl2': true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -2,7 +2,7 @@
|
||||
@source "../../packages/ui/**/*.svelte";
|
||||
@plugin "@iconify/tailwind4" {
|
||||
prefix: "i";
|
||||
icon-sets: from-folder("custom", "./src/lib/icons");
|
||||
icon-sets: from-folder(custom, "./src/lib/icons");
|
||||
}
|
||||
|
||||
body * {
|
||||
|
||||
@@ -11,7 +11,6 @@ uniform vec3 camPos;
|
||||
uniform vec2 zoomLimits;
|
||||
uniform vec3 backgroundColor;
|
||||
uniform vec3 lineColor;
|
||||
uniform int gridType; // 0 = grid lines, 1 = dots
|
||||
|
||||
// Anti-aliased step: threshold in the same units as `value`
|
||||
float aaStep(float threshold, float value, float deriv) {
|
||||
@@ -79,51 +78,35 @@ void main(void) {
|
||||
float ux = (vUv.x - 0.5) * width + cx * cz;
|
||||
float uy = (vUv.y - 0.5) * height - cy * cz;
|
||||
|
||||
if(gridType == 0) {
|
||||
// extra small grid
|
||||
float m1 = grid(ux, uy, divisions * 4.0, thickness * 4.0) * 0.9;
|
||||
float m2 = grid(ux, uy, divisions * 16.0, thickness * 16.0) * 0.5;
|
||||
float xsmall = max(m1, m2);
|
||||
|
||||
float s3 = circle_grid(ux, uy, cz / 1.6, 1.0) * 0.5;
|
||||
xsmall = max(xsmall, s3);
|
||||
// extra small grid
|
||||
float m1 = grid(ux, uy, divisions * 4.0, thickness * 4.0) * 0.9;
|
||||
float m2 = grid(ux, uy, divisions * 16.0, thickness * 16.0) * 0.5;
|
||||
float xsmall = max(m1, m2);
|
||||
|
||||
float s3 = circle_grid(ux, uy, cz / 1.6, 1.0) * 0.5;
|
||||
xsmall = max(xsmall, s3);
|
||||
|
||||
// small grid
|
||||
float c1 = grid(ux, uy, divisions, thickness) * 0.6;
|
||||
float c2 = grid(ux, uy, divisions * 2.0, thickness * 2.0) * 0.5;
|
||||
float small = max(c1, c2);
|
||||
// small grid
|
||||
float c1 = grid(ux, uy, divisions, thickness) * 0.6;
|
||||
float c2 = grid(ux, uy, divisions * 2.0, thickness * 2.0) * 0.5;
|
||||
float small = max(c1, c2);
|
||||
|
||||
float s1 = circle_grid(ux, uy, cz * 10.0, 2.0) * 0.5;
|
||||
small = max(small, s1);
|
||||
float s1 = circle_grid(ux, uy, cz * 10.0, 2.0) * 0.5;
|
||||
small = max(small, s1);
|
||||
|
||||
// large grid
|
||||
float c3 = grid(ux, uy, divisions / 8.0, thickness / 8.0) * 0.5;
|
||||
float c4 = grid(ux, uy, divisions / 2.0, thickness / 4.0) * 0.4;
|
||||
float large = max(c3, c4);
|
||||
// large grid
|
||||
float c3 = grid(ux, uy, divisions / 8.0, thickness / 8.0) * 0.5;
|
||||
float c4 = grid(ux, uy, divisions / 2.0, thickness / 4.0) * 0.4;
|
||||
float large = max(c3, c4);
|
||||
|
||||
float s2 = circle_grid(ux, uy, cz * 20.0, 1.0) * 0.4;
|
||||
large = max(large, s2);
|
||||
float s2 = circle_grid(ux, uy, cz * 20.0, 1.0) * 0.4;
|
||||
large = max(large, s2);
|
||||
|
||||
float c = mix(large, small, min(nz * 2.0 + 0.05, 1.0));
|
||||
c = mix(c, xsmall, clamp((nz - 0.3) / 0.7, 0.0, 1.0));
|
||||
float c = mix(large, small, min(nz * 2.0 + 0.05, 1.0));
|
||||
c = mix(c, xsmall, clamp((nz - 0.3) / 0.7, 0.0, 1.0));
|
||||
|
||||
vec3 color = mix(backgroundColor, lineColor, c);
|
||||
|
||||
gl_FragColor = vec4(color, 1.0);
|
||||
} else {
|
||||
float large = circle_grid(ux, uy, cz * 20.0, 1.0) * 0.4;
|
||||
|
||||
float medium = circle_grid(ux, uy, cz * 10.0, 1.0) * 0.6;
|
||||
|
||||
float small = circle_grid(ux, uy, cz * 2.5, 1.0) * 0.8;
|
||||
|
||||
float c = mix(large, medium, min(nz * 2.0 + 0.05, 1.0));
|
||||
c = mix(c, small, clamp((nz - 0.3) / 0.7, 0.0, 1.0));
|
||||
|
||||
vec3 color = mix(backgroundColor, lineColor, c);
|
||||
|
||||
gl_FragColor = vec4(color, 1.0);
|
||||
}
|
||||
vec3 color = mix(backgroundColor, lineColor, c);
|
||||
|
||||
gl_FragColor = vec4(color, 1.0);
|
||||
}
|
||||
|
||||
|
||||
@@ -6,12 +6,11 @@
|
||||
import BackgroundVert from './Background.vert';
|
||||
|
||||
type Props = {
|
||||
minZoom?: number;
|
||||
maxZoom?: number;
|
||||
cameraPosition?: [number, number, number];
|
||||
width?: number;
|
||||
height?: number;
|
||||
type?: 'grid' | 'dots' | 'none';
|
||||
minZoom: number;
|
||||
maxZoom: number;
|
||||
cameraPosition: [number, number, number];
|
||||
width: number;
|
||||
height: number;
|
||||
};
|
||||
|
||||
let {
|
||||
@@ -19,18 +18,9 @@
|
||||
maxZoom = 150,
|
||||
cameraPosition = [0, 1, 0],
|
||||
width = globalThis?.innerWidth || 100,
|
||||
height = globalThis?.innerHeight || 100,
|
||||
type = 'grid'
|
||||
height = globalThis?.innerHeight || 100
|
||||
}: Props = $props();
|
||||
|
||||
const typeMap = new Map([
|
||||
['grid', 0],
|
||||
['dots', 1],
|
||||
['none', 2]
|
||||
]);
|
||||
|
||||
const gridType = $derived(typeMap.get(type) || 0);
|
||||
|
||||
let bw = $derived(width / cameraPosition[2]);
|
||||
let bh = $derived(height / cameraPosition[2]);
|
||||
</script>
|
||||
@@ -61,9 +51,6 @@
|
||||
},
|
||||
dimensions: {
|
||||
value: [100, 100]
|
||||
},
|
||||
gridType: {
|
||||
value: 0
|
||||
}
|
||||
}}
|
||||
uniforms.camPos.value={cameraPosition}
|
||||
@@ -72,7 +59,6 @@
|
||||
uniforms.lineColor.value={appSettings.value.theme && colors['outline']}
|
||||
uniforms.zoomLimits.value={[minZoom, maxZoom]}
|
||||
uniforms.dimensions.value={[width, height]}
|
||||
uniforms.gridType.value={gridType}
|
||||
/>
|
||||
</T.Mesh>
|
||||
</T.Group>
|
||||
|
||||
@@ -2,19 +2,19 @@
|
||||
import { colors } from '../graph/colors.svelte';
|
||||
|
||||
const circleMaterial = new MeshBasicMaterial({
|
||||
color: colors.outline.clone(),
|
||||
color: colors.edge.clone(),
|
||||
toneMapped: false
|
||||
});
|
||||
|
||||
let lineColor = $state(colors.outline.clone().convertSRGBToLinear());
|
||||
let lineColor = $state(colors.edge.clone().convertSRGBToLinear());
|
||||
|
||||
$effect.root(() => {
|
||||
$effect(() => {
|
||||
if (appSettings.value.theme === undefined) {
|
||||
return;
|
||||
}
|
||||
circleMaterial.color = colors.outline.clone().convertSRGBToLinear();
|
||||
lineColor = colors.outline.clone().convertSRGBToLinear();
|
||||
circleMaterial.color = colors.edge.clone().convertSRGBToLinear();
|
||||
lineColor = colors.edge.clone().convertSRGBToLinear();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,265 +0,0 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { GraphManager } from './graph-manager.svelte';
|
||||
import {
|
||||
createMockNodeRegistry,
|
||||
mockFloatInputNode,
|
||||
mockFloatOutputNode,
|
||||
mockGeometryOutputNode,
|
||||
mockPathInputNode,
|
||||
mockVec3OutputNode
|
||||
} from './test-utils';
|
||||
|
||||
describe('GraphManager', () => {
|
||||
describe('getPossibleSockets', () => {
|
||||
describe('when dragging an output socket', () => {
|
||||
it('should return compatible input sockets based on type', () => {
|
||||
const registry = createMockNodeRegistry([
|
||||
mockFloatOutputNode,
|
||||
mockFloatInputNode,
|
||||
mockGeometryOutputNode,
|
||||
mockPathInputNode
|
||||
]);
|
||||
|
||||
const manager = new GraphManager(registry);
|
||||
|
||||
const floatInputNode = manager.createNode({
|
||||
type: 'test/node/input',
|
||||
position: [100, 100],
|
||||
props: {}
|
||||
});
|
||||
|
||||
const floatOutputNode = manager.createNode({
|
||||
type: 'test/node/output',
|
||||
position: [0, 0],
|
||||
props: {}
|
||||
});
|
||||
|
||||
expect(floatInputNode).toBeDefined();
|
||||
expect(floatOutputNode).toBeDefined();
|
||||
|
||||
const possibleSockets = manager.getPossibleSockets({
|
||||
node: floatOutputNode!,
|
||||
index: 0,
|
||||
position: [0, 0]
|
||||
});
|
||||
|
||||
expect(possibleSockets.length).toBe(1);
|
||||
const socketNodeIds = possibleSockets.map(([node]) => node.id);
|
||||
expect(socketNodeIds).toContain(floatInputNode!.id);
|
||||
});
|
||||
|
||||
it('should exclude self node from possible sockets', () => {
|
||||
const registry = createMockNodeRegistry([
|
||||
mockFloatOutputNode,
|
||||
mockFloatInputNode
|
||||
]);
|
||||
|
||||
const manager = new GraphManager(registry);
|
||||
|
||||
const floatInputNode = manager.createNode({
|
||||
type: 'test/node/input',
|
||||
position: [100, 100],
|
||||
props: {}
|
||||
});
|
||||
|
||||
expect(floatInputNode).toBeDefined();
|
||||
|
||||
const possibleSockets = manager.getPossibleSockets({
|
||||
node: floatInputNode!,
|
||||
index: 'value',
|
||||
position: [0, 0]
|
||||
});
|
||||
|
||||
const socketNodeIds = possibleSockets.map(([node]) => node.id);
|
||||
expect(socketNodeIds).not.toContain(floatInputNode!.id);
|
||||
});
|
||||
|
||||
it('should exclude parent nodes from possible sockets when dragging output', () => {
|
||||
const registry = createMockNodeRegistry([
|
||||
mockFloatOutputNode,
|
||||
mockFloatInputNode
|
||||
]);
|
||||
|
||||
const manager = new GraphManager(registry);
|
||||
|
||||
const parentNode = manager.createNode({
|
||||
type: 'test/node/output',
|
||||
position: [0, 0],
|
||||
props: {}
|
||||
});
|
||||
|
||||
const childNode = manager.createNode({
|
||||
type: 'test/node/input',
|
||||
position: [100, 100],
|
||||
props: {}
|
||||
});
|
||||
|
||||
expect(parentNode).toBeDefined();
|
||||
expect(childNode).toBeDefined();
|
||||
|
||||
if (parentNode && childNode) {
|
||||
manager.createEdge(parentNode, 0, childNode, 'value');
|
||||
}
|
||||
|
||||
const possibleSockets = manager.getPossibleSockets({
|
||||
node: parentNode!,
|
||||
index: 0,
|
||||
position: [0, 0]
|
||||
});
|
||||
|
||||
const socketNodeIds = possibleSockets.map(([node]) => node.id);
|
||||
expect(socketNodeIds).not.toContain(childNode!.id);
|
||||
});
|
||||
|
||||
it('should return sockets compatible with accepts property', () => {
|
||||
const registry = createMockNodeRegistry([
|
||||
mockGeometryOutputNode,
|
||||
mockPathInputNode
|
||||
]);
|
||||
|
||||
const manager = new GraphManager(registry);
|
||||
|
||||
const geometryOutputNode = manager.createNode({
|
||||
type: 'test/node/geometry',
|
||||
position: [0, 0],
|
||||
props: {}
|
||||
});
|
||||
|
||||
const pathInputNode = manager.createNode({
|
||||
type: 'test/node/path',
|
||||
position: [100, 100],
|
||||
props: {}
|
||||
});
|
||||
|
||||
expect(geometryOutputNode).toBeDefined();
|
||||
expect(pathInputNode).toBeDefined();
|
||||
|
||||
const possibleSockets = manager.getPossibleSockets({
|
||||
node: geometryOutputNode!,
|
||||
index: 0,
|
||||
position: [0, 0]
|
||||
});
|
||||
|
||||
const socketNodeIds = possibleSockets.map(([node]) => node.id);
|
||||
expect(socketNodeIds).toContain(pathInputNode!.id);
|
||||
});
|
||||
|
||||
it('should return empty array when no compatible sockets exist', () => {
|
||||
const registry = createMockNodeRegistry([
|
||||
mockVec3OutputNode,
|
||||
mockFloatInputNode
|
||||
]);
|
||||
|
||||
const manager = new GraphManager(registry);
|
||||
|
||||
const vec3OutputNode = manager.createNode({
|
||||
type: 'test/node/vec3',
|
||||
position: [0, 0],
|
||||
props: {}
|
||||
});
|
||||
|
||||
const floatInputNode = manager.createNode({
|
||||
type: 'test/node/input',
|
||||
position: [100, 100],
|
||||
props: {}
|
||||
});
|
||||
|
||||
expect(vec3OutputNode).toBeDefined();
|
||||
expect(floatInputNode).toBeDefined();
|
||||
|
||||
const possibleSockets = manager.getPossibleSockets({
|
||||
node: vec3OutputNode!,
|
||||
index: 0,
|
||||
position: [0, 0]
|
||||
});
|
||||
|
||||
const socketNodeIds = possibleSockets.map(([node]) => node.id);
|
||||
expect(socketNodeIds).not.toContain(floatInputNode!.id);
|
||||
expect(possibleSockets.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should return socket info with correct socket key for inputs', () => {
|
||||
const registry = createMockNodeRegistry([
|
||||
mockFloatOutputNode,
|
||||
mockFloatInputNode
|
||||
]);
|
||||
|
||||
const manager = new GraphManager(registry);
|
||||
|
||||
const floatOutputNode = manager.createNode({
|
||||
type: 'test/node/output',
|
||||
position: [0, 0],
|
||||
props: {}
|
||||
});
|
||||
|
||||
const floatInputNode = manager.createNode({
|
||||
type: 'test/node/input',
|
||||
position: [100, 100],
|
||||
props: {}
|
||||
});
|
||||
|
||||
expect(floatOutputNode).toBeDefined();
|
||||
expect(floatInputNode).toBeDefined();
|
||||
|
||||
const possibleSockets = manager.getPossibleSockets({
|
||||
node: floatOutputNode!,
|
||||
index: 0,
|
||||
position: [0, 0]
|
||||
});
|
||||
|
||||
const matchingSocket = possibleSockets.find(([node]) => node.id === floatInputNode!.id);
|
||||
expect(matchingSocket).toBeDefined();
|
||||
expect(matchingSocket![1]).toBe('value');
|
||||
});
|
||||
|
||||
it('should return multiple compatible sockets', () => {
|
||||
const registry = createMockNodeRegistry([
|
||||
mockFloatOutputNode,
|
||||
mockFloatInputNode,
|
||||
mockGeometryOutputNode,
|
||||
mockPathInputNode
|
||||
]);
|
||||
|
||||
const manager = new GraphManager(registry);
|
||||
|
||||
const floatOutputNode = manager.createNode({
|
||||
type: 'test/node/output',
|
||||
position: [0, 0],
|
||||
props: {}
|
||||
});
|
||||
|
||||
const geometryOutputNode = manager.createNode({
|
||||
type: 'test/node/geometry',
|
||||
position: [200, 0],
|
||||
props: {}
|
||||
});
|
||||
|
||||
const floatInputNode = manager.createNode({
|
||||
type: 'test/node/input',
|
||||
position: [100, 100],
|
||||
props: {}
|
||||
});
|
||||
|
||||
const pathInputNode = manager.createNode({
|
||||
type: 'test/node/path',
|
||||
position: [300, 100],
|
||||
props: {}
|
||||
});
|
||||
|
||||
expect(floatOutputNode).toBeDefined();
|
||||
expect(geometryOutputNode).toBeDefined();
|
||||
expect(floatInputNode).toBeDefined();
|
||||
expect(pathInputNode).toBeDefined();
|
||||
|
||||
const possibleSocketsForFloat = manager.getPossibleSockets({
|
||||
node: floatOutputNode!,
|
||||
index: 0,
|
||||
position: [0, 0]
|
||||
});
|
||||
|
||||
expect(possibleSocketsForFloat.length).toBe(1);
|
||||
expect(possibleSocketsForFloat.map(([n]) => n.id)).toContain(floatInputNode!.id);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -25,14 +25,14 @@ const clone = 'structuredClone' in self
|
||||
? self.structuredClone
|
||||
: (args: unknown) => JSON.parse(JSON.stringify(args));
|
||||
|
||||
export function areSocketsCompatible(
|
||||
function areSocketsCompatible(
|
||||
output: string | undefined,
|
||||
inputs: string | (string | undefined)[] | undefined
|
||||
) {
|
||||
if (Array.isArray(inputs) && output) {
|
||||
return inputs.includes('*') || inputs.includes(output);
|
||||
return inputs.includes(output);
|
||||
}
|
||||
return inputs === output || inputs === '*';
|
||||
return inputs === output;
|
||||
}
|
||||
|
||||
function areEdgesEqual(firstEdge: Edge, secondEdge: Edge) {
|
||||
@@ -268,7 +268,14 @@ export class GraphManager extends EventEmitter<{
|
||||
private _init(graph: Graph) {
|
||||
const nodes = new SvelteMap(
|
||||
graph.nodes.map((node) => {
|
||||
return [node.id, node as NodeInstance];
|
||||
const nodeType = this.registry.getNode(node.type);
|
||||
const n = node as NodeInstance;
|
||||
if (nodeType) {
|
||||
n.state = {
|
||||
type: nodeType
|
||||
};
|
||||
}
|
||||
return [node.id, n];
|
||||
})
|
||||
);
|
||||
|
||||
@@ -293,30 +300,6 @@ export class GraphManager extends EventEmitter<{
|
||||
this.execute();
|
||||
}
|
||||
|
||||
private async loadAllCollections() {
|
||||
// Fetch all nodes from all collections of the loaded nodes
|
||||
const nodeIds = Array.from(new Set([...this.graph.nodes.map((n) => n.type)]));
|
||||
const allCollections = new Set<`${string}/${string}`>();
|
||||
for (const id of nodeIds) {
|
||||
const [user, collection] = id.split('/');
|
||||
allCollections.add(`${user}/${collection}`);
|
||||
}
|
||||
|
||||
const allCollectionIds = await Promise
|
||||
.all([...allCollections]
|
||||
.map(async (collection) =>
|
||||
remoteRegistry
|
||||
.fetchCollection(collection)
|
||||
.then((collection: { nodes: { id: NodeId }[] }) => {
|
||||
return collection.nodes.map(n => n.id.replace(/\.wasm$/, '') as NodeId);
|
||||
})
|
||||
));
|
||||
|
||||
const missingNodeIds = [...new Set(allCollectionIds.flat())];
|
||||
|
||||
this.registry.load(missingNodeIds);
|
||||
}
|
||||
|
||||
async load(graph: Graph) {
|
||||
const a = performance.now();
|
||||
|
||||
@@ -401,9 +384,7 @@ export class GraphManager extends EventEmitter<{
|
||||
|
||||
this.loaded = true;
|
||||
logger.log(`Graph loaded in ${performance.now() - a}ms`);
|
||||
|
||||
setTimeout(() => this.execute(), 100);
|
||||
this.loadAllCollections(); // lazily load all nodes from all collections
|
||||
}
|
||||
|
||||
getAllNodes() {
|
||||
@@ -510,10 +491,10 @@ export class GraphManager extends EventEmitter<{
|
||||
const inputs = Object.entries(to.state?.type?.inputs ?? {});
|
||||
const outputs = from.state?.type?.outputs ?? [];
|
||||
for (let i = 0; i < inputs.length; i++) {
|
||||
const [inputName, input] = inputs[i];
|
||||
const [inputName, input] = inputs[0];
|
||||
for (let o = 0; o < outputs.length; o++) {
|
||||
const output = outputs[o];
|
||||
if (input.type === output || input.type === '*') {
|
||||
const output = outputs[0];
|
||||
if (input.type === output) {
|
||||
return this.createEdge(from, o, to, inputName);
|
||||
}
|
||||
}
|
||||
@@ -615,14 +596,11 @@ export class GraphManager extends EventEmitter<{
|
||||
return;
|
||||
}
|
||||
|
||||
const fromType = from.state.type || this.registry.getNode(from.type);
|
||||
const toType = to.state.type || this.registry.getNode(to.type);
|
||||
|
||||
// check if socket types match
|
||||
const fromSocketType = fromType?.outputs?.[fromSocket];
|
||||
const toSocketType = [toType?.inputs?.[toSocket]?.type];
|
||||
if (toType?.inputs?.[toSocket]?.accepts) {
|
||||
toSocketType.push(...(toType?.inputs?.[toSocket]?.accepts || []));
|
||||
const fromSocketType = from.state?.type?.outputs?.[fromSocket];
|
||||
const toSocketType = [to.state?.type?.inputs?.[toSocket]?.type];
|
||||
if (to.state?.type?.inputs?.[toSocket]?.accepts) {
|
||||
toSocketType.push(...(to?.state?.type?.inputs?.[toSocket]?.accepts || []));
|
||||
}
|
||||
|
||||
if (!areSocketsCompatible(fromSocketType, toSocketType)) {
|
||||
@@ -745,9 +723,8 @@ export class GraphManager extends EventEmitter<{
|
||||
}
|
||||
|
||||
getPossibleSockets({ node, index }: Socket): [NodeInstance, string | number][] {
|
||||
const nodeType = this.registry.getNode(node.type);
|
||||
const nodeType = node?.state?.type;
|
||||
if (!nodeType) return [];
|
||||
console.log({ index });
|
||||
|
||||
const sockets: [NodeInstance, string | number][] = [];
|
||||
|
||||
@@ -762,7 +739,7 @@ export class GraphManager extends EventEmitter<{
|
||||
const ownType = nodeType?.inputs?.[index].type;
|
||||
|
||||
for (const node of nodes) {
|
||||
const nodeType = this.registry.getNode(node.type);
|
||||
const nodeType = node?.state?.type;
|
||||
const inputs = nodeType?.outputs;
|
||||
if (!inputs) continue;
|
||||
for (let index = 0; index < inputs.length; index++) {
|
||||
@@ -780,21 +757,17 @@ export class GraphManager extends EventEmitter<{
|
||||
(n) => n.id !== node.id && !parents.has(n.id)
|
||||
);
|
||||
|
||||
const edges = new SvelteMap<number, string[]>();
|
||||
this.getEdgesFromNode(node)
|
||||
.filter((e) => e[1] === index)
|
||||
.forEach((e) => {
|
||||
if (edges.has(e[2].id)) {
|
||||
edges.get(e[2].id)?.push(e[3]);
|
||||
} else {
|
||||
edges.set(e[2].id, [e[3]]);
|
||||
}
|
||||
});
|
||||
// get edges from this socket
|
||||
const edges = new SvelteMap(
|
||||
this.getEdgesFromNode(node)
|
||||
.filter((e) => e[1] === index)
|
||||
.map((e) => [e[2].id, e[3]])
|
||||
);
|
||||
|
||||
const ownType = nodeType.outputs?.[index];
|
||||
|
||||
for (const node of nodes) {
|
||||
const inputs = this.registry.getNode(node.type)?.inputs;
|
||||
const inputs = node?.state?.type?.inputs;
|
||||
if (!inputs) continue;
|
||||
for (const key in inputs) {
|
||||
const otherType = [inputs[key].type];
|
||||
@@ -802,7 +775,7 @@ export class GraphManager extends EventEmitter<{
|
||||
|
||||
if (
|
||||
areSocketsCompatible(ownType, otherType)
|
||||
&& !edges.get(node.id)?.includes(key)
|
||||
&& edges.get(node.id) !== key
|
||||
) {
|
||||
sockets.push([node, key]);
|
||||
}
|
||||
@@ -810,7 +783,6 @@ export class GraphManager extends EventEmitter<{
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Found ${sockets.length} possible sockets`, sockets);
|
||||
return sockets;
|
||||
}
|
||||
|
||||
|
||||
@@ -83,7 +83,7 @@ export class GraphState {
|
||||
addMenuPosition = $state<[number, number] | null>(null);
|
||||
|
||||
snapToGrid = $state(false);
|
||||
backgroundType = $state<'grid' | 'dots' | 'none'>('grid');
|
||||
showGrid = $state(true);
|
||||
showHelp = $state(false);
|
||||
|
||||
cameraDown = [0, 0];
|
||||
@@ -169,14 +169,11 @@ export class GraphState {
|
||||
(node?.state?.y ?? node.position[1]) + 2.5 + 10 * index
|
||||
];
|
||||
} else {
|
||||
const inputs = node.state.type?.inputs || this.graph.registry.getNode(node.type)?.inputs
|
||||
|| {};
|
||||
const _index = Object.keys(inputs).indexOf(index);
|
||||
const pos = [
|
||||
const _index = Object.keys(node.state?.type?.inputs || {}).indexOf(index);
|
||||
return [
|
||||
node?.state?.x ?? node.position[0],
|
||||
(node?.state?.y ?? node.position[1]) + 10 + 10 * _index
|
||||
] as [number, number];
|
||||
return pos;
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -189,25 +186,15 @@ export class GraphState {
|
||||
if (!node?.inputs) {
|
||||
return 5;
|
||||
}
|
||||
let height = 5;
|
||||
|
||||
for (const key of Object.keys(node.inputs)) {
|
||||
if (key === 'seed') continue;
|
||||
if (!node.inputs) continue;
|
||||
if (node?.inputs?.[key] === undefined) continue;
|
||||
if ('setting' in node.inputs[key]) continue;
|
||||
if (node.inputs[key].hidden) continue;
|
||||
if (
|
||||
node.inputs[key].type === 'shape'
|
||||
&& node.inputs[key].external !== true
|
||||
&& node.inputs[key].internal !== false
|
||||
) {
|
||||
height += 20;
|
||||
continue;
|
||||
}
|
||||
height += 10;
|
||||
}
|
||||
|
||||
const height = 5
|
||||
+ 10
|
||||
* Object.keys(node.inputs).filter(
|
||||
(p) =>
|
||||
p !== 'seed'
|
||||
&& node?.inputs
|
||||
&& !(node?.inputs?.[p] !== undefined && 'setting' in node.inputs[p])
|
||||
&& node.inputs[p].hidden !== true
|
||||
).length;
|
||||
this.nodeHeightCache[nodeTypeId] = height;
|
||||
return height;
|
||||
}
|
||||
@@ -262,7 +249,7 @@ export class GraphState {
|
||||
|
||||
let { node, index, position } = socket;
|
||||
|
||||
// if the socket is an input socket -> remove existing edges
|
||||
// remove existing edge
|
||||
if (typeof index === 'string') {
|
||||
const edges = this.graph.getEdgesToNode(node);
|
||||
for (const edge of edges) {
|
||||
|
||||
@@ -132,9 +132,8 @@
|
||||
position={graphState.cameraPosition}
|
||||
/>
|
||||
|
||||
{#if graphState.backgroundType !== 'none'}
|
||||
{#if graphState.showGrid !== false}
|
||||
<Background
|
||||
type={graphState.backgroundType}
|
||||
cameraPosition={graphState.cameraPosition}
|
||||
{maxZoom}
|
||||
{minZoom}
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
settings?: Record<string, unknown>;
|
||||
|
||||
activeNode?: NodeInstance;
|
||||
backgroundType?: 'grid' | 'dots' | 'none';
|
||||
showGrid?: boolean;
|
||||
snapToGrid?: boolean;
|
||||
showHelp?: boolean;
|
||||
settingTypes?: Record<string, unknown>;
|
||||
@@ -25,11 +25,11 @@
|
||||
let {
|
||||
graph,
|
||||
registry,
|
||||
settings = $bindable(),
|
||||
activeNode = $bindable(),
|
||||
backgroundType = $bindable('grid'),
|
||||
showGrid = $bindable(true),
|
||||
snapToGrid = $bindable(true),
|
||||
showHelp = $bindable(false),
|
||||
settings = $bindable(),
|
||||
settingTypes = $bindable(),
|
||||
onsave,
|
||||
onresult
|
||||
@@ -43,7 +43,7 @@
|
||||
|
||||
const graphState = new GraphState(manager);
|
||||
$effect(() => {
|
||||
graphState.backgroundType = backgroundType;
|
||||
graphState.showGrid = showGrid;
|
||||
graphState.snapToGrid = snapToGrid;
|
||||
graphState.showHelp = showHelp;
|
||||
});
|
||||
|
||||
@@ -9,7 +9,7 @@ const variables = [
|
||||
'outline',
|
||||
'active',
|
||||
'selected',
|
||||
'connection'
|
||||
'edge'
|
||||
] as const;
|
||||
|
||||
function getColor(variable: (typeof variables)[number]) {
|
||||
|
||||
@@ -166,14 +166,15 @@ export class MouseEventManager {
|
||||
|
||||
if (this.state.mouseDown) return;
|
||||
this.state.edgeEndPosition = null;
|
||||
const target = event.target as HTMLElement;
|
||||
|
||||
if (
|
||||
target.nodeName !== 'CANVAS'
|
||||
&& !target.classList.contains('node')
|
||||
&& !target.classList.contains('content')
|
||||
) {
|
||||
return;
|
||||
if (event.target instanceof HTMLElement) {
|
||||
if (
|
||||
event.target.nodeName !== 'CANVAS'
|
||||
&& !event.target.classList.contains('node')
|
||||
&& !event.target.classList.contains('content')
|
||||
) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const mx = event.clientX - this.state.rect.x;
|
||||
@@ -264,7 +265,7 @@ export class MouseEventManager {
|
||||
}
|
||||
}
|
||||
|
||||
if (_socket && smallestDist < 1.5) {
|
||||
if (_socket && smallestDist < 0.9) {
|
||||
this.state.mousePosition = _socket.position;
|
||||
this.state.hoveredSocket = _socket;
|
||||
} else {
|
||||
|
||||
@@ -57,7 +57,7 @@
|
||||
uniforms={{
|
||||
uColorBright: { value: colors['layer-2'] },
|
||||
uColorDark: { value: colors['layer-1'] },
|
||||
uStrokeColor: { value: colors['layer-2'].clone() },
|
||||
uStrokeColor: { value: colors.outline.clone() },
|
||||
uStrokeWidth: { value: 1.0 },
|
||||
uWidth: { value: 20 },
|
||||
uHeight: { value: height }
|
||||
|
||||
@@ -35,8 +35,8 @@
|
||||
);
|
||||
const pathHover = $derived(
|
||||
createNodePath({
|
||||
depth: 7,
|
||||
height: 40,
|
||||
depth: 8.5,
|
||||
height: 50,
|
||||
y: 49,
|
||||
cornerTop,
|
||||
rightBump,
|
||||
@@ -87,6 +87,8 @@
|
||||
width: 30px;
|
||||
z-index: 100;
|
||||
border-radius: 50%;
|
||||
/* background: red; */
|
||||
/* opacity: 0.2; */
|
||||
}
|
||||
|
||||
.click-target:hover + svg path {
|
||||
@@ -106,16 +108,11 @@
|
||||
|
||||
svg path {
|
||||
stroke-width: 0.2px;
|
||||
transition:
|
||||
d 0.3s ease,
|
||||
fill 0.3s ease;
|
||||
transition: d 0.3s ease, fill 0.3s ease;
|
||||
fill: var(--color-layer-2);
|
||||
stroke: var(--stroke);
|
||||
stroke-width: var(--stroke-width);
|
||||
d: var(--path);
|
||||
|
||||
stroke-linejoin: round;
|
||||
shape-rendering: geometricPrecision;
|
||||
}
|
||||
|
||||
.content {
|
||||
|
||||
@@ -31,24 +31,11 @@
|
||||
return 0;
|
||||
}
|
||||
|
||||
let value = $state(structuredClone($state.snapshot(getDefaultValue())));
|
||||
|
||||
function diffArray(a: number[], b?: number[] | number) {
|
||||
if (!Array.isArray(b)) return true;
|
||||
if (Array.isArray(a) !== Array.isArray(b)) return true;
|
||||
if (a.length !== b.length) return true;
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
if (a[i] !== b[i]) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
let value = $state(getDefaultValue());
|
||||
|
||||
$effect(() => {
|
||||
const a = $state.snapshot(value);
|
||||
const b = $state.snapshot(node?.props?.[id]);
|
||||
const isDiff = Array.isArray(a) ? diffArray(a, b) : a !== b;
|
||||
if (value !== undefined && isDiff) {
|
||||
node.props = { ...node.props, [id]: a };
|
||||
if (value !== undefined && node?.props?.[id] !== value) {
|
||||
node.props = { ...node.props, [id]: value };
|
||||
if (graph) {
|
||||
graph.save();
|
||||
graph.execute();
|
||||
|
||||
@@ -18,8 +18,6 @@
|
||||
const inputType = $derived(node?.state?.type?.inputs?.[id]);
|
||||
|
||||
const socketId = $derived(`${node.id}-${id}`);
|
||||
const isShape = $derived(input.type === 'shape' && input.external !== true);
|
||||
const height = $derived(isShape ? 200 : 100);
|
||||
|
||||
const graphState = getGraphState();
|
||||
const graphId = graph?.id;
|
||||
@@ -41,6 +39,16 @@
|
||||
const aspectRatio = 0.5;
|
||||
|
||||
const path = $derived(
|
||||
createNodePath({
|
||||
depth: 7,
|
||||
height: 20,
|
||||
y: 50.5,
|
||||
cornerBottom,
|
||||
leftBump,
|
||||
aspectRatio
|
||||
})
|
||||
);
|
||||
const pathDisabled = $derived(
|
||||
createNodePath({
|
||||
depth: 6,
|
||||
height: 18,
|
||||
@@ -52,8 +60,8 @@
|
||||
);
|
||||
const pathHover = $derived(
|
||||
createNodePath({
|
||||
depth: 7,
|
||||
height: 20,
|
||||
depth: 8,
|
||||
height: 25,
|
||||
y: 50.5,
|
||||
cornerBottom,
|
||||
leftBump,
|
||||
@@ -66,8 +74,7 @@
|
||||
class="wrapper"
|
||||
data-node-type={node.type}
|
||||
data-node-input={id}
|
||||
style:height="{height}px"
|
||||
class:possible-socket={graphState?.possibleSocketIds.has(socketId)}
|
||||
class:disabled={!graphState?.possibleSocketIds.has(socketId)}
|
||||
>
|
||||
{#key id && graphId}
|
||||
<div class="content" class:disabled={graph?.inputSockets?.has(socketId)}>
|
||||
@@ -84,9 +91,10 @@
|
||||
</div>
|
||||
|
||||
{#if node?.state?.type?.inputs?.[id]?.internal !== true}
|
||||
<div data-node-socket class="large target"></div>
|
||||
<div
|
||||
data-node-socket
|
||||
class="target"
|
||||
class="small target"
|
||||
onmousedown={handleMouseDown}
|
||||
role="button"
|
||||
tabindex="0"
|
||||
@@ -98,10 +106,13 @@
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 100 100"
|
||||
width="100"
|
||||
height="100"
|
||||
preserveAspectRatio="none"
|
||||
style={`
|
||||
--path: path("${path}");
|
||||
--hover-path: path("${pathHover}");
|
||||
--hover-path-disabled: path("${pathDisabled}");
|
||||
`}
|
||||
>
|
||||
<path vector-effect="non-scaling-stroke"></path>
|
||||
@@ -112,26 +123,33 @@
|
||||
.wrapper {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
height: 100px;
|
||||
transform: translateY(-0.5px);
|
||||
}
|
||||
|
||||
.target {
|
||||
width: 30px;
|
||||
height: 30px;
|
||||
position: absolute;
|
||||
border-radius: 50%;
|
||||
top: 50%;
|
||||
transform: translateY(-50%) translateX(-50%);
|
||||
/* background: red; */
|
||||
/* opacity: 0.1; */
|
||||
}
|
||||
|
||||
.possible-socket .target {
|
||||
box-shadow: 0px 0px 10px rgba(255, 255, 255, 0.5);
|
||||
background-color: rgba(255, 255, 255, 0.2);
|
||||
z-index: -10;
|
||||
.small.target {
|
||||
width: 30px;
|
||||
height: 30px;
|
||||
}
|
||||
|
||||
.target:hover ~ svg path{
|
||||
d: var(--hover-path);
|
||||
.large.target {
|
||||
width: 60px;
|
||||
height: 60px;
|
||||
cursor: unset;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
:global(.hovering-sockets) .large.target {
|
||||
pointer-events: all;
|
||||
}
|
||||
|
||||
.content {
|
||||
@@ -161,16 +179,19 @@
|
||||
stroke: var(--stroke);
|
||||
stroke-width: var(--stroke-width);
|
||||
d: var(--path);
|
||||
}
|
||||
|
||||
stroke-linejoin: round;
|
||||
shape-rendering: geometricPrecision;
|
||||
:global {
|
||||
.hovering-sockets .large:hover ~ svg path {
|
||||
d: var(--hover-path);
|
||||
}
|
||||
}
|
||||
|
||||
.content.disabled {
|
||||
opacity: 0.2;
|
||||
}
|
||||
|
||||
.possible-socket svg path {
|
||||
d: var(--hover-path);
|
||||
.disabled svg path {
|
||||
d: var(--hover-path-disabled) !important;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
import type { NodeDefinition, NodeId, NodeRegistry } from '@nodarium/types';
|
||||
|
||||
export function createMockNodeRegistry(nodes: NodeDefinition[]): NodeRegistry {
|
||||
const nodesMap = new Map(nodes.map(n => [n.id, n]));
|
||||
return {
|
||||
status: 'ready' as const,
|
||||
load: async (nodeIds: NodeId[]) => {
|
||||
const loaded: NodeDefinition[] = [];
|
||||
for (const id of nodeIds) {
|
||||
if (nodesMap.has(id)) {
|
||||
loaded.push(nodesMap.get(id)!);
|
||||
}
|
||||
}
|
||||
return loaded;
|
||||
},
|
||||
getNode: (id: string) => nodesMap.get(id as NodeId),
|
||||
getAllNodes: () => Array.from(nodesMap.values()),
|
||||
register: async () => {
|
||||
throw new Error('Not implemented in mock');
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export const mockFloatOutputNode: NodeDefinition = {
|
||||
id: 'test/node/output',
|
||||
inputs: {},
|
||||
outputs: ['float'],
|
||||
meta: { title: 'Float Output' },
|
||||
execute: () => new Int32Array()
|
||||
};
|
||||
|
||||
export const mockFloatInputNode: NodeDefinition = {
|
||||
id: 'test/node/input',
|
||||
inputs: { value: { type: 'float' } },
|
||||
outputs: [],
|
||||
meta: { title: 'Float Input' },
|
||||
execute: () => new Int32Array()
|
||||
};
|
||||
|
||||
export const mockGeometryOutputNode: NodeDefinition = {
|
||||
id: 'test/node/geometry',
|
||||
inputs: {},
|
||||
outputs: ['geometry'],
|
||||
meta: { title: 'Geometry Output' },
|
||||
execute: () => new Int32Array()
|
||||
};
|
||||
|
||||
export const mockPathInputNode: NodeDefinition = {
|
||||
id: 'test/node/path',
|
||||
inputs: { input: { type: 'path', accepts: ['geometry'] } },
|
||||
outputs: [],
|
||||
meta: { title: 'Path Input' },
|
||||
execute: () => new Int32Array()
|
||||
};
|
||||
|
||||
export const mockVec3OutputNode: NodeDefinition = {
|
||||
id: 'test/node/vec3',
|
||||
inputs: {},
|
||||
outputs: ['vec3'],
|
||||
meta: { title: 'Vec3 Output' },
|
||||
execute: () => new Int32Array()
|
||||
};
|
||||
|
||||
export const mockIntegerInputNode: NodeDefinition = {
|
||||
id: 'test/node/integer',
|
||||
inputs: { value: { type: 'integer' } },
|
||||
outputs: [],
|
||||
meta: { title: 'Integer Input' },
|
||||
execute: () => new Int32Array()
|
||||
};
|
||||
|
||||
export const mockBooleanOutputNode: NodeDefinition = {
|
||||
id: 'test/node/boolean',
|
||||
inputs: {},
|
||||
outputs: ['boolean'],
|
||||
meta: { title: 'Boolean Output' },
|
||||
execute: () => new Int32Array()
|
||||
};
|
||||
|
||||
export const mockBooleanInputNode: NodeDefinition = {
|
||||
id: 'test/node/boolean-input',
|
||||
inputs: { value: { type: 'boolean' } },
|
||||
outputs: [],
|
||||
meta: { title: 'Boolean Input' },
|
||||
execute: () => new Int32Array()
|
||||
};
|
||||
@@ -1,110 +0,0 @@
|
||||
import { grid } from '$lib/graph-templates/grid';
|
||||
import { tree } from '$lib/graph-templates/tree';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
describe('graph-templates', () => {
|
||||
describe('grid', () => {
|
||||
it('should create a grid graph with nodes and edges', () => {
|
||||
const result = grid(2, 3);
|
||||
expect(result.nodes.length).toBeGreaterThan(0);
|
||||
expect(result.edges.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should have output node at the end', () => {
|
||||
const result = grid(1, 1);
|
||||
const outputNode = result.nodes.find(n => n.type === 'max/plantarium/output');
|
||||
expect(outputNode).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create nodes based on grid dimensions', () => {
|
||||
const result = grid(2, 2);
|
||||
const mathNodes = result.nodes.filter(n => n.type === 'max/plantarium/math');
|
||||
expect(mathNodes.length).toBeGreaterThan(0);
|
||||
const outputNode = result.nodes.find(n => n.type === 'max/plantarium/output');
|
||||
expect(outputNode).toBeDefined();
|
||||
});
|
||||
|
||||
it('should have output node at the end', () => {
|
||||
const result = grid(1, 1);
|
||||
const outputNode = result.nodes.find(n => n.type === 'max/plantarium/output');
|
||||
expect(outputNode).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create nodes based on grid dimensions', () => {
|
||||
const result = grid(2, 2);
|
||||
const mathNodes = result.nodes.filter(n => n.type === 'max/plantarium/math');
|
||||
expect(mathNodes.length).toBeGreaterThan(0);
|
||||
const outputNode = result.nodes.find(n => n.type === 'max/plantarium/output');
|
||||
expect(outputNode).toBeDefined();
|
||||
});
|
||||
|
||||
it('should have valid node positions', () => {
|
||||
const result = grid(3, 2);
|
||||
|
||||
result.nodes.forEach(node => {
|
||||
expect(node.position).toHaveLength(2);
|
||||
expect(typeof node.position[0]).toBe('number');
|
||||
expect(typeof node.position[1]).toBe('number');
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate valid graph structure', () => {
|
||||
const result = grid(2, 2);
|
||||
|
||||
result.nodes.forEach(node => {
|
||||
expect(typeof node.id).toBe('number');
|
||||
expect(node.type).toBeTruthy();
|
||||
});
|
||||
|
||||
result.edges.forEach(edge => {
|
||||
expect(edge).toHaveLength(4);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('tree', () => {
|
||||
it('should create a tree graph with specified depth', () => {
|
||||
const result = tree(0);
|
||||
|
||||
expect(result.nodes.length).toBeGreaterThan(0);
|
||||
expect(result.edges.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should have root output node', () => {
|
||||
const result = tree(2);
|
||||
|
||||
const outputNode = result.nodes.find(n => n.type === 'max/plantarium/output');
|
||||
expect(outputNode).toBeDefined();
|
||||
expect(outputNode?.id).toBe(0);
|
||||
});
|
||||
|
||||
it('should increase node count with depth', () => {
|
||||
const tree0 = tree(0);
|
||||
const tree1 = tree(1);
|
||||
const tree2 = tree(2);
|
||||
|
||||
expect(tree0.nodes.length).toBeLessThan(tree1.nodes.length);
|
||||
expect(tree1.nodes.length).toBeLessThan(tree2.nodes.length);
|
||||
});
|
||||
|
||||
it('should create binary tree structure', () => {
|
||||
const result = tree(2);
|
||||
|
||||
const mathNodes = result.nodes.filter(n => n.type === 'max/plantarium/math');
|
||||
expect(mathNodes.length).toBeGreaterThan(0);
|
||||
|
||||
const edgeCount = result.edges.length;
|
||||
expect(edgeCount).toBe(result.nodes.length - 1);
|
||||
});
|
||||
|
||||
it('should have valid node positions', () => {
|
||||
const result = tree(3);
|
||||
|
||||
result.nodes.forEach(node => {
|
||||
expect(node.position).toHaveLength(2);
|
||||
expect(typeof node.position[0]).toBe('number');
|
||||
expect(typeof node.position[1]).toBe('number');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -4,5 +4,4 @@ export { default as lottaFaces } from './lotta-faces.json';
|
||||
export { default as lottaNodesAndFaces } from './lotta-nodes-and-faces.json';
|
||||
export { default as lottaNodes } from './lotta-nodes.json';
|
||||
export { plant } from './plant';
|
||||
export { default as simple } from './simple.json';
|
||||
export { tree } from './tree';
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
{
|
||||
"id": 0,
|
||||
"settings": {
|
||||
"resolution.circle": 54,
|
||||
"resolution.curve": 20,
|
||||
"randomSeed": true
|
||||
},
|
||||
"meta": {
|
||||
"title": "New Project",
|
||||
"lastModified": "2026-02-03T16:56:40.375Z"
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"id": 9,
|
||||
"position": [
|
||||
215,
|
||||
85
|
||||
],
|
||||
"type": "max/plantarium/output",
|
||||
"props": {}
|
||||
},
|
||||
{
|
||||
"id": 10,
|
||||
"position": [
|
||||
165,
|
||||
72.5
|
||||
],
|
||||
"type": "max/plantarium/stem",
|
||||
"props": {
|
||||
"amount": 50,
|
||||
"length": 4,
|
||||
"thickness": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": 11,
|
||||
"position": [
|
||||
190,
|
||||
77.5
|
||||
],
|
||||
"type": "max/plantarium/noise",
|
||||
"props": {
|
||||
"plant": 0,
|
||||
"scale": 0.5,
|
||||
"strength": 5
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
[
|
||||
10,
|
||||
0,
|
||||
11,
|
||||
"plant"
|
||||
],
|
||||
[
|
||||
11,
|
||||
0,
|
||||
9,
|
||||
"input"
|
||||
]
|
||||
]
|
||||
}
|
||||
@@ -1,145 +0,0 @@
|
||||
import { clone, debounce, humanizeDuration, humanizeNumber, lerp, snapToGrid } from '$lib/helpers';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
describe('helpers', () => {
|
||||
describe('snapToGrid', () => {
|
||||
it('should snap to nearest grid point', () => {
|
||||
expect(snapToGrid(5, 10)).toBe(10);
|
||||
expect(snapToGrid(15, 10)).toBe(20);
|
||||
expect(snapToGrid(0, 10)).toBe(0);
|
||||
expect(snapToGrid(-10, 10)).toBe(-10);
|
||||
});
|
||||
|
||||
it('should snap exact midpoint values', () => {
|
||||
expect(snapToGrid(5, 10)).toBe(10);
|
||||
});
|
||||
|
||||
it('should use default grid size of 10', () => {
|
||||
expect(snapToGrid(5)).toBe(10);
|
||||
expect(snapToGrid(15)).toBe(20);
|
||||
});
|
||||
|
||||
it('should handle values exactly on grid', () => {
|
||||
expect(snapToGrid(10, 10)).toBe(10);
|
||||
expect(snapToGrid(20, 10)).toBe(20);
|
||||
});
|
||||
});
|
||||
|
||||
describe('lerp', () => {
|
||||
it('should linearly interpolate between two values', () => {
|
||||
expect(lerp(0, 100, 0)).toBe(0);
|
||||
expect(lerp(0, 100, 0.5)).toBe(50);
|
||||
expect(lerp(0, 100, 1)).toBe(100);
|
||||
});
|
||||
|
||||
it('should handle negative values', () => {
|
||||
expect(lerp(-50, 50, 0.5)).toBe(0);
|
||||
expect(lerp(-100, 0, 0.5)).toBe(-50);
|
||||
});
|
||||
|
||||
it('should handle t values outside 0-1 range', () => {
|
||||
expect(lerp(0, 100, -0.5)).toBe(-50);
|
||||
expect(lerp(0, 100, 1.5)).toBe(150);
|
||||
});
|
||||
});
|
||||
|
||||
describe('humanizeNumber', () => {
|
||||
it('should return unchanged numbers below 1000', () => {
|
||||
expect(humanizeNumber(0)).toBe('0');
|
||||
expect(humanizeNumber(999)).toBe('999');
|
||||
});
|
||||
|
||||
it('should add K suffix for thousands', () => {
|
||||
expect(humanizeNumber(1000)).toBe('1K');
|
||||
expect(humanizeNumber(1500)).toBe('1.5K');
|
||||
expect(humanizeNumber(999999)).toBe('1000K');
|
||||
});
|
||||
|
||||
it('should add M suffix for millions', () => {
|
||||
expect(humanizeNumber(1000000)).toBe('1M');
|
||||
expect(humanizeNumber(2500000)).toBe('2.5M');
|
||||
});
|
||||
|
||||
it('should add B suffix for billions', () => {
|
||||
expect(humanizeNumber(1000000000)).toBe('1B');
|
||||
});
|
||||
});
|
||||
|
||||
describe('humanizeDuration', () => {
|
||||
it('should return ms for very short durations', () => {
|
||||
expect(humanizeDuration(100)).toBe('100ms');
|
||||
expect(humanizeDuration(999)).toBe('999ms');
|
||||
});
|
||||
|
||||
it('should format seconds', () => {
|
||||
expect(humanizeDuration(1000)).toBe('1s');
|
||||
expect(humanizeDuration(1500)).toBe('1s500ms');
|
||||
expect(humanizeDuration(59000)).toBe('59s');
|
||||
});
|
||||
|
||||
it('should format minutes', () => {
|
||||
expect(humanizeDuration(60000)).toBe('1m');
|
||||
expect(humanizeDuration(90000)).toBe('1m 30s');
|
||||
});
|
||||
|
||||
it('should format hours', () => {
|
||||
expect(humanizeDuration(3600000)).toBe('1h');
|
||||
expect(humanizeDuration(3661000)).toBe('1h 1m 1s');
|
||||
});
|
||||
|
||||
it('should format days', () => {
|
||||
expect(humanizeDuration(86400000)).toBe('1d');
|
||||
expect(humanizeDuration(90061000)).toBe('1d 1h 1m 1s');
|
||||
});
|
||||
|
||||
it('should handle zero', () => {
|
||||
expect(humanizeDuration(0)).toBe('0ms');
|
||||
});
|
||||
});
|
||||
|
||||
describe('debounce', () => {
|
||||
it('should return a function', () => {
|
||||
const fn = debounce(() => {}, 100);
|
||||
expect(typeof fn).toBe('function');
|
||||
});
|
||||
|
||||
it('should only call once when invoked multiple times within delay', () => {
|
||||
let callCount = 0;
|
||||
const fn = debounce(() => {
|
||||
callCount++;
|
||||
}, 100);
|
||||
fn();
|
||||
const firstCall = callCount;
|
||||
fn();
|
||||
fn();
|
||||
expect(callCount).toBe(firstCall);
|
||||
});
|
||||
});
|
||||
|
||||
describe('clone', () => {
|
||||
it('should deep clone objects', () => {
|
||||
const original = { a: 1, b: { c: 2 } };
|
||||
const cloned = clone(original);
|
||||
|
||||
expect(cloned).toEqual(original);
|
||||
expect(cloned).not.toBe(original);
|
||||
expect(cloned.b).not.toBe(original.b);
|
||||
});
|
||||
|
||||
it('should handle arrays', () => {
|
||||
const original = [1, 2, [3, 4]];
|
||||
const cloned = clone(original);
|
||||
|
||||
expect(cloned).toEqual(original);
|
||||
expect(cloned).not.toBe(original);
|
||||
expect(cloned[2]).not.toBe(original[2]);
|
||||
});
|
||||
|
||||
it('should handle primitives', () => {
|
||||
expect(clone(42)).toBe(42);
|
||||
expect(clone('hello')).toBe('hello');
|
||||
expect(clone(true)).toBe(true);
|
||||
expect(clone(null)).toBe(null);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,72 +0,0 @@
|
||||
import { isObject, mergeDeep } from '$lib/helpers/deepMerge';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
describe('deepMerge', () => {
|
||||
describe('isObject', () => {
|
||||
it('should return true for plain objects', () => {
|
||||
expect(isObject({})).toBe(true);
|
||||
expect(isObject({ a: 1 })).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for non-objects', () => {
|
||||
expect(isObject([])).toBe(false);
|
||||
expect(isObject('string')).toBe(false);
|
||||
expect(isObject(42)).toBe(false);
|
||||
expect(isObject(undefined)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('mergeDeep', () => {
|
||||
it('should merge two flat objects', () => {
|
||||
const target = { a: 1, b: 2 };
|
||||
const source = { b: 3, c: 4 };
|
||||
const result = mergeDeep(target, source);
|
||||
|
||||
expect(result).toEqual({ a: 1, b: 3, c: 4 });
|
||||
});
|
||||
|
||||
it('should deeply merge nested objects', () => {
|
||||
const target = { a: { x: 1 }, b: { y: 2 } };
|
||||
const source = { a: { y: 2 }, c: { z: 3 } };
|
||||
const result = mergeDeep(target, source);
|
||||
|
||||
expect(result).toEqual({
|
||||
a: { x: 1, y: 2 },
|
||||
b: { y: 2 },
|
||||
c: { z: 3 }
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multiple sources', () => {
|
||||
const target = { a: 1 };
|
||||
const source1 = { b: 2 };
|
||||
const source2 = { c: 3 };
|
||||
const result = mergeDeep(target, source1, source2);
|
||||
|
||||
expect(result).toEqual({ a: 1, b: 2, c: 3 });
|
||||
});
|
||||
|
||||
it('should return target if no sources provided', () => {
|
||||
const target = { a: 1 };
|
||||
const result = mergeDeep(target);
|
||||
|
||||
expect(result).toBe(target);
|
||||
});
|
||||
|
||||
it('should overwrite non-object values', () => {
|
||||
const target = { a: { b: 1 } };
|
||||
const source = { a: 'string' };
|
||||
const result = mergeDeep(target, source);
|
||||
|
||||
expect(result.a).toBe('string');
|
||||
});
|
||||
|
||||
it('should handle arrays by replacing', () => {
|
||||
const target = { a: [1, 2] };
|
||||
const source = { a: [3, 4] };
|
||||
const result = mergeDeep(target, source);
|
||||
|
||||
expect(result.a).toEqual([3, 4]);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -2,7 +2,6 @@ import {
|
||||
type AsyncCache,
|
||||
type NodeDefinition,
|
||||
NodeDefinitionSchema,
|
||||
type NodeId,
|
||||
type NodeRegistry
|
||||
} from '@nodarium/types';
|
||||
import { createLogger, createWasmWrapper } from '@nodarium/utils';
|
||||
@@ -13,7 +12,6 @@ log.mute();
|
||||
export class RemoteNodeRegistry implements NodeRegistry {
|
||||
status: 'loading' | 'ready' | 'error' = 'loading';
|
||||
private nodes: Map<string, NodeDefinition> = new Map();
|
||||
private memory = new WebAssembly.Memory({ initial: 1024, maximum: 8192 });
|
||||
|
||||
constructor(
|
||||
private url: string,
|
||||
@@ -165,13 +163,6 @@ export class RemoteNodeRegistry implements NodeRegistry {
|
||||
}
|
||||
|
||||
getAllNodes() {
|
||||
const allNodes = [...this.nodes.values()];
|
||||
log.info('getting all nodes', allNodes);
|
||||
return allNodes;
|
||||
}
|
||||
|
||||
async overwriteNode(nodeId: NodeId, node: NodeDefinition) {
|
||||
log.info('Overwritten node', { nodeId, node });
|
||||
this.nodes.set(nodeId, node);
|
||||
return [...this.nodes.values()];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,7 +86,7 @@
|
||||
position: absolute;
|
||||
}
|
||||
svg {
|
||||
height: 126px;
|
||||
height: 124px;
|
||||
margin: 24px 0px;
|
||||
border-top: solid thin var(--color-outline);
|
||||
border-bottom: solid thin var(--color-outline);
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
<script lang="ts">
|
||||
import { defaultPlant, lottaFaces, plant, simple } from '$lib/graph-templates';
|
||||
import { defaultPlant, lottaFaces, plant } from '$lib/graph-templates';
|
||||
import type { Graph } from '$lib/types';
|
||||
import { InputSelect } from '@nodarium/ui';
|
||||
import type { ProjectManager } from './project-manager.svelte';
|
||||
|
||||
const { projectManager } = $props<{ projectManager: ProjectManager }>();
|
||||
|
||||
let showNewProject = $state(false);
|
||||
let newProjectName = $state('');
|
||||
let selectedTemplate = $state('defaultPlant');
|
||||
|
||||
const templates = [
|
||||
{
|
||||
@@ -16,27 +16,25 @@
|
||||
graph: defaultPlant as unknown as Graph
|
||||
},
|
||||
{ name: 'Plant', value: 'plant', graph: plant as unknown as Graph },
|
||||
{ name: 'Simple', value: 'simple', graph: simple as unknown as Graph },
|
||||
{
|
||||
name: 'Lotta Faces',
|
||||
value: 'lottaFaces',
|
||||
graph: lottaFaces as unknown as Graph
|
||||
}
|
||||
];
|
||||
let selectedTemplateIndex = $state(0);
|
||||
|
||||
function handleCreate() {
|
||||
const template = templates[selectedTemplateIndex] || templates[0];
|
||||
const template = templates.find((t) => t.value === selectedTemplate) || templates[0];
|
||||
projectManager.handleCreateProject(template.graph, newProjectName);
|
||||
newProjectName = '';
|
||||
showNewProject = false;
|
||||
}
|
||||
</script>
|
||||
|
||||
<header class="flex justify-between px-4 h-[70px] border-b-1 border-outline items-center bg-layer-2">
|
||||
<header class="flex justify-between px-4 h-[70px] border-b-1 border-outline items-center">
|
||||
<h3>Project</h3>
|
||||
<button
|
||||
class="px-3 py-1 bg-layer-1 rounded"
|
||||
class="px-3 py-1 bg-layer-0 rounded"
|
||||
onclick={() => (showNewProject = !showNewProject)}
|
||||
>
|
||||
New
|
||||
@@ -44,17 +42,24 @@
|
||||
</header>
|
||||
|
||||
{#if showNewProject}
|
||||
<div class="flex flex-col px-4 py-3.5 mt-[1px] border-b-1 border-outline gap-3">
|
||||
<div class="flex flex-col px-4 py-3 border-b-1 border-outline gap-2">
|
||||
<input
|
||||
type="text"
|
||||
bind:value={newProjectName}
|
||||
placeholder="Project name"
|
||||
class="w-full px-2 py-2 bg-layer-2 rounded"
|
||||
class="w-full px-2 py-2 bg-gray-800 border border-gray-700 rounded"
|
||||
onkeydown={(e) => e.key === 'Enter' && handleCreate()}
|
||||
/>
|
||||
<InputSelect options={templates.map(t => t.name)} bind:value={selectedTemplateIndex} />
|
||||
<select
|
||||
bind:value={selectedTemplate}
|
||||
class="w-full px-2 py-2 bg-gray-800 border border-gray-700 rounded"
|
||||
>
|
||||
{#each templates as template (template.name)}
|
||||
<option value={template.value}>{template.name}</option>
|
||||
{/each}
|
||||
</select>
|
||||
<button
|
||||
class="cursor-pointer self-end px-3 py-1 bg-selected rounded"
|
||||
class="cursor-pointer self-end px-3 py-1 bg-blue-600 rounded"
|
||||
onclick={() => handleCreate()}
|
||||
>
|
||||
Create
|
||||
@@ -62,22 +67,20 @@
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="text-white min-h-screen">
|
||||
<div class="p-4 text-white min-h-screen">
|
||||
{#if projectManager.loading}
|
||||
<p>Loading...</p>
|
||||
{/if}
|
||||
|
||||
<ul>
|
||||
<ul class="space-y-2">
|
||||
{#each projectManager.projects as project (project.id)}
|
||||
<li>
|
||||
<div
|
||||
class="
|
||||
h-[70px] border-b-1 border-b-outline
|
||||
flex
|
||||
w-full text-left px-3 py-2 cursor-pointer {projectManager
|
||||
w-full text-left px-3 py-2 rounded cursor-pointer {projectManager
|
||||
.activeProjectId.value === project.id
|
||||
? 'border-l-2 border-l-selected pl-2.5!'
|
||||
: ''}
|
||||
? 'bg-blue-600'
|
||||
: 'bg-gray-800 hover:bg-gray-700'}
|
||||
"
|
||||
onclick={() => projectManager.handleSelectProject(project.id!)}
|
||||
role="button"
|
||||
@@ -86,10 +89,10 @@
|
||||
e.key === 'Enter'
|
||||
&& projectManager.handleSelectProject(project.id!)}
|
||||
>
|
||||
<div class="flex justify-between items-center grow">
|
||||
<div class="flex justify-between items-center">
|
||||
<span>{project.meta?.title || 'Untitled'}</span>
|
||||
<button
|
||||
class="text-layer-1! bg-red-500 w-7 text-xl rounded-sm cursor-pointer opacity-20 hover:opacity-80"
|
||||
class="text-red-400 hover:text-red-300"
|
||||
onclick={() => {
|
||||
projectManager.handleDeleteProject(project.id!);
|
||||
}}
|
||||
|
||||
@@ -54,7 +54,7 @@ export class ProjectManager {
|
||||
|
||||
g.id = id;
|
||||
if (!g.meta) g.meta = {};
|
||||
g.meta.title = title;
|
||||
if (!g.meta.title) g.meta.title = title;
|
||||
|
||||
db.saveGraph(g);
|
||||
this.projects = [...this.projects, g];
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
import { decodeFloat, splitNestedArray } from '@nodarium/utils';
|
||||
import type { PerformanceStore } from '@nodarium/utils';
|
||||
import { Canvas } from '@threlte/core';
|
||||
import { DoubleSide, Vector3 } from 'three';
|
||||
import { Vector3 } from 'three';
|
||||
import { type Group, MeshMatcapMaterial, TextureLoader } from 'three';
|
||||
import { createGeometryPool, createInstancedGeometryPool } from './geometryPool';
|
||||
import Scene from './Scene.svelte';
|
||||
@@ -14,8 +14,7 @@
|
||||
matcap.colorSpace = 'srgb';
|
||||
const material = new MeshMatcapMaterial({
|
||||
color: 0xffffff,
|
||||
matcap,
|
||||
side: DoubleSide
|
||||
matcap
|
||||
});
|
||||
|
||||
let sceneComponent = $state<ReturnType<typeof Scene>>();
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
export function logInt32ArrayChanges(
|
||||
before: Int32Array,
|
||||
after: Int32Array,
|
||||
clamp = 10
|
||||
): void {
|
||||
if (before.length !== after.length) {
|
||||
throw new Error('Arrays must have the same length');
|
||||
}
|
||||
|
||||
let rangeStart: number | null = null;
|
||||
let collected: number[] = [];
|
||||
|
||||
const flush = (endIndex: number) => {
|
||||
if (rangeStart === null) return;
|
||||
|
||||
const preview = collected.slice(0, clamp);
|
||||
const suffix = collected.length > clamp ? '...' : '';
|
||||
|
||||
console.log(
|
||||
`Change ${rangeStart}-${endIndex}: [${preview.join(', ')}${suffix}]`
|
||||
);
|
||||
|
||||
rangeStart = null;
|
||||
collected = [];
|
||||
};
|
||||
|
||||
for (let i = 0; i < before.length; i++) {
|
||||
if (before[i] !== after[i]) {
|
||||
if (rangeStart === null) {
|
||||
rangeStart = i;
|
||||
}
|
||||
collected.push(after[i]);
|
||||
} else {
|
||||
flush(i - 1);
|
||||
}
|
||||
}
|
||||
|
||||
flush(before.length - 1);
|
||||
}
|
||||
@@ -1,5 +1,3 @@
|
||||
import type { SettingsToStore } from '$lib/settings/app-settings.svelte';
|
||||
import { RemoteNodeRegistry } from '@nodarium/registry';
|
||||
import type {
|
||||
Graph,
|
||||
NodeDefinition,
|
||||
@@ -9,42 +7,28 @@ import type {
|
||||
SyncCache
|
||||
} from '@nodarium/types';
|
||||
import {
|
||||
concatEncodedArrays,
|
||||
createLogger,
|
||||
createWasmWrapper,
|
||||
encodeFloat,
|
||||
fastHashArrayBuffer,
|
||||
type PerformanceStore
|
||||
} from '@nodarium/utils';
|
||||
import { DevSettingsType } from '../../routes/dev/settings.svelte';
|
||||
import { logInt32ArrayChanges } from './helpers';
|
||||
import type { RuntimeNode } from './types';
|
||||
|
||||
const log = createLogger('runtime-executor');
|
||||
// log.mute(); // Keep logging enabled for debug info
|
||||
log.mute();
|
||||
|
||||
const remoteRegistry = new RemoteNodeRegistry('');
|
||||
|
||||
type WasmExecute = (outputPos: number, args: number[]) => number;
|
||||
|
||||
function getValue(input: NodeInput, value?: unknown): number | number[] | Int32Array {
|
||||
function getValue(input: NodeInput, value?: unknown) {
|
||||
if (value === undefined && 'value' in input) {
|
||||
value = input.value;
|
||||
}
|
||||
|
||||
switch (input.type) {
|
||||
case 'float':
|
||||
return encodeFloat(value as number);
|
||||
|
||||
case 'select':
|
||||
return (value as number) ?? 0;
|
||||
|
||||
case 'vec3': {
|
||||
const arr = Array.isArray(value) ? value : [];
|
||||
return [0, arr.length + 1, ...arr.map(v => encodeFloat(v)), 1, 1];
|
||||
}
|
||||
if (input.type === 'float') {
|
||||
return encodeFloat(value as number);
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
if (input.type === 'vec3' || input.type === 'shape') {
|
||||
if (input.type === 'vec3') {
|
||||
return [
|
||||
0,
|
||||
value.length + 1,
|
||||
@@ -56,97 +40,57 @@ function getValue(input: NodeInput, value?: unknown): number | number[] | Int32A
|
||||
return [0, value.length + 1, ...value, 1, 1] as number[];
|
||||
}
|
||||
|
||||
if (typeof value === 'boolean') return value ? 1 : 0;
|
||||
if (typeof value === 'number') return value;
|
||||
if (value instanceof Int32Array) return value;
|
||||
|
||||
throw new Error(`Unsupported input type: ${input.type}`);
|
||||
}
|
||||
|
||||
function compareInt32(a: Int32Array, b: Int32Array): boolean {
|
||||
if (a.length !== b.length) return false;
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
if (a[i] !== b[i]) return false;
|
||||
if (typeof value === 'boolean') {
|
||||
return value ? 1 : 0;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
export type Pointer = {
|
||||
start: number;
|
||||
end: number;
|
||||
_title?: string;
|
||||
};
|
||||
if (typeof value === 'number') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value instanceof Int32Array) {
|
||||
return value;
|
||||
}
|
||||
|
||||
throw new Error(`Unknown input type ${input.type}`);
|
||||
}
|
||||
|
||||
export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
private nodes = new Map<string, { definition: NodeDefinition; execute: WasmExecute }>();
|
||||
private definitionMap: Map<string, NodeDefinition> = new Map();
|
||||
|
||||
private offset = 0;
|
||||
private isRunning = false;
|
||||
private seed = Math.floor(Math.random() * 100000000);
|
||||
|
||||
private readonly memory = new WebAssembly.Memory({
|
||||
initial: 4096,
|
||||
maximum: 8192
|
||||
});
|
||||
|
||||
private memoryView!: Int32Array;
|
||||
|
||||
results: Record<number, Pointer> = {};
|
||||
inputPtrs: Record<number, Pointer[]> = {};
|
||||
allPtrs: Pointer[] = [];
|
||||
|
||||
seed = 42424242;
|
||||
perf?: PerformanceStore;
|
||||
|
||||
constructor(
|
||||
private readonly registry: NodeRegistry,
|
||||
private registry: NodeRegistry,
|
||||
public cache?: SyncCache<Int32Array>
|
||||
) {
|
||||
this.cache = undefined;
|
||||
this.refreshView();
|
||||
log.info('MemoryRuntimeExecutor initialized');
|
||||
}
|
||||
|
||||
private refreshView(): void {
|
||||
this.memoryView = new Int32Array(this.memory.buffer);
|
||||
log.info(`Memory view refreshed, length: ${this.memoryView.length}`);
|
||||
}
|
||||
|
||||
public getMemory(): Int32Array {
|
||||
return new Int32Array(this.memory.buffer);
|
||||
}
|
||||
|
||||
private map = new Map<string, { definition: NodeDefinition; execute: WasmExecute }>();
|
||||
private async getNodeDefinitions(graph: Graph) {
|
||||
if (this.registry.status !== 'ready') {
|
||||
throw new Error('Node registry is not ready');
|
||||
}
|
||||
|
||||
await this.registry.load(graph.nodes.map(n => n.type));
|
||||
log.info(`Loaded ${graph.nodes.length} node types from registry`);
|
||||
await this.registry.load(graph.nodes.map((node) => node.type));
|
||||
|
||||
for (const { type } of graph.nodes) {
|
||||
if (this.map.has(type)) continue;
|
||||
|
||||
const def = this.registry.getNode(type);
|
||||
if (!def) continue;
|
||||
|
||||
log.info(`Fetching WASM for node type: ${type}`);
|
||||
const buffer = await remoteRegistry.fetchArrayBuffer(`nodes/${type}.wasm`);
|
||||
const wrapper = createWasmWrapper(buffer, this.memory);
|
||||
|
||||
this.map.set(type, {
|
||||
definition: def,
|
||||
execute: wrapper.execute
|
||||
});
|
||||
log.info(`Node type ${type} loaded and wrapped`);
|
||||
const typeMap = new Map<string, NodeDefinition>();
|
||||
for (const node of graph.nodes) {
|
||||
if (!typeMap.has(node.type)) {
|
||||
const type = this.registry.getNode(node.type);
|
||||
if (type) {
|
||||
typeMap.set(node.type, type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this.map;
|
||||
return typeMap;
|
||||
}
|
||||
|
||||
private async addMetaData(graph: Graph) {
|
||||
this.nodes = await this.getNodeDefinitions(graph);
|
||||
log.info(`Metadata added for ${this.nodes.size} nodes`);
|
||||
// First, lets check if all nodes have a definition
|
||||
this.definitionMap = await this.getNodeDefinitions(graph);
|
||||
|
||||
const graphNodes = graph.nodes.map(node => {
|
||||
const n = node as RuntimeNode;
|
||||
@@ -159,72 +103,55 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
return n;
|
||||
});
|
||||
|
||||
const outputNode = graphNodes.find(n => n.type.endsWith('/output') || n.type.endsWith('/debug'))
|
||||
?? graphNodes[0];
|
||||
const outputNode = graphNodes.find((node) => node.type.endsWith('/output'));
|
||||
if (!outputNode) {
|
||||
throw new Error('No output node found');
|
||||
}
|
||||
|
||||
const nodeMap = new Map(graphNodes.map(n => [n.id, n]));
|
||||
const nodeMap = new Map(
|
||||
graphNodes.map((node) => [node.id, node])
|
||||
);
|
||||
|
||||
// loop through all edges and assign the parent and child nodes to each node
|
||||
for (const edge of graph.edges) {
|
||||
const [parentId, /*_parentOutput*/, childId, childInput] = edge;
|
||||
const parent = nodeMap.get(parentId);
|
||||
const child = nodeMap.get(childId);
|
||||
if (!parent || !child) continue;
|
||||
|
||||
parent.state.children.push(child);
|
||||
child.state.parents.push(parent);
|
||||
child.state.inputNodes[childInput] = parent;
|
||||
if (parent && child) {
|
||||
parent.state.children.push(child);
|
||||
child.state.parents.push(parent);
|
||||
child.state.inputNodes[childInput] = parent;
|
||||
}
|
||||
}
|
||||
|
||||
const ordered: RuntimeNode[] = [];
|
||||
const stack = [outputNode];
|
||||
const nodes = [];
|
||||
|
||||
// loop through all the nodes and assign each nodes its depth
|
||||
const stack = [outputNode];
|
||||
while (stack.length) {
|
||||
const node = stack.pop()!;
|
||||
const node = stack.pop();
|
||||
if (!node) continue;
|
||||
for (const parent of node.state.parents) {
|
||||
parent.state = parent.state || {};
|
||||
parent.state.depth = node.state.depth + 1;
|
||||
stack.push(parent);
|
||||
}
|
||||
ordered.push(node);
|
||||
nodes.push(node);
|
||||
}
|
||||
|
||||
log.info(`Output node: ${outputNode.id}, total nodes ordered: ${ordered.length}`);
|
||||
return [outputNode, ordered] as const;
|
||||
}
|
||||
|
||||
private writeToMemory(value: number | number[] | Int32Array, title?: string): Pointer {
|
||||
const start = this.offset;
|
||||
|
||||
if (typeof value === 'number') {
|
||||
this.memoryView[this.offset++] = value;
|
||||
} else {
|
||||
this.memoryView.set(value, this.offset);
|
||||
this.offset += value.length;
|
||||
}
|
||||
|
||||
const ptr = { start, end: this.offset, _title: title };
|
||||
this.allPtrs.push(ptr);
|
||||
log.info(`Memory written for ${title}: start=${ptr.start}, end=${ptr.end}`);
|
||||
return ptr;
|
||||
}
|
||||
|
||||
private printMemory() {
|
||||
this.memoryView = new Int32Array(this.memory.buffer);
|
||||
console.log('MEMORY', this.memoryView.slice(0, 10));
|
||||
return [outputNode, nodes] as const;
|
||||
}
|
||||
|
||||
async execute(graph: Graph, settings: Record<string, unknown>) {
|
||||
this.offset = 0;
|
||||
this.inputPtrs = {};
|
||||
this.seed = this.seed += 2;
|
||||
this.results = {};
|
||||
this.allPtrs = [];
|
||||
this.perf?.addPoint('runtime');
|
||||
|
||||
if (this.isRunning) return undefined as unknown as Int32Array;
|
||||
this.isRunning = true;
|
||||
let a = performance.now();
|
||||
|
||||
// Then we add some metadata to the graph
|
||||
const [_outputNode, nodes] = await this.addMetaData(graph);
|
||||
const [outputNode, nodes] = await this.addMetaData(graph);
|
||||
let b = performance.now();
|
||||
|
||||
this.perf?.addPoint('collect-metadata', b - a);
|
||||
|
||||
/*
|
||||
* Here we sort the nodes into buckets, which we then execute one by one
|
||||
@@ -242,75 +169,58 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
(a, b) => (b.state?.depth || 0) - (a.state?.depth || 0)
|
||||
);
|
||||
|
||||
console.log({ settings });
|
||||
// here we store the intermediate results of the nodes
|
||||
const results: Record<string, Int32Array> = {};
|
||||
|
||||
this.printMemory();
|
||||
const seedPtr = this.writeToMemory(this.seed, 'seed');
|
||||
|
||||
const settingPtrs = new Map<string, Pointer>(
|
||||
Object.entries(settings).map((
|
||||
[key, value]
|
||||
) => [key as string, this.writeToMemory(value as number, `setting.${key}`)])
|
||||
);
|
||||
if (settings['randomSeed']) {
|
||||
this.seed = Math.floor(Math.random() * 100000000);
|
||||
}
|
||||
|
||||
for (const node of sortedNodes) {
|
||||
const node_type = this.nodes.get(node.type)!;
|
||||
|
||||
console.log('---------------');
|
||||
console.log('STARTING NODE EXECUTION', node_type.definition.id + '/' + node.id);
|
||||
this.printMemory();
|
||||
|
||||
// console.log(node_type.definition.inputs);
|
||||
const inputs = Object.entries(node_type.definition.inputs || {}).map(
|
||||
([key, input]) => {
|
||||
// We should probably initially write this to memory
|
||||
if (input.type === 'seed') {
|
||||
return seedPtr;
|
||||
}
|
||||
|
||||
const title = `${node.id}.${key}`;
|
||||
|
||||
// We should probably initially write this to memory
|
||||
// If the input is linked to a setting, we use that value
|
||||
// TODO: handle nodes which reference undefined settings
|
||||
if (input.setting) {
|
||||
return settingPtrs.get(input.setting)!;
|
||||
}
|
||||
|
||||
// check if the input is connected to another node
|
||||
const inputNode = node.state.inputNodes[key];
|
||||
if (inputNode) {
|
||||
if (this.results[inputNode.id] === undefined) {
|
||||
throw new Error(
|
||||
`Node ${node.type}/${node.id} is missing input from node ${inputNode.type}/${inputNode.id}`
|
||||
);
|
||||
}
|
||||
return this.results[inputNode.id];
|
||||
}
|
||||
|
||||
// If the value is stored in the node itself, we use that value
|
||||
if (node.props?.[key] !== undefined) {
|
||||
const value = getValue(input, node.props[key]);
|
||||
console.log(`Writing prop for ${node.id} -> ${key} to memory`, node.props[key], value);
|
||||
return this.writeToMemory(value, title);
|
||||
}
|
||||
|
||||
return this.writeToMemory(getValue(input), title);
|
||||
}
|
||||
);
|
||||
|
||||
this.printMemory();
|
||||
const node_type = this.definitionMap.get(node.type)!;
|
||||
|
||||
if (!node_type || !node.state || !node_type.execute) {
|
||||
log.warn(`Node ${node.id} has no definition`);
|
||||
continue;
|
||||
}
|
||||
|
||||
this.inputPtrs[node.id] = inputs;
|
||||
const args = inputs.map(s => [s.start, s.end]).flat();
|
||||
console.log('ARGS', inputs);
|
||||
a = performance.now();
|
||||
|
||||
// Collect the inputs for the node
|
||||
const inputs = Object.entries(node_type.inputs || {}).map(
|
||||
([key, input]) => {
|
||||
if (input.type === 'seed') {
|
||||
return this.seed;
|
||||
}
|
||||
|
||||
// If the input is linked to a setting, we use that value
|
||||
if (input.setting) {
|
||||
return getValue(input, settings[input.setting]);
|
||||
}
|
||||
|
||||
// check if the input is connected to another node
|
||||
const inputNode = node.state.inputNodes[key];
|
||||
if (inputNode) {
|
||||
if (results[inputNode.id] === undefined) {
|
||||
throw new Error(
|
||||
`Node ${node.type} is missing input from node ${inputNode.type}`
|
||||
);
|
||||
}
|
||||
return results[inputNode.id];
|
||||
}
|
||||
|
||||
// If the value is stored in the node itself, we use that value
|
||||
if (node.props?.[key] !== undefined) {
|
||||
return getValue(input, node.props[key]);
|
||||
}
|
||||
|
||||
return getValue(input);
|
||||
}
|
||||
);
|
||||
b = performance.now();
|
||||
|
||||
this.perf?.addPoint('collected-inputs', b - a);
|
||||
|
||||
this.printMemory();
|
||||
try {
|
||||
a = performance.now();
|
||||
const encoded_inputs = concatEncodedArrays(inputs);
|
||||
@@ -339,138 +249,28 @@ export class MemoryRuntimeExecutor implements RuntimeExecutor {
|
||||
b = performance.now();
|
||||
|
||||
if (this.cache && node.id !== outputNode.id) {
|
||||
this.cache.set(inputHash, this.results[node.id]);
|
||||
this.cache.set(inputHash, results[node.id]);
|
||||
}
|
||||
|
||||
this.perf?.addPoint('node/' + node_type.id, b - a);
|
||||
log.log('Result:', results[node.id]);
|
||||
log.groupEnd();
|
||||
} catch (e) {
|
||||
console.error(`Failed to execute node ${node.type}/${node.id}`, e);
|
||||
this.isRunning = false;
|
||||
log.groupEnd();
|
||||
log.error(`Error executing node ${node_type.id || node.id}`, e);
|
||||
}
|
||||
}
|
||||
|
||||
this.isRunning = true;
|
||||
log.info('Execution started');
|
||||
// return the result of the parent of the output node
|
||||
const res = results[outputNode.id];
|
||||
|
||||
try {
|
||||
this.offset = 0;
|
||||
this.results = {};
|
||||
this.inputPtrs = {};
|
||||
this.allPtrs = [];
|
||||
this.seed += 2;
|
||||
|
||||
this.refreshView();
|
||||
|
||||
const [outputNode, nodes] = await this.addMetaData(graph);
|
||||
|
||||
const sortedNodes = [...nodes].sort(
|
||||
(a, b) => (b.state.depth ?? 0) - (a.state.depth ?? 0)
|
||||
);
|
||||
|
||||
const seedPtr = this.writeToMemory(this.seed, 'seed');
|
||||
|
||||
const settingPtrs = new Map<string, Pointer>();
|
||||
for (const [key, value] of Object.entries(settings)) {
|
||||
const ptr = this.writeToMemory(value as number, `setting.${key}`);
|
||||
settingPtrs.set(key, ptr);
|
||||
}
|
||||
|
||||
let lastNodePtr: Pointer | undefined = undefined;
|
||||
|
||||
for (const node of sortedNodes) {
|
||||
const nodeType = this.nodes.get(node.type);
|
||||
if (!nodeType) continue;
|
||||
|
||||
log.info(`Executing node: ${node.id} (type: ${node.type})`);
|
||||
|
||||
const inputs = Object.entries(nodeType.definition.inputs || {}).map(
|
||||
([key, input]) => {
|
||||
if (input.type === 'seed') return seedPtr;
|
||||
|
||||
if (input.setting) {
|
||||
const ptr = settingPtrs.get(input.setting);
|
||||
if (!ptr) throw new Error(`Missing setting: ${input.setting}`);
|
||||
return ptr;
|
||||
}
|
||||
|
||||
const src = node.state.inputNodes[key];
|
||||
if (src) {
|
||||
const res = this.results[src.id];
|
||||
if (!res) {
|
||||
throw new Error(`Missing input from ${src.type}/${src.id}`);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
if (node.props?.[key] !== undefined) {
|
||||
return this.writeToMemory(
|
||||
getValue(input, node.props[key]),
|
||||
`${node.id}.${key}`
|
||||
);
|
||||
}
|
||||
|
||||
return this.writeToMemory(getValue(input), `${node.id}.${key}`);
|
||||
}
|
||||
);
|
||||
|
||||
this.inputPtrs[node.id] = inputs;
|
||||
const args = inputs.flatMap(p => [p.start * 4, p.end * 4]);
|
||||
|
||||
log.info(`Executing node ${node.type}/${node.id}`);
|
||||
const memoryBefore = this.memoryView.slice(0, this.offset);
|
||||
const bytesWritten = nodeType.execute(this.offset * 4, args);
|
||||
this.refreshView();
|
||||
const memoryAfter = this.memoryView.slice(0, this.offset);
|
||||
logInt32ArrayChanges(memoryBefore, memoryAfter);
|
||||
this.refreshView();
|
||||
|
||||
const outLen = bytesWritten >> 2;
|
||||
const outputStart = this.offset;
|
||||
|
||||
if (
|
||||
args.length === 2
|
||||
&& inputs[0].end - inputs[0].start === outLen
|
||||
&& compareInt32(
|
||||
this.memoryView.slice(inputs[0].start, inputs[0].end),
|
||||
this.memoryView.slice(outputStart, outputStart + outLen)
|
||||
)
|
||||
) {
|
||||
this.results[node.id] = inputs[0];
|
||||
this.allPtrs.push(this.results[node.id]);
|
||||
log.info(`Node ${node.id} result reused input memory`);
|
||||
} else {
|
||||
this.results[node.id] = {
|
||||
start: outputStart,
|
||||
end: outputStart + outLen,
|
||||
_title: `${node.id} ->`
|
||||
};
|
||||
this.allPtrs.push(this.results[node.id]);
|
||||
this.offset += outLen;
|
||||
lastNodePtr = this.results[node.id];
|
||||
log.info(
|
||||
`Node ${node.id} wrote result to memory: start=${outputStart}, end=${outputStart + outLen
|
||||
}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const res = this.results[outputNode.id] ?? lastNodePtr;
|
||||
if (!res) throw new Error('Output node produced no result');
|
||||
|
||||
log.info(`Execution finished, output pointer: start=${res.start}, end=${res.end}`);
|
||||
this.refreshView();
|
||||
return this.memoryView.slice(res.start, res.end);
|
||||
} catch (e) {
|
||||
log.info('Execution error:', e);
|
||||
console.error(e);
|
||||
} finally {
|
||||
this.isRunning = false;
|
||||
console.log('Final Memory', [...this.memoryView.slice(0, 20)]);
|
||||
this.perf?.endPoint('runtime');
|
||||
log.info('Executor state reset');
|
||||
if (this.cache) {
|
||||
this.cache.size = sortedNodes.length * 2;
|
||||
}
|
||||
|
||||
this.perf?.endPoint('runtime');
|
||||
|
||||
return res as unknown as Int32Array;
|
||||
}
|
||||
|
||||
getPerformanceData() {
|
||||
|
||||
@@ -56,10 +56,6 @@
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (Array.isArray(inputValue) && node.type === 'vec3') {
|
||||
return inputValue;
|
||||
}
|
||||
|
||||
// If the component is supplied with a default value use that
|
||||
if (inputValue !== undefined && typeof inputValue !== 'object') {
|
||||
return inputValue;
|
||||
@@ -102,7 +98,7 @@
|
||||
&& typeof internalValue === 'number'
|
||||
) {
|
||||
value[key] = node?.options?.[internalValue];
|
||||
} else if (internalValue !== undefined) {
|
||||
} else if (internalValue) {
|
||||
value[key] = internalValue;
|
||||
}
|
||||
});
|
||||
@@ -128,6 +124,7 @@
|
||||
|
||||
{#if key && isNodeInput(type?.[key])}
|
||||
{@const inputType = type[key]}
|
||||
<!-- Leaf input -->
|
||||
<div class="input input-{inputType.type}" class:first-level={depth === 1}>
|
||||
{#if inputType.type === 'button'}
|
||||
<button onclick={handleClick}>
|
||||
@@ -141,6 +138,7 @@
|
||||
{/if}
|
||||
</div>
|
||||
{:else if depth === 0}
|
||||
<!-- Root: iterate over top-level keys -->
|
||||
{#each Object.keys(type ?? {}).filter((k) => k !== 'title') as childKey (childKey)}
|
||||
<NestedSettings
|
||||
id={`${id}.${childKey}`}
|
||||
@@ -152,6 +150,7 @@
|
||||
{/each}
|
||||
<hr />
|
||||
{:else if key && type?.[key]}
|
||||
<!-- Group -->
|
||||
{#if depth > 0}
|
||||
<hr />
|
||||
{/if}
|
||||
@@ -211,7 +210,7 @@
|
||||
.first-level.input {
|
||||
padding-left: 1em;
|
||||
padding-right: 1em;
|
||||
padding-bottom: 0.5px;
|
||||
padding-bottom: 1px;
|
||||
gap: 3px;
|
||||
}
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ const themes = [
|
||||
'catppuccin',
|
||||
'solarized',
|
||||
'high-contrast',
|
||||
'high-contrast-light',
|
||||
'nord',
|
||||
'dracula'
|
||||
] as const;
|
||||
@@ -30,11 +29,10 @@ export const AppSettingTypes = {
|
||||
},
|
||||
nodeInterface: {
|
||||
title: 'Node Interface',
|
||||
backgroundType: {
|
||||
type: 'select',
|
||||
label: 'Background',
|
||||
options: ['grid', 'dots', 'none'],
|
||||
value: 'grid'
|
||||
showNodeGrid: {
|
||||
type: 'boolean',
|
||||
label: 'Show Grid',
|
||||
value: true
|
||||
},
|
||||
snapToGrid: {
|
||||
type: 'boolean',
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
<div class="wrapper" class:hidden>
|
||||
{#if title}
|
||||
<header class="bg-layer-2">
|
||||
<h3>{title}</h3>
|
||||
<h3 class="font-bold">{title}</h3>
|
||||
</header>
|
||||
{/if}
|
||||
{@render children?.()}
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
</script>
|
||||
|
||||
<div class='{node?"border-l-2 pl-3.5!":""} bg-layer-2 flex items-center h-[70px] border-b-1 border-l-selected border-b-outline pl-4'>
|
||||
<h3>Node Settings</h3>
|
||||
<h3 class="font-bold">Node Settings</h3>
|
||||
</div>
|
||||
|
||||
{#if node}
|
||||
|
||||
@@ -1,185 +0,0 @@
|
||||
<script lang="ts">
|
||||
import { Details } from '@nodarium/ui';
|
||||
import { micromark } from 'micromark';
|
||||
|
||||
type Props = {
|
||||
git?: Record<string, string>;
|
||||
changelog?: string;
|
||||
};
|
||||
|
||||
const {
|
||||
git,
|
||||
changelog
|
||||
}: Props = $props();
|
||||
|
||||
const typeMap = new Map([
|
||||
['fix', 'border-l-red-800'],
|
||||
['feat', 'border-l-green-800'],
|
||||
['chore', 'border-l-gray-800'],
|
||||
['docs', 'border-l-blue-800'],
|
||||
['refactor', 'border-l-purple-800'],
|
||||
['ci', 'border-l-red-400']
|
||||
]);
|
||||
|
||||
function detectCommitType(commit: string) {
|
||||
for (const key of typeMap.keys()) {
|
||||
if (commit.startsWith(key)) {
|
||||
return key;
|
||||
}
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
function parseCommit(line?: string) {
|
||||
if (!line) return;
|
||||
|
||||
const regex = /^\s*-\s*\[([a-f0-9]+)\]\((https?:\/\/[^\s)]+)\)\s+(.+)$/;
|
||||
|
||||
const match = line.match(regex);
|
||||
if (!match) {
|
||||
return;
|
||||
}
|
||||
|
||||
const [, sha, link, description] = match;
|
||||
|
||||
return {
|
||||
sha,
|
||||
link,
|
||||
description,
|
||||
type: detectCommitType(description)
|
||||
};
|
||||
}
|
||||
|
||||
function parseChangelog(md: string) {
|
||||
return md.split(/^# v/gm)
|
||||
.filter(l => !!l.length)
|
||||
.map(release => {
|
||||
const [firstLine, ...rest] = release.split('\n');
|
||||
const title = firstLine.trim();
|
||||
|
||||
const blocks = rest
|
||||
.join('\n')
|
||||
.split('---');
|
||||
|
||||
const commits = blocks.length > 1
|
||||
? blocks
|
||||
.at(-1)
|
||||
?.split('\n')
|
||||
?.map(line => parseCommit(line))
|
||||
?.filter(c => !!c)
|
||||
: [];
|
||||
|
||||
const description = (
|
||||
blocks.length > 1
|
||||
? blocks
|
||||
.slice(0, -1)
|
||||
.join('\n')
|
||||
: blocks[0]
|
||||
).trim();
|
||||
|
||||
return {
|
||||
description: micromark(description),
|
||||
title,
|
||||
commits
|
||||
};
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<div id="changelog" class="p-4 font-mono text-text overflow-y-auto max-h-full space-y-5">
|
||||
{#if git}
|
||||
<div class="mb-4 p-3 bg-layer-2 text-xs rounded">
|
||||
<p><strong>Branch:</strong> {git.branch}</p>
|
||||
<p>
|
||||
<strong>Commit:</strong>
|
||||
<a
|
||||
href="https://git.max-richter.dev/max/nodarium/commit/{git.sha}"
|
||||
class="link"
|
||||
target="_blank"
|
||||
>
|
||||
{git.sha.slice(0, 7)}
|
||||
</a>
|
||||
– {git.commit_message}
|
||||
</p>
|
||||
<p>
|
||||
<strong>Commits since last release:</strong>
|
||||
{git.commits_since_last_release}
|
||||
</p>
|
||||
<p>
|
||||
<strong>Timestamp:</strong>
|
||||
{new Date(git.commit_timestamp).toLocaleString()}
|
||||
</p>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if changelog}
|
||||
{#each parseChangelog(changelog) as release (release)}
|
||||
<Details title={release.title}>
|
||||
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
|
||||
<div id="description" class="pb-5">{@html release.description}</div>
|
||||
|
||||
{#if release?.commits?.length}
|
||||
<Details
|
||||
title="All Commits"
|
||||
class="commits"
|
||||
>
|
||||
{#each release.commits as commit (commit)}
|
||||
<p class="py-1 leading-7 text-xs border-b-1 border-l-1 border-b-outline last:border-b-0 -ml-2 pl-2 {typeMap.get(commit.type)}">
|
||||
<!-- eslint-disable-next-line svelte/no-navigation-without-resolve -->
|
||||
<a href={commit.link} class="link" target="_blank">{commit.sha}</a>
|
||||
{commit.description}
|
||||
</p>
|
||||
{/each}
|
||||
</Details>
|
||||
{/if}
|
||||
</Details>
|
||||
{/each}
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style lang="postcss">
|
||||
@reference "tailwindcss";
|
||||
|
||||
#changelog :global(.commits) {
|
||||
margin-left: -16px;
|
||||
margin-right: -16px;
|
||||
border-radius: 0px 0px 2px 2px !important;
|
||||
}
|
||||
|
||||
#changelog :global(details > div){
|
||||
padding-bottom: 0px;
|
||||
}
|
||||
|
||||
#changelog :global(.commits > div) {
|
||||
padding-bottom: 0px;
|
||||
padding-top: 0px;
|
||||
}
|
||||
|
||||
#description :global(h2) {
|
||||
@apply font-bold mt-4 mb-1;
|
||||
}
|
||||
#description :global(h2:first-child) {
|
||||
margin-top: 0px !important;
|
||||
}
|
||||
|
||||
#description :global(ul) {
|
||||
padding-left: 1em;
|
||||
}
|
||||
#description :global(li),
|
||||
#description :global(p) {
|
||||
@apply text-xs!;
|
||||
list-style-type: disc;
|
||||
}
|
||||
|
||||
#changelog :global(details > details[open] > summary){
|
||||
margin-bottom: 20px !important;
|
||||
}
|
||||
|
||||
.link {
|
||||
color: #60a5fa;
|
||||
text-decoration: none;
|
||||
}
|
||||
.link:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
</style>
|
||||
@@ -7,7 +7,7 @@
|
||||
return JSON.stringify(
|
||||
{
|
||||
...g,
|
||||
nodes: g.nodes.map((n: object) => ({ ...n, tmp: undefined, state: undefined }))
|
||||
nodes: g.nodes.map((n: object) => ({ ...n, tmp: undefined }))
|
||||
},
|
||||
null,
|
||||
2
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
<script lang="ts">
|
||||
import { page } from '$app/state';
|
||||
</script>
|
||||
|
||||
<main class="w-screen h-screen flex flex-col items-center justify-center">
|
||||
<div class="outline-1 outline-outline bg-layer-2">
|
||||
<h1 class="p-8 text-3xl">@nodarium/error</h1>
|
||||
<hr>
|
||||
<pre class="p-8">{JSON.stringify(page.error, null, 2)}</pre>
|
||||
<hr>
|
||||
<div class="flex p-4">
|
||||
<button
|
||||
class="bg-layer-2 outline-1 outline-outline p-3 px-6 rounded-sm cursor-pointer"
|
||||
on:click={() => window.location.reload()}
|
||||
>
|
||||
reload
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</main>
|
||||
@@ -1,28 +1 @@
|
||||
export const prerender = true;
|
||||
|
||||
export async function load({ fetch }) {
|
||||
async function fetchChangelog() {
|
||||
try {
|
||||
const res = await fetch('/CHANGELOG.md');
|
||||
return await res.text();
|
||||
} catch (error) {
|
||||
console.log('Failed to fetch CHANGELOG.md', error);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchGitInfo() {
|
||||
try {
|
||||
const res = await fetch('/git.json');
|
||||
return await res.json();
|
||||
} catch (error) {
|
||||
console.log('Failed to fetch git.json', error);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
git: await fetchGitInfo(),
|
||||
changelog: await fetchChangelog()
|
||||
};
|
||||
}
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
import Panel from '$lib/sidebar/Panel.svelte';
|
||||
import ActiveNodeSettings from '$lib/sidebar/panels/ActiveNodeSettings.svelte';
|
||||
import BenchmarkPanel from '$lib/sidebar/panels/BenchmarkPanel.svelte';
|
||||
import Changelog from '$lib/sidebar/panels/Changelog.svelte';
|
||||
import ExportSettings from '$lib/sidebar/panels/ExportSettings.svelte';
|
||||
import GraphSource from '$lib/sidebar/panels/GraphSource.svelte';
|
||||
import Keymap from '$lib/sidebar/panels/Keymap.svelte';
|
||||
@@ -29,8 +28,6 @@
|
||||
|
||||
let performanceStore = createPerformanceStore();
|
||||
|
||||
const { data } = $props();
|
||||
|
||||
const registryCache = new IndexDBCache('node-registry');
|
||||
const nodeRegistry = new RemoteNodeRegistry('', registryCache);
|
||||
const workerRuntime = new WorkerRuntimeExecutor();
|
||||
@@ -86,6 +83,11 @@
|
||||
let graphSettingTypes = $state({
|
||||
randomSeed: { type: 'boolean', value: false }
|
||||
});
|
||||
$effect(() => {
|
||||
if (graphSettings && graphSettingTypes) {
|
||||
manager?.setSettings($state.snapshot(graphSettings));
|
||||
}
|
||||
});
|
||||
|
||||
async function update(
|
||||
g: Graph,
|
||||
@@ -166,7 +168,7 @@
|
||||
graph={pm.graph}
|
||||
bind:this={graphInterface}
|
||||
registry={nodeRegistry}
|
||||
backgroundType={appSettings.value.nodeInterface.backgroundType}
|
||||
showGrid={appSettings.value.nodeInterface.showNodeGrid}
|
||||
snapToGrid={appSettings.value.nodeInterface.snapToGrid}
|
||||
bind:activeNode
|
||||
bind:showHelp={appSettings.value.nodeInterface.showHelp}
|
||||
@@ -247,13 +249,6 @@
|
||||
/>
|
||||
<ActiveNodeSettings {manager} bind:node={activeNode} />
|
||||
</Panel>
|
||||
<Panel
|
||||
id="changelog"
|
||||
title="Changelog"
|
||||
icon="i-[tabler--file-text-spark] bg-green-400"
|
||||
>
|
||||
<Changelog git={data.git} changelog={data.changelog} />
|
||||
</Panel>
|
||||
</Sidebar>
|
||||
</Grid.Cell>
|
||||
</Grid.Row>
|
||||
|
||||
@@ -3,6 +3,6 @@
|
||||
const { children } = $props<{ children?: Snippet }>();
|
||||
</script>
|
||||
|
||||
<main class="w-screen h-screen overflow-x-hidden">
|
||||
<main class="w-screen overflow-x-hidden">
|
||||
{@render children()}
|
||||
</main>
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
import Sidebar from '$lib/sidebar/Sidebar.svelte';
|
||||
import { type NodeId, type NodeInstance } from '@nodarium/types';
|
||||
import { concatEncodedArrays, createWasmWrapper, encodeNestedArray } from '@nodarium/utils';
|
||||
import Code from './Code.svelte';
|
||||
|
||||
const registryCache = new IndexDBCache('node-registry');
|
||||
const nodeRegistry = new RemoteNodeRegistry('', registryCache);
|
||||
@@ -44,9 +45,8 @@
|
||||
}
|
||||
}
|
||||
|
||||
let graphSettings = $state<Record<string, any>>({});
|
||||
let graphSettingTypes = $state({
|
||||
randomSeed: { type: "boolean", value: false },
|
||||
$effect(() => {
|
||||
fetchNodeData(activeNode.value);
|
||||
});
|
||||
|
||||
$effect(() => {
|
||||
@@ -62,107 +62,31 @@
|
||||
});
|
||||
</script>
|
||||
|
||||
<svelte:window
|
||||
bind:innerHeight={windowHeight}
|
||||
onkeydown={(ev) => ev.key === "r" && handleResult()}
|
||||
/>
|
||||
<div class="node-wrapper absolute bottom-8 left-8">
|
||||
{#if nodeInstance}
|
||||
<NodeHTML inView position="relative" z={5} bind:node={nodeInstance} />
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<Grid.Row>
|
||||
<Grid.Cell>
|
||||
{#if visibleRows?.length}
|
||||
<table
|
||||
class="min-w-full select-none overflow-auto text-left text-sm flex-1"
|
||||
onscroll={(e) => {
|
||||
const scrollTop = e.currentTarget.scrollTop;
|
||||
start.value = Math.floor(scrollTop / rowHeight);
|
||||
}}
|
||||
>
|
||||
<thead class="">
|
||||
<tr>
|
||||
<th class="px-4 py-2 border-b border-[var(--outline)]">i</th>
|
||||
<th
|
||||
class="px-4 py-2 border-b border-[var(--outline)] w-[50px]"
|
||||
style:width="50px">Ptrs</th
|
||||
>
|
||||
<th class="px-4 py-2 border-b border-[var(--outline)]">Value</th>
|
||||
<th class="px-4 py-2 border-b border-[var(--outline)]">Float</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody
|
||||
onscroll={(e) => {
|
||||
const scrollTop = e.currentTarget.scrollTop;
|
||||
start.value = Math.floor(scrollTop / rowHeight);
|
||||
}}
|
||||
>
|
||||
{#each visibleRows as r, i}
|
||||
{@const index = i + start.value}
|
||||
{@const ptr = ptrs[i]}
|
||||
<tr class="h-[40px] odd:bg-[var(--layer-1)]">
|
||||
<td class="px-4 border-b border-[var(--outline)] w-8">{index}</td>
|
||||
<td
|
||||
class="border-b border-[var(--outline)] overflow-hidden text-ellipsis pl-2
|
||||
{ptr?._title?.includes('->')
|
||||
? 'bg-red-500'
|
||||
: 'bg-blue-500'}"
|
||||
style="width: 100px; min-width: 100px; max-width: 100px;"
|
||||
>
|
||||
{ptr?._title}
|
||||
</td>
|
||||
<td
|
||||
class="px-4 border-b border-[var(--outline)] cursor-pointer text-blue-600 hover:text-blue-800"
|
||||
onclick={() =>
|
||||
(rowIsFloat.value[index] = !rowIsFloat.value[index])}
|
||||
>
|
||||
{decodeValue(r, rowIsFloat.value[index])}
|
||||
</td>
|
||||
<td class="px-4 border-b border-[var(--outline)] italic w-5">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={rowIsFloat.value[index]}
|
||||
onclick={() =>
|
||||
(rowIsFloat.value[index] = !rowIsFloat.value[index])}
|
||||
/>
|
||||
</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
<button
|
||||
onclick={() => copyVisibleMemory(visibleRows, ptrs, start.value)}
|
||||
class="flex items-center cursor-pointer absolute bottom-4 left-4 z-100 bg-gray-200 px-2 py-1 rounded hover:bg-gray-300"
|
||||
>
|
||||
Copy Visible Memory
|
||||
</button>
|
||||
<input
|
||||
class="absolute bottom-4 right-4 bg-white"
|
||||
bind:value={start.value}
|
||||
min="0"
|
||||
type="number"
|
||||
step="1"
|
||||
/>
|
||||
{/if}
|
||||
<pre>
|
||||
<code>
|
||||
{JSON.stringify(nodeInstance?.props)}
|
||||
</code>
|
||||
</pre>
|
||||
</Grid.Cell>
|
||||
|
||||
<Grid.Cell>
|
||||
<div class="h-screen w-[80vw] overflow-y-auto"></div>
|
||||
<div class="h-screen w-[80vw] overflow-y-auto">
|
||||
{#if nodeWasm}
|
||||
<Code wasm={nodeWasm} />
|
||||
{/if}
|
||||
</div>
|
||||
</Grid.Cell>
|
||||
</Grid.Row>
|
||||
|
||||
<Sidebar>
|
||||
<Panel id="general" title="General" icon="i-[tabler--settings]">
|
||||
<h3 class="p-4 pb-0">Debug Settings</h3>
|
||||
<NestedSettings
|
||||
id="Debug"
|
||||
bind:value={devSettings.value}
|
||||
type={DevSettingsType}
|
||||
/>
|
||||
<hr />
|
||||
<NestedSettings
|
||||
id="general"
|
||||
bind:value={appSettings.value}
|
||||
type={AppSettingTypes}
|
||||
/>
|
||||
</Panel>
|
||||
<Panel
|
||||
id="node-store"
|
||||
classes="text-green-400"
|
||||
|
||||
26
app/src/routes/dev/Code.svelte
Normal file
26
app/src/routes/dev/Code.svelte
Normal file
@@ -0,0 +1,26 @@
|
||||
<script lang="ts">
|
||||
import wabtInit from 'wabt';
|
||||
|
||||
const { wasm } = $props<{ wasm: ArrayBuffer }>();
|
||||
|
||||
async function toWat(arrayBuffer: ArrayBuffer) {
|
||||
const wabt = await wabtInit();
|
||||
|
||||
const module = wabt.readWasm(new Uint8Array(arrayBuffer), {
|
||||
readDebugNames: true
|
||||
});
|
||||
|
||||
module.generateNames();
|
||||
module.applyNames();
|
||||
|
||||
return module.toText({ foldExprs: false, inlineExport: false });
|
||||
}
|
||||
</script>
|
||||
|
||||
{#await toWat(wasm)}
|
||||
<p>Converting to WAT</p>
|
||||
{:then c}
|
||||
<pre>
|
||||
<code class="text-gray-50">{c}</code>
|
||||
</pre>
|
||||
{/await}
|
||||
@@ -1,74 +0,0 @@
|
||||
{
|
||||
"settings": {
|
||||
"resolution.circle": 26,
|
||||
"resolution.curve": 39
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"id": 9,
|
||||
"position": [
|
||||
225,
|
||||
65
|
||||
],
|
||||
"type": "max/plantarium/output",
|
||||
"props": {
|
||||
"out": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": 10,
|
||||
"position": [
|
||||
200,
|
||||
60
|
||||
],
|
||||
"type": "max/plantarium/math",
|
||||
"props": {
|
||||
"op_type": 3,
|
||||
"a": 2,
|
||||
"b": 0.38
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": 11,
|
||||
"position": [
|
||||
175,
|
||||
60
|
||||
],
|
||||
"type": "max/plantarium/float",
|
||||
"props": {
|
||||
"value": 0.66
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": 12,
|
||||
"position": [
|
||||
175,
|
||||
80
|
||||
],
|
||||
"type": "max/plantarium/float",
|
||||
"props": {
|
||||
"value": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
[
|
||||
11,
|
||||
0,
|
||||
10,
|
||||
"a"
|
||||
],
|
||||
[
|
||||
12,
|
||||
0,
|
||||
10,
|
||||
"b"
|
||||
],
|
||||
[
|
||||
10,
|
||||
0,
|
||||
9,
|
||||
"out"
|
||||
]
|
||||
]
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
import type { Pointer } from '$lib/runtime';
|
||||
|
||||
export function copyVisibleMemory(rows: Int32Array, currentPtrs: Pointer[], start: number) {
|
||||
if (!rows?.length) return;
|
||||
|
||||
// Build an array of rows for the table
|
||||
const tableRows = [...rows].map((value, i) => {
|
||||
const index = start + i;
|
||||
const ptr = currentPtrs[i];
|
||||
return {
|
||||
index,
|
||||
ptr: ptr?._title ?? '',
|
||||
value: value
|
||||
};
|
||||
});
|
||||
|
||||
// Compute column widths
|
||||
const indexWidth = Math.max(
|
||||
5,
|
||||
...tableRows.map((r) => r.index.toString().length)
|
||||
);
|
||||
const ptrWidth = Math.max(
|
||||
10,
|
||||
...tableRows.map((r) => r.ptr.length)
|
||||
);
|
||||
const valueWidth = Math.max(
|
||||
10,
|
||||
...tableRows.map((r) => r.value.toString().length)
|
||||
);
|
||||
|
||||
// Build header
|
||||
let output =
|
||||
`| ${'Index'.padEnd(indexWidth)} | ${'Ptr'.padEnd(ptrWidth)} | ${'Value'.padEnd(valueWidth)
|
||||
} |\n`
|
||||
+ `|-${'-'.repeat(indexWidth)}-|-${'-'.repeat(ptrWidth)}-|-${'-'.repeat(valueWidth)}-|\n`;
|
||||
|
||||
// Add rows
|
||||
for (const row of tableRows) {
|
||||
output += `| ${row.index.toString().padEnd(indexWidth)} | ${row.ptr.padEnd(ptrWidth)} | ${row.value.toString().padEnd(valueWidth)
|
||||
} |\n`;
|
||||
}
|
||||
|
||||
// Copy to clipboard
|
||||
navigator.clipboard
|
||||
.writeText(output)
|
||||
.then(() => console.log('Memory + metadata copied as table'))
|
||||
.catch((err) => console.error('Failed to copy memory:', err));
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import { localState } from '$lib/helpers/localState.svelte';
|
||||
import { settingsToStore } from '$lib/settings/app-settings.svelte';
|
||||
|
||||
export const DevSettingsType = {
|
||||
debugNode: {
|
||||
type: 'boolean',
|
||||
label: 'Debug Nodes',
|
||||
value: true
|
||||
}
|
||||
} as const;
|
||||
|
||||
export let devSettings = localState(
|
||||
'dev-settings',
|
||||
settingsToStore(DevSettingsType)
|
||||
);
|
||||
2
app/static/.gitignore
vendored
2
app/static/.gitignore
vendored
@@ -1,3 +1 @@
|
||||
nodes/
|
||||
CHANGELOG.md
|
||||
git.json
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import { sveltekit } from '@sveltejs/kit/vite';
|
||||
import tailwindcss from '@tailwindcss/vite';
|
||||
import { playwright } from '@vitest/browser-playwright';
|
||||
import { defineConfig } from 'vite';
|
||||
import comlink from 'vite-plugin-comlink';
|
||||
import glsl from 'vite-plugin-glsl';
|
||||
import wasm from 'vite-plugin-wasm';
|
||||
import { defineConfig } from 'vitest/config';
|
||||
|
||||
export default defineConfig({
|
||||
plugins: [
|
||||
@@ -21,36 +20,5 @@ export default defineConfig({
|
||||
},
|
||||
ssr: {
|
||||
noExternal: ['three']
|
||||
},
|
||||
build: {
|
||||
chunkSizeWarningLimit: 2000
|
||||
},
|
||||
test: {
|
||||
expect: { requireAssertions: true },
|
||||
projects: [
|
||||
{
|
||||
extends: './vite.config.ts',
|
||||
test: {
|
||||
name: 'client',
|
||||
browser: {
|
||||
enabled: true,
|
||||
provider: playwright(),
|
||||
instances: [{ browser: 'firefox', headless: true }]
|
||||
},
|
||||
include: ['src/**/*.svelte.{test,spec}.{js,ts}'],
|
||||
exclude: ['src/lib/server/**']
|
||||
}
|
||||
},
|
||||
|
||||
{
|
||||
extends: './vite.config.ts',
|
||||
test: {
|
||||
name: 'server',
|
||||
environment: 'node',
|
||||
include: ['src/**/*.{test,spec}.{js,ts}'],
|
||||
exclude: ['src/**/*.svelte.{test,spec}.{js,ts}']
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
|
||||
@@ -4,19 +4,20 @@ This guide will help you developing your first Nodarium Node written in Rust. As
|
||||
|
||||
## Prerequesites
|
||||
|
||||
You need to have [Rust](https://www.rust-lang.org/tools/install) installed. Rust is the language we are going to develop our node in and cargo compiles our rust code into webassembly.
|
||||
You need to have [Rust](https://www.rust-lang.org/tools/install) and [wasm-pack](https://rustwasm.github.io/docs/wasm-pack/) installed. Rust is the language we are going to develop our node in and wasm-pack helps us compile our rust code into a webassembly file.
|
||||
|
||||
```bash
|
||||
# install rust
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
|
||||
# install wasm-pack
|
||||
cargo install wasm-pack
|
||||
```
|
||||
|
||||
## Clone Template
|
||||
|
||||
```bash
|
||||
# copy the template directory
|
||||
cp -r nodes/max/plantarium/.template nodes/max/plantarium/my-new-node
|
||||
cd nodes/max/plantarium/my-new-node
|
||||
wasm-pack new my-new-node --template https://github.com/jim-fx/nodarium_template
|
||||
cd my-new-node
|
||||
```
|
||||
|
||||
## Setup Definition
|
||||
|
||||
@@ -1,18 +1,20 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::{
|
||||
encode_float, evaluate_float, geometry::calculate_normals, wrap_arg,
|
||||
read_i32_slice
|
||||
encode_float, evaluate_float, geometry::calculate_normals,log,
|
||||
split_args, wrap_arg,
|
||||
};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(size: (i32, i32)) -> Vec<i32> {
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
|
||||
let args = read_i32_slice(size);
|
||||
let args = split_args(input);
|
||||
|
||||
let size = evaluate_float(&args);
|
||||
log!("WASM(cube): input: {:?} -> {:?}", input, args);
|
||||
|
||||
let size = evaluate_float(args[0]);
|
||||
|
||||
let p = encode_float(size);
|
||||
let n = encode_float(-size);
|
||||
@@ -75,6 +77,8 @@ pub fn execute(size: (i32, i32)) -> Vec<i32> {
|
||||
|
||||
let res = wrap_arg(&cube_geometry);
|
||||
|
||||
log!("WASM(box): output: {:?}", res);
|
||||
|
||||
res
|
||||
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
use nodarium_utils::{
|
||||
concat_arg_vecs, evaluate_float, evaluate_int,
|
||||
geometry::{
|
||||
@@ -14,25 +13,15 @@ use std::f32::consts::PI;
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(
|
||||
path: (i32, i32),
|
||||
length: (i32, i32),
|
||||
thickness: (i32, i32),
|
||||
offset_single: (i32, i32),
|
||||
lowest_branch: (i32, i32),
|
||||
highest_branch: (i32, i32),
|
||||
depth: (i32, i32),
|
||||
amount: (i32, i32),
|
||||
resolution_curve: (i32, i32),
|
||||
rotation: (i32, i32),
|
||||
) -> Vec<i32> {
|
||||
let arg = read_i32_slice(path);
|
||||
let paths = split_args(arg.as_slice());
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
let args = split_args(input);
|
||||
|
||||
let paths = split_args(args[0]);
|
||||
|
||||
let mut output: Vec<Vec<i32>> = Vec::new();
|
||||
|
||||
let resolution = evaluate_int(read_i32_slice(resolution_curve).as_slice()).max(4) as usize;
|
||||
let depth = evaluate_int(read_i32_slice(depth).as_slice());
|
||||
let resolution = evaluate_int(args[8]).max(4) as usize;
|
||||
let depth = evaluate_int(args[6]);
|
||||
|
||||
let mut max_depth = 0;
|
||||
for path_data in paths.iter() {
|
||||
@@ -51,18 +40,18 @@ pub fn execute(
|
||||
|
||||
let path = wrap_path(path_data);
|
||||
|
||||
let branch_amount = evaluate_int(read_i32_slice(amount).as_slice()).max(1);
|
||||
let branch_amount = evaluate_int(args[7]).max(1);
|
||||
|
||||
let lowest_branch = evaluate_float(read_i32_slice(lowest_branch).as_slice());
|
||||
let highest_branch = evaluate_float(read_i32_slice(highest_branch).as_slice());
|
||||
let lowest_branch = evaluate_float(args[4]);
|
||||
let highest_branch = evaluate_float(args[5]);
|
||||
|
||||
for i in 0..branch_amount {
|
||||
let a = i as f32 / (branch_amount - 1).max(1) as f32;
|
||||
|
||||
let length = evaluate_float(read_i32_slice(length).as_slice());
|
||||
let thickness = evaluate_float(read_i32_slice(thickness).as_slice());
|
||||
let length = evaluate_float(args[1]);
|
||||
let thickness = evaluate_float(args[2]);
|
||||
let offset_single = if i % 2 == 0 {
|
||||
evaluate_float(read_i32_slice(offset_single).as_slice())
|
||||
evaluate_float(args[3])
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
@@ -76,8 +65,7 @@ pub fn execute(
|
||||
root_alpha + (offset_single - 0.5) * 6.0 / resolution as f32,
|
||||
);
|
||||
|
||||
let rotation_angle =
|
||||
(evaluate_float(read_i32_slice(rotation).as_slice()) * PI / 180.0) * i as f32;
|
||||
let rotation_angle = (evaluate_float(args[9]) * PI / 180.0) * i as f32;
|
||||
|
||||
// check if diration contains NaN
|
||||
if orthogonal[0].is_nan() || orthogonal[1].is_nan() || orthogonal[2].is_nan() {
|
||||
|
||||
6
nodes/max/plantarium/debug/.gitignore
vendored
6
nodes/max/plantarium/debug/.gitignore
vendored
@@ -1,6 +0,0 @@
|
||||
/target
|
||||
**/*.rs.bk
|
||||
Cargo.lock
|
||||
bin/
|
||||
pkg/
|
||||
wasm-pack.log
|
||||
@@ -1,12 +0,0 @@
|
||||
[package]
|
||||
name = "debug"
|
||||
version = "0.1.0"
|
||||
authors = ["Max Richter <jim-x@web.de>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" }
|
||||
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }
|
||||
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"id": "max/plantarium/debug",
|
||||
"outputs": [],
|
||||
"inputs": {
|
||||
"input": {
|
||||
"type": "float",
|
||||
"accepts": [
|
||||
"*"
|
||||
],
|
||||
"external": true
|
||||
},
|
||||
"type": {
|
||||
"type": "select",
|
||||
"options": [
|
||||
"float",
|
||||
"vec3",
|
||||
"geometry"
|
||||
],
|
||||
"internal": true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,25 +0,0 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::encode_float;
|
||||
use nodarium_utils::evaluate_float;
|
||||
use nodarium_utils::evaluate_vec3;
|
||||
use nodarium_utils::read_i32;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(input: (i32, i32), input_type: (i32, i32)) -> Vec<i32> {
|
||||
let inp = read_i32_slice(input);
|
||||
let t = read_i32(input_type.0);
|
||||
if t == 0 {
|
||||
let f = evaluate_float(inp.as_slice());
|
||||
return vec![encode_float(f)];
|
||||
}
|
||||
if t == 1 {
|
||||
let f = evaluate_vec3(inp.as_slice());
|
||||
return vec![encode_float(f[0]), encode_float(f[1]), encode_float(f[2])];
|
||||
}
|
||||
|
||||
return inp;
|
||||
}
|
||||
@@ -2,14 +2,11 @@
|
||||
name = "float"
|
||||
version = "0.1.0"
|
||||
authors = ["Max Richter <jim-x@web.de>"]
|
||||
edition = "2021"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[profile.dev]
|
||||
panic = "unwind"
|
||||
|
||||
[dependencies]
|
||||
nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" }
|
||||
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::read_i32;
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(a: (i32, i32)) -> Vec<i32> {
|
||||
vec![read_i32(a.0)]
|
||||
pub fn execute(args: &[i32]) -> Vec<i32> {
|
||||
args.into()
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use glam::Vec3;
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
use nodarium_utils::{
|
||||
concat_args, evaluate_float, evaluate_int,
|
||||
geometry::{wrap_path, wrap_path_mut},
|
||||
@@ -15,17 +14,13 @@ fn lerp_vec3(a: Vec3, b: Vec3, t: f32) -> Vec3 {
|
||||
}
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(
|
||||
plant: (i32, i32),
|
||||
strength: (i32, i32),
|
||||
curviness: (i32, i32),
|
||||
depth: (i32, i32),
|
||||
) -> Vec<i32> {
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
reset_call_count();
|
||||
|
||||
let arg = read_i32_slice(plant);
|
||||
let plants = split_args(arg.as_slice());
|
||||
let depth = evaluate_int(read_i32_slice(depth).as_slice());
|
||||
let args = split_args(input);
|
||||
|
||||
let plants = split_args(args[0]);
|
||||
let depth = evaluate_int(args[3]);
|
||||
|
||||
let mut max_depth = 0;
|
||||
for path_data in plants.iter() {
|
||||
@@ -60,9 +55,9 @@ pub fn execute(
|
||||
|
||||
let length = direction.length();
|
||||
|
||||
let str = evaluate_float(read_i32_slice(strength).as_slice());
|
||||
let curviness = evaluate_float(read_i32_slice(curviness).as_slice());
|
||||
let strength = str / curviness.max(0.0001) * str;
|
||||
let curviness = evaluate_float(args[2]);
|
||||
let strength =
|
||||
evaluate_float(args[1]) / curviness.max(0.0001) * evaluate_float(args[1]);
|
||||
|
||||
log!(
|
||||
"length: {}, curviness: {}, strength: {}",
|
||||
|
||||
@@ -28,13 +28,6 @@
|
||||
"value": 1,
|
||||
"hidden": true
|
||||
},
|
||||
"rotation": {
|
||||
"type": "float",
|
||||
"min": 0,
|
||||
"max": 1,
|
||||
"value": 0.5,
|
||||
"hidden": true
|
||||
},
|
||||
"depth": {
|
||||
"type": "integer",
|
||||
"min": 1,
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
use glam::{Mat4, Quat, Vec3};
|
||||
use nodarium_macros::{nodarium_execute, nodarium_definition_file};
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_utils::{
|
||||
concat_args, evaluate_float, evaluate_int,
|
||||
geometry::{create_instance_data, wrap_geometry_data, wrap_instance_data, wrap_path},
|
||||
split_args,
|
||||
geometry::{
|
||||
create_instance_data, wrap_geometry_data, wrap_instance_data, wrap_path,
|
||||
},
|
||||
log, split_args,
|
||||
};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
@@ -12,13 +15,13 @@ nodarium_definition_file!("src/input.json");
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
let args = split_args(input);
|
||||
let mut inputs = split_args(args[0]);
|
||||
log!("WASM(instance): inputs: {:?}", inputs);
|
||||
|
||||
let mut geo_data = read_i32_slice(geometry);
|
||||
let mut geo_data = args[1].to_vec();
|
||||
let geo = wrap_geometry_data(&mut geo_data);
|
||||
|
||||
let mut transforms: Vec<Mat4> = Vec::new();
|
||||
|
||||
// Find max depth
|
||||
let mut max_depth = 0;
|
||||
for path_data in inputs.iter() {
|
||||
if path_data[2] != 0 {
|
||||
@@ -27,8 +30,7 @@ pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
max_depth = max_depth.max(path_data[3]);
|
||||
}
|
||||
|
||||
let rotation = evaluate_float(args[5]);
|
||||
let depth = evaluate_int(args[6]);
|
||||
let depth = evaluate_int(args[5]);
|
||||
|
||||
for path_data in inputs.iter() {
|
||||
if path_data[3] < (max_depth - depth + 1) {
|
||||
@@ -36,34 +38,24 @@ pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
}
|
||||
|
||||
let amount = evaluate_int(args[2]);
|
||||
|
||||
let lowest_instance = evaluate_float(args[3]);
|
||||
let highest_instance = evaluate_float(args[4]);
|
||||
|
||||
let path = wrap_path(path_data);
|
||||
|
||||
for i in 0..amount {
|
||||
let alpha = lowest_instance
|
||||
+ (i as f32 / (amount - 1) as f32) * (highest_instance - lowest_instance);
|
||||
let alpha =
|
||||
lowest_instance + (i as f32 / amount as f32) * (highest_instance - lowest_instance);
|
||||
|
||||
let point = path.get_point_at(alpha);
|
||||
let tangent = path.get_direction_at(alpha);
|
||||
let size = point[3] + 0.01;
|
||||
|
||||
let axis_rotation = Quat::from_axis_angle(
|
||||
Vec3::from_slice(&tangent).normalize(),
|
||||
i as f32 * rotation,
|
||||
);
|
||||
|
||||
let path_rotation = Quat::from_rotation_arc(Vec3::Y, Vec3::from_slice(&tangent).normalize());
|
||||
|
||||
let rotation = path_rotation * axis_rotation;
|
||||
let direction = path.get_direction_at(alpha);
|
||||
|
||||
let transform = Mat4::from_scale_rotation_translation(
|
||||
Vec3::new(size, size, size),
|
||||
rotation,
|
||||
Vec3::new(point[3], point[3], point[3]),
|
||||
Quat::from_xyzw(direction[0], direction[1], direction[2], 1.0).normalize(),
|
||||
Vec3::from_slice(&point),
|
||||
);
|
||||
|
||||
transforms.push(transform);
|
||||
}
|
||||
}
|
||||
@@ -75,11 +67,11 @@ pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
);
|
||||
let mut instances = wrap_instance_data(&mut instance_data);
|
||||
instances.set_geometry(geo);
|
||||
(0..transforms.len()).for_each(|i| {
|
||||
instances.set_transformation_matrix(i, &transforms[i].to_cols_array());
|
||||
});
|
||||
|
||||
for (i, transform) in transforms.iter().enumerate() {
|
||||
instances.set_transformation_matrix(i, &transform.to_cols_array());
|
||||
}
|
||||
|
||||
log!("WASM(instance): geo: {:?}", instance_data);
|
||||
inputs.push(&instance_data);
|
||||
|
||||
concat_args(inputs)
|
||||
|
||||
6
nodes/max/plantarium/leaf/.gitignore
vendored
6
nodes/max/plantarium/leaf/.gitignore
vendored
@@ -1,6 +0,0 @@
|
||||
/target
|
||||
**/*.rs.bk
|
||||
Cargo.lock
|
||||
bin/
|
||||
pkg/
|
||||
wasm-pack.log
|
||||
@@ -1,12 +0,0 @@
|
||||
[package]
|
||||
name = "leaf"
|
||||
version = "0.1.0"
|
||||
authors = ["Max Richter <jim-x@web.de>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" }
|
||||
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }
|
||||
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"id": "max/plantarium/leaf",
|
||||
"outputs": [
|
||||
"geometry"
|
||||
],
|
||||
"inputs": {
|
||||
"shape": {
|
||||
"type": "shape",
|
||||
"external": true
|
||||
},
|
||||
"size": {
|
||||
"type": "float",
|
||||
"value": 1
|
||||
},
|
||||
"xResolution": {
|
||||
"type": "integer",
|
||||
"description": "The amount of stems to produce",
|
||||
"min": 1,
|
||||
"max": 64,
|
||||
"value": 1,
|
||||
"hidden": true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,166 +0,0 @@
|
||||
use std::convert::TryInto;
|
||||
use std::f32::consts::PI;
|
||||
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::encode_float;
|
||||
use nodarium_utils::evaluate_float;
|
||||
use nodarium_utils::evaluate_int;
|
||||
use nodarium_utils::log;
|
||||
use nodarium_utils::wrap_arg;
|
||||
use nodarium_utils::{split_args, decode_float};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
fn calculate_y(x: f32) -> f32 {
|
||||
let term1 = (x * PI * 2.0).sin().abs();
|
||||
let term2 = (x * 2.0 * PI + (PI / 2.0)).sin() / 2.0;
|
||||
term1 + term2
|
||||
}
|
||||
|
||||
// Helper vector math functions
|
||||
fn vec_sub(a: &[f32; 3], b: &[f32; 3]) -> [f32; 3] {
|
||||
[a[0] - b[0], a[1] - b[1], a[2] - b[2]]
|
||||
}
|
||||
|
||||
fn vec_cross(a: &[f32; 3], b: &[f32; 3]) -> [f32; 3] {
|
||||
[
|
||||
a[1] * b[2] - a[2] * b[1],
|
||||
a[2] * b[0] - a[0] * b[2],
|
||||
a[0] * b[1] - a[1] * b[0],
|
||||
]
|
||||
}
|
||||
|
||||
fn vec_normalize(v: &[f32; 3]) -> [f32; 3] {
|
||||
let len = (v[0] * v[0] + v[1] * v[1] + v[2] * v[2]).sqrt();
|
||||
if len == 0.0 { [0.0, 0.0, 0.0] } else { [v[0]/len, v[1]/len, v[2]/len] }
|
||||
}
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
let args = split_args(input);
|
||||
let input_path = split_args(args[0])[0];
|
||||
let size = evaluate_float(args[1]);
|
||||
let width_resolution = evaluate_int(args[2]).max(3) as usize;
|
||||
let path_length = (input_path.len() - 4) / 2;
|
||||
|
||||
let slice_count = path_length;
|
||||
let face_amount = (slice_count - 1) * (width_resolution - 1) * 2;
|
||||
let position_amount = slice_count * width_resolution;
|
||||
|
||||
let out_length =
|
||||
3 // metadata
|
||||
+ face_amount * 3 // indices
|
||||
+ position_amount * 3 // positions
|
||||
+ position_amount * 3; // normals
|
||||
|
||||
let mut out = vec![0 as i32; out_length];
|
||||
|
||||
log!("face_amount={:?} position_amount={:?}", face_amount, position_amount);
|
||||
|
||||
out[0] = 1;
|
||||
out[1] = position_amount.try_into().unwrap();
|
||||
out[2] = face_amount.try_into().unwrap();
|
||||
let mut offset = 3;
|
||||
|
||||
// Writing Indices
|
||||
let mut idx = 0;
|
||||
for i in 0..(slice_count - 1) {
|
||||
let base0 = (i * width_resolution) as i32;
|
||||
let base1 = ((i + 1) * width_resolution) as i32;
|
||||
|
||||
for j in 0..(width_resolution - 1) {
|
||||
let a = base0 + j as i32;
|
||||
let b = base0 + j as i32 + 1;
|
||||
let c = base1 + j as i32;
|
||||
let d = base1 + j as i32 + 1;
|
||||
|
||||
// triangle 1
|
||||
out[offset + idx + 0] = a;
|
||||
out[offset + idx + 1] = b;
|
||||
out[offset + idx + 2] = c;
|
||||
|
||||
// triangle 2
|
||||
out[offset + idx + 3] = b;
|
||||
out[offset + idx + 4] = d;
|
||||
out[offset + idx + 5] = c;
|
||||
|
||||
idx += 6;
|
||||
}
|
||||
}
|
||||
|
||||
offset += face_amount * 3;
|
||||
|
||||
// Writing Positions
|
||||
let width = 50.0;
|
||||
let mut positions = vec![[0.0f32; 3]; position_amount];
|
||||
for i in 0..slice_count {
|
||||
let ax = i as f32 / (slice_count -1) as f32;
|
||||
|
||||
let px = decode_float(input_path[2 + i * 2 + 0]);
|
||||
let pz = decode_float(input_path[2 + i * 2 + 1]);
|
||||
|
||||
|
||||
for j in 0..width_resolution {
|
||||
let alpha = j as f32 / (width_resolution - 1) as f32;
|
||||
let x = 2.0 * (-px * (alpha - 0.5) + alpha * width);
|
||||
let py = calculate_y(alpha-0.5)*5.0*(ax*PI).sin();
|
||||
let pz_val = pz - 100.0;
|
||||
|
||||
let pos_idx = i * width_resolution + j;
|
||||
positions[pos_idx] = [x - width, py, pz_val];
|
||||
|
||||
let flat_idx = offset + pos_idx * 3;
|
||||
out[flat_idx + 0] = encode_float((x - width) * size);
|
||||
out[flat_idx + 1] = encode_float(py * size);
|
||||
out[flat_idx + 2] = encode_float(pz_val * size);
|
||||
}
|
||||
}
|
||||
|
||||
// Writing Normals
|
||||
offset += position_amount * 3;
|
||||
let mut normals = vec![[0.0f32; 3]; position_amount];
|
||||
|
||||
for i in 0..(slice_count - 1) {
|
||||
for j in 0..(width_resolution - 1) {
|
||||
let a = i * width_resolution + j;
|
||||
let b = i * width_resolution + j + 1;
|
||||
let c = (i + 1) * width_resolution + j;
|
||||
let d = (i + 1) * width_resolution + j + 1;
|
||||
|
||||
// triangle 1: a,b,c
|
||||
let u = vec_sub(&positions[b], &positions[a]);
|
||||
let v = vec_sub(&positions[c], &positions[a]);
|
||||
let n1 = vec_cross(&u, &v);
|
||||
|
||||
// triangle 2: b,d,c
|
||||
let u2 = vec_sub(&positions[d], &positions[b]);
|
||||
let v2 = vec_sub(&positions[c], &positions[b]);
|
||||
let n2 = vec_cross(&u2, &v2);
|
||||
|
||||
for &idx in &[a, b, c] {
|
||||
normals[idx][0] += n1[0];
|
||||
normals[idx][1] += n1[1];
|
||||
normals[idx][2] += n1[2];
|
||||
}
|
||||
|
||||
for &idx in &[b, d, c] {
|
||||
normals[idx][0] += n2[0];
|
||||
normals[idx][1] += n2[1];
|
||||
normals[idx][2] += n2[2];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// normalize and write to output
|
||||
for i in 0..position_amount {
|
||||
let n = vec_normalize(&normals[i]);
|
||||
let flat_idx = offset + i * 3;
|
||||
out[flat_idx + 0] = encode_float(n[0]);
|
||||
out[flat_idx + 1] = encode_float(n[1]);
|
||||
out[flat_idx + 2] = encode_float(n[2]);
|
||||
}
|
||||
|
||||
wrap_arg(&out)
|
||||
}
|
||||
|
||||
@@ -1,15 +1,13 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::log;
|
||||
use nodarium_utils::{concat_arg_vecs, read_i32_slice};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
use nodarium_utils::{
|
||||
concat_args, split_args
|
||||
};
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(op_type: (i32, i32), a: (i32, i32), b: (i32, i32)) -> Vec<i32> {
|
||||
log!("math.op {:?}", op_type);
|
||||
let op = read_i32_slice(op_type);
|
||||
let a_val = read_i32_slice(a);
|
||||
let b_val = read_i32_slice(b);
|
||||
concat_arg_vecs(vec![vec![0], op, a_val, b_val])
|
||||
pub fn execute(args: &[i32]) -> Vec<i32> {
|
||||
let args = split_args(args);
|
||||
concat_args(vec![&[0], args[0], args[1], args[2]])
|
||||
}
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
name = "noise"
|
||||
version = "0.1.0"
|
||||
authors = ["Max Richter <jim-x@web.de>"]
|
||||
edition = "2021"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
use nodarium_utils::{
|
||||
concat_args, evaluate_float, evaluate_int, evaluate_vec3, geometry::wrap_path_mut, read_i32,
|
||||
concat_args, evaluate_float, evaluate_int, evaluate_vec3, geometry::wrap_path_mut,
|
||||
reset_call_count, split_args,
|
||||
};
|
||||
use noise::{HybridMulti, MultiFractal, NoiseFn, OpenSimplex};
|
||||
@@ -14,31 +13,23 @@ fn lerp(a: f32, b: f32, t: f32) -> f32 {
|
||||
}
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(
|
||||
plant: (i32, i32),
|
||||
scale: (i32, i32),
|
||||
strength: (i32, i32),
|
||||
fix_bottom: (i32, i32),
|
||||
seed: (i32, i32),
|
||||
directional_strength: (i32, i32),
|
||||
depth: (i32, i32),
|
||||
octaves: (i32, i32),
|
||||
) -> Vec<i32> {
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
reset_call_count();
|
||||
|
||||
let arg = read_i32_slice(plant);
|
||||
let plants = split_args(arg.as_slice());
|
||||
let scale = (evaluate_float(read_i32_slice(scale).as_slice()) * 0.1) as f64;
|
||||
let strength = evaluate_float(read_i32_slice(strength).as_slice());
|
||||
let fix_bottom = evaluate_float(read_i32_slice(fix_bottom).as_slice());
|
||||
let args = split_args(input);
|
||||
|
||||
let seed = read_i32(seed.0);
|
||||
let plants = split_args(args[0]);
|
||||
let scale = (evaluate_float(args[1]) * 0.1) as f64;
|
||||
let strength = evaluate_float(args[2]);
|
||||
let fix_bottom = evaluate_float(args[3]);
|
||||
|
||||
let directional_strength = evaluate_vec3(read_i32_slice(directional_strength).as_slice());
|
||||
let seed = args[4][0];
|
||||
|
||||
let depth = evaluate_int(read_i32_slice(depth).as_slice());
|
||||
let directional_strength = evaluate_vec3(args[5]);
|
||||
|
||||
let octaves = evaluate_int(read_i32_slice(octaves).as_slice());
|
||||
let depth = evaluate_int(args[6]);
|
||||
|
||||
let octaves = evaluate_int(args[7]);
|
||||
|
||||
let noise_x: HybridMulti<OpenSimplex> =
|
||||
HybridMulti::new(seed as u32 + 1).set_octaves(octaves as usize);
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
"input": {
|
||||
"type": "path",
|
||||
"accepts": [
|
||||
"*"
|
||||
"geometry"
|
||||
],
|
||||
"external": true
|
||||
},
|
||||
@@ -1,11 +1,44 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
use nodarium_utils::{
|
||||
concat_args, evaluate_int,
|
||||
geometry::{extrude_path, wrap_path},
|
||||
log, split_args,
|
||||
};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
nodarium_definition_file!("src/inputs.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(input: (i32, i32), _res: (i32, i32)) -> Vec<i32> {
|
||||
let inp = read_i32_slice(input);
|
||||
return inp;
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
log!("WASM(output): input: {:?}", input);
|
||||
|
||||
let args = split_args(input);
|
||||
|
||||
log!("WASM(output) args: {:?}", args);
|
||||
|
||||
assert_eq!(args.len(), 2, "Expected 2 arguments, got {}", args.len());
|
||||
let inputs = split_args(args[0]);
|
||||
|
||||
let resolution = evaluate_int(args[1]) as usize;
|
||||
|
||||
log!("inputs: {}, resolution: {}", inputs.len(), resolution);
|
||||
|
||||
let mut output: Vec<Vec<i32>> = Vec::new();
|
||||
for arg in inputs {
|
||||
let arg_type = arg[2];
|
||||
log!("arg_type: {}, \n {:?}", arg_type, arg,);
|
||||
|
||||
if arg_type == 0 {
|
||||
// if this is path we need to extrude it
|
||||
output.push(arg.to_vec());
|
||||
let path_data = wrap_path(arg);
|
||||
let geometry = extrude_path(path_data, resolution);
|
||||
output.push(geometry);
|
||||
continue;
|
||||
}
|
||||
|
||||
output.push(arg.to_vec());
|
||||
}
|
||||
|
||||
concat_args(output.iter().map(|v| v.as_slice()).collect())
|
||||
}
|
||||
|
||||
@@ -1,17 +1,11 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::concat_arg_vecs;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
use nodarium_utils::{concat_args, split_args};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
nodarium_definition_file!("src/definition.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(min: (i32, i32), max: (i32, i32), seed: (i32, i32)) -> Vec<i32> {
|
||||
nodarium_utils::log!("random execute start");
|
||||
concat_arg_vecs(vec![
|
||||
vec![1],
|
||||
read_i32_slice(min),
|
||||
read_i32_slice(max),
|
||||
read_i32_slice(seed),
|
||||
])
|
||||
pub fn execute(args: &[i32]) -> Vec<i32> {
|
||||
let args = split_args(args);
|
||||
concat_args(vec![&[1], args[0], args[1], args[2]])
|
||||
}
|
||||
|
||||
@@ -1,26 +1,23 @@
|
||||
use glam::{Mat4, Vec3};
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
use nodarium_utils::{
|
||||
concat_args, evaluate_float, evaluate_int, geometry::wrap_path_mut, log, split_args,
|
||||
concat_args, evaluate_float, evaluate_int, geometry::wrap_path_mut, log,
|
||||
split_args,
|
||||
};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(
|
||||
plant: (i32, i32),
|
||||
axis: (i32, i32),
|
||||
angle: (i32, i32),
|
||||
spread: (i32, i32),
|
||||
) -> Vec<i32> {
|
||||
log!("DEBUG args: {:?}", plant);
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
|
||||
let arg = read_i32_slice(plant);
|
||||
let plants = split_args(arg.as_slice());
|
||||
let axis = evaluate_int(read_i32_slice(axis).as_slice()); // 0 =x, 1 = y, 2 = z
|
||||
let spread = evaluate_int(read_i32_slice(spread).as_slice());
|
||||
log!("DEBUG args: {:?}", input);
|
||||
|
||||
let args = split_args(input);
|
||||
|
||||
let plants = split_args(args[0]);
|
||||
let axis = evaluate_int(args[1]); // 0 =x, 1 = y, 2 = z
|
||||
let spread = evaluate_int(args[3]);
|
||||
|
||||
let output: Vec<Vec<i32>> = plants
|
||||
.iter()
|
||||
@@ -35,7 +32,7 @@ pub fn execute(
|
||||
|
||||
let path = wrap_path_mut(&mut path_data);
|
||||
|
||||
let angle = evaluate_float(read_i32_slice(angle).as_slice());
|
||||
let angle = evaluate_float(args[2]);
|
||||
|
||||
let origin = [path.points[0], path.points[1], path.points[2]];
|
||||
|
||||
|
||||
6
nodes/max/plantarium/shape/.gitignore
vendored
6
nodes/max/plantarium/shape/.gitignore
vendored
@@ -1,6 +0,0 @@
|
||||
/target
|
||||
**/*.rs.bk
|
||||
Cargo.lock
|
||||
bin/
|
||||
pkg/
|
||||
wasm-pack.log
|
||||
@@ -1,12 +0,0 @@
|
||||
[package]
|
||||
name = "shape"
|
||||
version = "0.1.0"
|
||||
authors = ["Max Richter <jim-x@web.de>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
nodarium_macros = { version = "0.1.0", path = "../../../../packages/macros" }
|
||||
nodarium_utils = { version = "0.1.0", path = "../../../../packages/utils" }
|
||||
@@ -1,27 +0,0 @@
|
||||
{
|
||||
"id": "max/plantarium/shape",
|
||||
"outputs": [
|
||||
"shape"
|
||||
],
|
||||
"inputs": {
|
||||
"shape": {
|
||||
"type": "shape",
|
||||
"internal": true,
|
||||
"value": [
|
||||
47.8,
|
||||
100,
|
||||
47.8,
|
||||
82.8,
|
||||
30.9,
|
||||
69.1,
|
||||
23.2,
|
||||
40.7,
|
||||
27.1,
|
||||
14.5,
|
||||
42.5,
|
||||
0
|
||||
],
|
||||
"label": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::{concat_args, split_args};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
concat_args(split_args(input))
|
||||
}
|
||||
@@ -3,29 +3,30 @@ use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::{
|
||||
evaluate_float, evaluate_int, evaluate_vec3,
|
||||
geometry::{create_multiple_paths, wrap_multiple_paths},
|
||||
log, reset_call_count,
|
||||
read_i32_slice, read_i32,
|
||||
log, reset_call_count, split_args,
|
||||
};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(origin: (i32, i32), _amount: (i32,i32), length: (i32, i32), thickness: (i32, i32), resolution_curve: (i32, i32)) -> Vec<i32> {
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
reset_call_count();
|
||||
|
||||
let amount = evaluate_int(read_i32_slice(_amount).as_slice()) as usize;
|
||||
let path_resolution = read_i32(resolution_curve.0) as usize;
|
||||
let args = split_args(input);
|
||||
|
||||
log!("stem args: amount={:?}", amount);
|
||||
let amount = evaluate_int(args[1]) as usize;
|
||||
let path_resolution = evaluate_int(args[4]) as usize;
|
||||
|
||||
log!("stem args: {:?}", args);
|
||||
|
||||
let mut stem_data = create_multiple_paths(amount, path_resolution, 1);
|
||||
|
||||
let mut stems = wrap_multiple_paths(&mut stem_data);
|
||||
|
||||
for stem in stems.iter_mut() {
|
||||
let origin = evaluate_vec3(read_i32_slice(origin).as_slice());
|
||||
let length = evaluate_float(read_i32_slice(length).as_slice());
|
||||
let thickness = evaluate_float(read_i32_slice(thickness).as_slice());
|
||||
let origin = evaluate_vec3(args[0]);
|
||||
let length = evaluate_float(args[2]);
|
||||
let thickness = evaluate_float(args[3]);
|
||||
let amount_points = stem.points.len() / 4;
|
||||
|
||||
for i in 0..amount_points {
|
||||
|
||||
@@ -1,48 +1,45 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
use nodarium_utils::{decode_float, encode_float, evaluate_int, log, wrap_arg};
|
||||
use nodarium_utils::{
|
||||
decode_float, encode_float, evaluate_int, split_args, wrap_arg, log
|
||||
};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(size: (i32, i32)) -> Vec<i32> {
|
||||
let size = evaluate_int(read_i32_slice(size).as_slice());
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
|
||||
let args = split_args(input);
|
||||
|
||||
let size = evaluate_int(args[0]);
|
||||
let decoded = decode_float(size);
|
||||
let negative_size = encode_float(-decoded);
|
||||
|
||||
log!("WASM(triangle): input: {:?} -> {}", size, decoded);
|
||||
log!("WASM(triangle): input: {:?} -> {}", args[0],decoded);
|
||||
|
||||
// [[1,3, x, y, z, x, y,z,x,y,z]];
|
||||
wrap_arg(&[
|
||||
1, // 1: geometry
|
||||
3, // 3 vertices
|
||||
1, // 1 face
|
||||
1, // 1: geometry
|
||||
3, // 3 vertices
|
||||
1, // 1 face
|
||||
// this are the indeces for the face
|
||||
0,
|
||||
2,
|
||||
1,
|
||||
0, 2, 1,
|
||||
//
|
||||
negative_size, // x -> point 1
|
||||
0, // y
|
||||
0, // z
|
||||
negative_size, // x -> point 1
|
||||
0, // y
|
||||
0, // z
|
||||
//
|
||||
size, // x -> point 2
|
||||
0, // y
|
||||
0, // z
|
||||
size, // x -> point 2
|
||||
0, // y
|
||||
0, // z
|
||||
//
|
||||
0, // x -> point 3
|
||||
0, // y
|
||||
size, // z
|
||||
0, // x -> point 3
|
||||
0, // y
|
||||
size, // z
|
||||
// this is the normal for the single face 1065353216 == 1.0f encoded is i32
|
||||
0,
|
||||
1065353216,
|
||||
0,
|
||||
0,
|
||||
1065353216,
|
||||
0,
|
||||
0,
|
||||
1065353216,
|
||||
0,
|
||||
0, 1065353216, 0,
|
||||
0, 1065353216, 0,
|
||||
0, 1065353216, 0,
|
||||
])
|
||||
|
||||
}
|
||||
|
||||
@@ -1,17 +1,13 @@
|
||||
use nodarium_macros::nodarium_definition_file;
|
||||
use nodarium_macros::nodarium_execute;
|
||||
use nodarium_utils::concat_args;
|
||||
use nodarium_utils::log;
|
||||
use nodarium_utils::read_i32_slice;
|
||||
use nodarium_utils::{concat_args, log, split_args};
|
||||
|
||||
nodarium_definition_file!("src/input.json");
|
||||
|
||||
#[nodarium_execute]
|
||||
pub fn execute(x: (i32, i32), y: (i32, i32), z: (i32, i32)) -> Vec<i32> {
|
||||
log!("vec3 x: {:?}", x);
|
||||
concat_args(vec![
|
||||
read_i32_slice(x).as_slice(),
|
||||
read_i32_slice(y).as_slice(),
|
||||
read_i32_slice(z).as_slice(),
|
||||
])
|
||||
pub fn execute(input: &[i32]) -> Vec<i32> {
|
||||
let args = split_args(input);
|
||||
log!("vec3 input: {:?}", input);
|
||||
log!("vec3 args: {:?}", args);
|
||||
concat_args(args)
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
{
|
||||
"version": "0.0.3",
|
||||
"scripts": {
|
||||
"postinstall": "pnpm run -r --filter 'ui' build",
|
||||
"build": "pnpm build:nodes && pnpm build:app",
|
||||
"lint": "pnpm run -r --parallel lint",
|
||||
"format": "pnpm dprint fmt",
|
||||
"format:check": "pnpm dprint check",
|
||||
"test": "pnpm run -r --parallel test",
|
||||
"check": "pnpm run -r --parallel check",
|
||||
"build": "pnpm build:nodes && pnpm build:app",
|
||||
"check": "pnpm run -r check",
|
||||
"build:story": "pnpm -r --filter 'ui' story:build",
|
||||
"build:app": "BASE_PATH=/ui pnpm -r --filter 'ui' build && pnpm -r --filter 'app' build",
|
||||
"build:nodes": "cargo build --workspace --target wasm32-unknown-unknown --release && rm -rf ./app/static/nodes/max/plantarium/ && mkdir -p ./app/static/nodes/max/plantarium/ && cp -R ./target/wasm32-unknown-unknown/release/*.wasm ./app/static/nodes/max/plantarium/",
|
||||
"build:deploy": "pnpm build && cp -R packages/ui/build app/build/ui",
|
||||
"dev:nodes": "chokidar './nodes/**' --initial -i '/pkg/' -c 'pnpm build:nodes'",
|
||||
"dev:app_ui": "pnpm -r --parallel --filter 'app' --filter './packages/ui' dev",
|
||||
"dev_ui": "pnpm -r --filter 'ui' dev:ui",
|
||||
|
||||
@@ -6,202 +6,96 @@ use std::env;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use syn::parse_macro_input;
|
||||
use syn::spanned::Spanned;
|
||||
|
||||
fn add_line_numbers(input: String) -> String {
|
||||
input
|
||||
return input
|
||||
.split('\n')
|
||||
.enumerate()
|
||||
.map(|(i, line)| format!("{:2}: {}", i + 1, line))
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n")
|
||||
}
|
||||
|
||||
fn read_node_definition(file_path: &Path) -> NodeDefinition {
|
||||
let project_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
|
||||
let full_path = Path::new(&project_dir).join(file_path);
|
||||
let json_content = fs::read_to_string(&full_path).unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"Failed to read JSON file at '{}/{}': {}",
|
||||
project_dir,
|
||||
file_path.to_string_lossy(),
|
||||
err
|
||||
)
|
||||
});
|
||||
serde_json::from_str(&json_content).unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"JSON file contains invalid JSON: \n{} \n{}",
|
||||
err,
|
||||
add_line_numbers(json_content.clone())
|
||||
)
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
#[proc_macro_attribute]
|
||||
pub fn nodarium_execute(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
||||
let input_fn = parse_macro_input!(item as syn::ItemFn);
|
||||
let fn_name = &input_fn.sig.ident;
|
||||
let fn_vis = &input_fn.vis;
|
||||
let _fn_name = &input_fn.sig.ident;
|
||||
let _fn_vis = &input_fn.vis;
|
||||
let fn_body = &input_fn.block;
|
||||
let inner_fn_name = syn::Ident::new(&format!("__nodarium_inner_{}", fn_name), fn_name.span());
|
||||
|
||||
let def: NodeDefinition = read_node_definition(Path::new("src/input.json"));
|
||||
|
||||
let input_count = def.inputs.as_ref().map(|i| i.len()).unwrap_or(0);
|
||||
|
||||
validate_signature(&input_fn.sig, input_count, &def);
|
||||
|
||||
let input_param_names: Vec<_> = input_fn
|
||||
.sig
|
||||
.inputs
|
||||
.iter()
|
||||
.filter_map(|arg| {
|
||||
if let syn::FnArg::Typed(pat_type) = arg {
|
||||
if let syn::Pat::Ident(pat_ident) = &*pat_type.pat {
|
||||
Some(pat_ident.ident.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let param_count = input_fn.sig.inputs.len();
|
||||
let total_c_params = param_count * 2;
|
||||
|
||||
let arg_names: Vec<_> = (0..total_c_params)
|
||||
.map(|i| syn::Ident::new(&format!("arg{i}"), input_fn.sig.span()))
|
||||
.collect();
|
||||
|
||||
let mut tuple_args = Vec::new();
|
||||
for i in 0..param_count {
|
||||
let start_name = &arg_names[i * 2];
|
||||
let end_name = &arg_names[i * 2 + 1];
|
||||
let tuple_arg = quote! {
|
||||
(#start_name, #end_name)
|
||||
};
|
||||
tuple_args.push(tuple_arg);
|
||||
}
|
||||
let first_arg_ident = if let Some(syn::FnArg::Typed(pat_type)) = input_fn.sig.inputs.first() {
|
||||
if let syn::Pat::Ident(pat_ident) = &*pat_type.pat {
|
||||
&pat_ident.ident
|
||||
} else {
|
||||
panic!("Expected a simple identifier for the first argument");
|
||||
}
|
||||
} else {
|
||||
panic!("The execute function must have at least one argument (the input slice)");
|
||||
};
|
||||
|
||||
// We create a wrapper that handles the C ABI and pointer math
|
||||
let expanded = quote! {
|
||||
|
||||
extern "C" {
|
||||
fn __nodarium_log(ptr: *const u8, len: usize);
|
||||
fn host_log_panic(ptr: *const u8, len: usize);
|
||||
fn host_log(ptr: *const u8, len: usize);
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
fn init_panic_hook() {
|
||||
std::panic::set_hook(Box::new(|_info| {
|
||||
unsafe {
|
||||
__nodarium_log(b"PANIC\0".as_ptr(), 5);
|
||||
}
|
||||
}));
|
||||
fn setup_panic_hook() {
|
||||
static SET_HOOK: std::sync::Once = std::sync::Once::new();
|
||||
SET_HOOK.call_once(|| {
|
||||
std::panic::set_hook(Box::new(|info| {
|
||||
let msg = info.to_string();
|
||||
unsafe { host_log_panic(msg.as_ptr(), msg.len()); }
|
||||
}));
|
||||
});
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn __alloc(len: usize) -> *mut i32 {
|
||||
let mut buf = Vec::with_capacity(len);
|
||||
let ptr = buf.as_mut_ptr();
|
||||
std::mem::forget(buf);
|
||||
ptr
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn init_allocator() {
|
||||
nodarium_utils::allocator::ALLOCATOR.init();
|
||||
}
|
||||
|
||||
#fn_vis fn #inner_fn_name(#( #input_param_names: (i32, i32) ),*) -> Vec<i32> {
|
||||
#fn_body
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
#fn_vis extern "C" fn execute(output_pos: i32, #( #arg_names: i32 ),*) -> i32 {
|
||||
|
||||
nodarium_utils::allocator::ALLOCATOR.init();
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
init_panic_hook();
|
||||
nodarium_utils::log!("before_fn");
|
||||
let result = #inner_fn_name(
|
||||
#( #tuple_args ),*
|
||||
);
|
||||
nodarium_utils::log!("after_fn: result_len={}", result.len());
|
||||
|
||||
let len_bytes = result.len() * 4;
|
||||
pub extern "C" fn __free(ptr: *mut i32, len: usize) {
|
||||
unsafe {
|
||||
let src = result.as_ptr() as *const u8;
|
||||
let dst = output_pos as *mut u8;
|
||||
nodarium_utils::log!("writing output_pos={:?} src={:?} len_bytes={:?}", output_pos, src, len_bytes);
|
||||
dst.copy_from_nonoverlapping(src, len_bytes);
|
||||
let _ = Vec::from_raw_parts(ptr, 0, len);
|
||||
}
|
||||
}
|
||||
|
||||
len_bytes as i32
|
||||
static mut OUTPUT_BUFFER: Vec<i32> = Vec::new();
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn execute(ptr: *const i32, len: usize) -> *mut i32 {
|
||||
setup_panic_hook();
|
||||
// 1. Convert raw pointer to slice
|
||||
let input = unsafe { core::slice::from_raw_parts(ptr, len) };
|
||||
|
||||
// 2. Call the logic (which we define below)
|
||||
let result_data: Vec<i32> = internal_logic(input);
|
||||
|
||||
// 3. Use the static buffer for the result
|
||||
let result_len = result_data.len();
|
||||
unsafe {
|
||||
OUTPUT_BUFFER.clear();
|
||||
OUTPUT_BUFFER.reserve(result_len + 1);
|
||||
OUTPUT_BUFFER.push(result_len as i32);
|
||||
OUTPUT_BUFFER.extend(result_data);
|
||||
|
||||
OUTPUT_BUFFER.as_mut_ptr()
|
||||
}
|
||||
}
|
||||
|
||||
fn internal_logic(#first_arg_ident: &[i32]) -> Vec<i32> {
|
||||
#fn_body
|
||||
}
|
||||
};
|
||||
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
|
||||
fn validate_signature(fn_sig: &syn::Signature, expected_inputs: usize, def: &NodeDefinition) {
|
||||
let param_count = fn_sig.inputs.len();
|
||||
let expected_params = expected_inputs;
|
||||
|
||||
if param_count != expected_params {
|
||||
panic!(
|
||||
"Execute function has {} parameters but definition has {} inputs\n\
|
||||
Definition inputs: {:?}\n\
|
||||
Expected signature:\n\
|
||||
pub fn execute({}) -> Vec<i32>",
|
||||
param_count,
|
||||
expected_inputs,
|
||||
def.inputs
|
||||
.as_ref()
|
||||
.map(|i| i.keys().collect::<Vec<_>>())
|
||||
.unwrap_or_default(),
|
||||
(0..expected_inputs)
|
||||
.map(|i| format!("arg{i}: (i32, i32)"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
);
|
||||
}
|
||||
|
||||
for (i, arg) in fn_sig.inputs.iter().enumerate() {
|
||||
match arg {
|
||||
syn::FnArg::Typed(pat_type) => {
|
||||
let type_str = quote! { #pat_type.ty }.to_string();
|
||||
let clean_type = type_str
|
||||
.trim()
|
||||
.trim_start_matches("_")
|
||||
.trim_end_matches(".ty")
|
||||
.trim()
|
||||
.to_string();
|
||||
if !clean_type.contains("(") && !clean_type.contains(",") {
|
||||
panic!(
|
||||
"Parameter {i} has type '{clean_type}' but should be a tuple (i32, i32) representing (start, end) positions in memory",
|
||||
);
|
||||
}
|
||||
}
|
||||
syn::FnArg::Receiver(_) => {
|
||||
panic!("Execute function cannot have 'self' parameter");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match &fn_sig.output {
|
||||
syn::ReturnType::Type(_, ty) => {
|
||||
let is_vec = match &**ty {
|
||||
syn::Type::Path(tp) => tp
|
||||
.path
|
||||
.segments
|
||||
.first()
|
||||
.map(|seg| seg.ident == "Vec")
|
||||
.unwrap_or(false),
|
||||
_ => false,
|
||||
};
|
||||
if !is_vec {
|
||||
panic!("Execute function must return Vec<i32>");
|
||||
}
|
||||
}
|
||||
syn::ReturnType::Default => {
|
||||
panic!("Execute function must return Vec<i32>");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[proc_macro]
|
||||
pub fn nodarium_definition_file(input: TokenStream) -> TokenStream {
|
||||
let path_lit = syn::parse_macro_input!(input as syn::LitStr);
|
||||
@@ -211,23 +105,30 @@ pub fn nodarium_definition_file(input: TokenStream) -> TokenStream {
|
||||
let full_path = Path::new(&project_dir).join(&file_path);
|
||||
|
||||
let json_content = fs::read_to_string(&full_path).unwrap_or_else(|err| {
|
||||
panic!("Failed to read JSON file at '{project_dir}/{file_path}': {err}",)
|
||||
panic!("Failed to read JSON file at '{}/{}': {}", project_dir, file_path, err)
|
||||
});
|
||||
|
||||
let _: NodeDefinition = serde_json::from_str(&json_content).unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"JSON file contains invalid JSON: \n{} \n{}",
|
||||
err,
|
||||
add_line_numbers(json_content.clone())
|
||||
)
|
||||
panic!("JSON file contains invalid JSON: \n{} \n{}", err, add_line_numbers(json_content.clone()))
|
||||
});
|
||||
|
||||
// We use the span from the input path literal
|
||||
let bytes = syn::LitByteStr::new(json_content.as_bytes(), path_lit.span());
|
||||
let len = json_content.len();
|
||||
|
||||
let expanded = quote! {
|
||||
#[link_section = "nodarium_definition"]
|
||||
static DEFINITION_DATA: [u8; #len] = *#bytes;
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn get_definition_ptr() -> *const u8 {
|
||||
DEFINITION_DATA.as_ptr()
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn get_definition_len() -> usize {
|
||||
DEFINITION_DATA.len()
|
||||
}
|
||||
};
|
||||
|
||||
TokenStream::from(expanded)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@nodarium/types",
|
||||
"version": "0.0.3",
|
||||
"version": "0.0.0",
|
||||
"description": "",
|
||||
"main": "src/index.ts",
|
||||
"scripts": {
|
||||
@@ -17,7 +17,7 @@
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"zod": "^4.3.6"
|
||||
"zod": "^4.3.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"dprint": "^0.51.1"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user