chore: setup linting
@@ -1,21 +1,21 @@
-import { test, expect } from "vitest"
-import { encodeFloat, decodeFloat } from "./encoding"
+import { expect, test } from 'vitest';
+import { decodeFloat, encodeFloat } from './encoding';
 
-test("encode_float", () => {
+test('encode_float', () => {
   const input = 1.23;
-  const encoded = encodeFloat(input)
-  const output = decodeFloat(encoded)
-  console.log(input, output)
+  const encoded = encodeFloat(input);
+  const output = decodeFloat(encoded);
+  console.log(input, output);
   expect(output).toBeCloseTo(input);
 });
 
-test("encode 2.0", () => {
+test('encode 2.0', () => {
   const input = 2.0;
-  const encoded = encodeFloat(input)
-  expect(encoded).toEqual(1073741824)
+  const encoded = encodeFloat(input);
+  expect(encoded).toEqual(1073741824);
 });
 
-test("floating point imprecision", () => {
+test('floating point imprecision', () => {
   let maxError = 0;
   new Array(10_000).fill(null).forEach((_, i) => {
     const input = i < 5_000 ? i : Math.random() * 100;
@@ -32,7 +32,7 @@ test("floating point imprecision", () => {
 });
 
 // Test with negative numbers
-test("negative numbers", () => {
+test('negative numbers', () => {
   const inputs = [-1, -0.5, -123.456, -0.0001];
   inputs.forEach(input => {
     const encoded = encodeFloat(input);
@@ -42,31 +42,31 @@ test("negative numbers", () => {
 });
 
 // Test with very small numbers
-test("very small numbers", () => {
+test('very small numbers', () => {
   const input = 1.2345e-38;
-  const encoded = encodeFloat(input)
-  const output = decodeFloat(encoded)
+  const encoded = encodeFloat(input);
+  const output = decodeFloat(encoded);
   expect(output).toBeCloseTo(input);
 });
 
 // Test with zero
-test("zero", () => {
+test('zero', () => {
   const input = 0;
-  const encoded = encodeFloat(input)
-  const output = decodeFloat(encoded)
+  const encoded = encodeFloat(input);
+  const output = decodeFloat(encoded);
   expect(output).toBe(0);
 });
 
 // Test with infinity
-test("infinity", () => {
+test('infinity', () => {
   const input = Infinity;
-  const encoded = encodeFloat(input)
-  const output = decodeFloat(encoded)
+  const encoded = encodeFloat(input);
+  const output = decodeFloat(encoded);
   expect(output).toBe(Infinity);
 });
 
 // Test with large numbers
-test("large numbers", () => {
+test('large numbers', () => {
   const inputs = [1e+5, 1e+10];
   inputs.forEach(input => {
     const encoded = encodeFloat(input);

@@ -4,7 +4,7 @@ const view = new DataView(buffer);
 
 export function encodeFloat(value: number): number {
   // Write the number as a float to the buffer
-  view.setFloat32(0, value, true); // 'true' for little-endian
+  view.setFloat32(0, value, true); // 'true' for little-endian
 
   // Read the buffer as an integer
   return view.getInt32(0, true);

@@ -1,4 +1,4 @@
-import { test, expect } from 'vitest';
+import { expect, test } from 'vitest';
 import { fastHashArrayBuffer, fastHashString } from './fastHash';
 
 test('fastHashString doesnt produce clashes', () => {
@@ -10,8 +10,7 @@ test('fastHashString doesnt produce clashes', () => {
   expect(hashB).toEqual(hashC);
 });
 
-test("fastHashArray doesnt product collisions", () => {
-
+test('fastHashArray doesnt product collisions', () => {
   const a = new Int32Array(1000);
 
   const hash_a = fastHashArrayBuffer(a);
@@ -20,11 +19,9 @@ test("fastHashArray doesnt product collisions", () => {
   const hash_b = fastHashArrayBuffer(a);
 
   expect(hash_a).not.toEqual(hash_b);
-
 });
 
 test('fastHashArray is fast(ish) < 20ms', () => {
-
   const a = new Int32Array(10_000);
 
   const t0 = performance.now();

@@ -1,43 +1,43 @@
 export function fastHashArrayBuffer(input: string | Int32Array): string {
-  const mask = (1n << 64n) - 1n
+  const mask = (1n << 64n) - 1n;
 
   // FNV-1a 64-bit constants
-  let h = 0xcbf29ce484222325n // offset basis
-  const FNV_PRIME = 0x100000001b3n
+  let h = 0xcbf29ce484222325n; // offset basis
+  const FNV_PRIME = 0x100000001b3n;
 
   // get bytes for string or Int32Array
-  let bytes: Uint8Array
-  if (typeof input === "string") {
+  let bytes: Uint8Array;
+  if (typeof input === 'string') {
     // utf-8 encoding
-    bytes = new TextEncoder().encode(input)
+    bytes = new TextEncoder().encode(input);
   } else {
     // Int32Array -> bytes (little-endian)
-    bytes = new Uint8Array(input.length * 4)
+    bytes = new Uint8Array(input.length * 4);
     for (let i = 0; i < input.length; i++) {
-      const v = input[i] >>> 0 // ensure unsigned 32-bit
-      const base = i * 4
-      bytes[base] = v & 0xff
-      bytes[base + 1] = (v >>> 8) & 0xff
-      bytes[base + 2] = (v >>> 16) & 0xff
-      bytes[base + 3] = (v >>> 24) & 0xff
+      const v = input[i] >>> 0; // ensure unsigned 32-bit
+      const base = i * 4;
+      bytes[base] = v & 0xff;
+      bytes[base + 1] = (v >>> 8) & 0xff;
+      bytes[base + 2] = (v >>> 16) & 0xff;
+      bytes[base + 3] = (v >>> 24) & 0xff;
     }
   }
 
   // FNV-1a byte-wise
   for (let i = 0; i < bytes.length; i++) {
-    h = (h ^ BigInt(bytes[i])) & mask
-    h = (h * FNV_PRIME) & mask
+    h = (h ^ BigInt(bytes[i])) & mask;
+    h = (h * FNV_PRIME) & mask;
   }
 
   // MurmurHash3's fmix64 finalizer (good avalanche)
-  h ^= h >> 33n
-  h = (h * 0xff51afd7ed558ccdn) & mask
-  h ^= h >> 33n
-  h = (h * 0xc4ceb9fe1a85ec53n) & mask
-  h ^= h >> 33n
+  h ^= h >> 33n;
+  h = (h * 0xff51afd7ed558ccdn) & mask;
+  h ^= h >> 33n;
+  h = (h * 0xc4ceb9fe1a85ec53n) & mask;
+  h ^= h >> 33n;
 
   // to 16-char hex
-  return h.toString(16).padStart(16, "0").slice(-16)
+  return h.toString(16).padStart(16, '0').slice(-16);
 }
 
 export function fastHashString(input: string) {

@@ -1,25 +1,23 @@
-import { expect, test } from 'vitest'
-import { decodeNestedArray, encodeNestedArray, concatEncodedArrays } from './flatTree'
-
-test("it correctly concats nested arrays", () => {
+import { expect, test } from 'vitest';
+import { concatEncodedArrays, decodeNestedArray, encodeNestedArray } from './flatTree';
+
+test('it correctly concats nested arrays', () => {
   const input_a = encodeNestedArray([1, 2, 3]);
   const input_b = 2;
   const input_c = encodeNestedArray([4, 5, 6]);
 
   const output = concatEncodedArrays([input_a, input_b, input_c]);
 
-  console.log("Output", output);
+  console.log('Output', output);
 
   const decoded = decodeNestedArray(output);
 
   expect(decoded[0]).toEqual([1, 2, 3]);
   expect(decoded[1]).toEqual(2);
   expect(decoded[2]).toEqual([4, 5, 6]);
-
 });
 
-test("it correctly concats nested arrays with nested arrays", () => {
+test('it correctly concats nested arrays with nested arrays', () => {
   const input_c = encodeNestedArray([1, 2, 3]);
   const output = concatEncodedArrays([42, 12, input_c]);
   const decoded = decodeNestedArray(output);
@@ -78,11 +76,11 @@ test('it correctly handles sequential nesting', () => {
 });
 
 // Test with mixed data types (if supported)
-// Note: This test assumes your implementation supports mixed types.
+// Note: This test assumes your implementation supports mixed types.
 // If not, you can ignore or remove this test.
 test('it correctly handles arrays with mixed data types', () => {
   const input = [1, 'text', [true, [null, ['another text']]]];
-  //@ts-ignore
+  // @ts-ignore
   const decoded = decodeNestedArray(encodeNestedArray(input));
   expect(decoded).toEqual(input);
 });

@@ -1,7 +1,7 @@
 type SparseArray<T = number> = (T | T[] | SparseArray<T>)[];
 
 export function concatEncodedArrays(
-  input: (number | number[] | Int32Array)[],
+  input: (number | number[] | Int32Array)[]
 ): Int32Array {
   let totalLength = 4;
   for (let i = 0; i < input.length; i++) {
@@ -86,7 +86,7 @@ function decode_recursive(dense: number[] | Int32Array, index = 0) {
       // Opening bracket detected
       const [p, nextIndex, _nextBracketIndex] = decode_recursive(
         dense,
-        index,
+        index
       );
       decoded.push(...p);
       index = nextIndex + 1;
@@ -109,12 +109,10 @@ export function decodeNestedArray(dense: number[] | Int32Array) {
 }
 
 export function splitNestedArray(input: Int32Array) {
-
   let index = 0;
   const length = input.length;
   let res: Int32Array[] = [];
 
-
   let nextBracketIndex = 0;
   let argStartIndex = 0;
   let depth = -1;

@@ -1,6 +1,6 @@
-export * from "./wasm-wrapper";
-export * from "./flatTree"
-export * from "./encoding"
-export * from "./fastHash"
-export * from "./logger"
-export * from "./performance"
+export * from './encoding';
+export * from './fastHash';
+export * from './flatTree';
+export * from './logger';
+export * from './performance';
+export * from './wasm-wrapper';

@@ -7,23 +7,34 @@ export const createLogger = (() => {
     let isGrouped = false;
 
     function s(color: string, ...args: any) {
-      return isGrouped ? [...args] : [`[%c${scope.padEnd(maxLength, " ")}]:`, `color: ${color}`, ...args];
+      return isGrouped
+        ? [...args]
+        : [`[%c${scope.padEnd(maxLength, ' ')}]:`, `color: ${color}`, ...args];
     }
 
     return {
-      log: (...args: any[]) => !muted && console.log(...s("#888", ...args)),
-      info: (...args: any[]) => !muted && console.info(...s("#888", ...args)),
-      warn: (...args: any[]) => !muted && console.warn(...s("#888", ...args)),
-      error: (...args: any[]) => console.error(...s("#f88", ...args)),
-      group: (...args: any[]) => { if (!muted) { console.groupCollapsed(...s("#888", ...args)); isGrouped = true; } },
-      groupEnd: () => { if (!muted) { console.groupEnd(); isGrouped = false } },
+      log: (...args: any[]) => !muted && console.log(...s('#888', ...args)),
+      info: (...args: any[]) => !muted && console.info(...s('#888', ...args)),
+      warn: (...args: any[]) => !muted && console.warn(...s('#888', ...args)),
+      error: (...args: any[]) => console.error(...s('#f88', ...args)),
+      group: (...args: any[]) => {
+        if (!muted) {
+          console.groupCollapsed(...s('#888', ...args));
+          isGrouped = true;
+        }
+      },
+      groupEnd: () => {
+        if (!muted) {
+          console.groupEnd();
+          isGrouped = false;
+        }
+      },
       mute() {
        muted = true;
      },
      unmute() {
        muted = false;
      }
-
-  }
-}
+    };
+  };
 })();

@@ -11,7 +11,6 @@ export interface PerformanceStore {
 }
 
 export function createPerformanceStore(): PerformanceStore {
-
   let data: PerformanceData = [];
 
   let currentRun: Record<string, number[]> | undefined;
@@ -24,7 +23,7 @@ export function createPerformanceStore(): PerformanceStore {
     return () => {
      const i = listeners.indexOf(cb);
      if (i > -1) listeners.splice(i, 1);
-    }
+    };
  }
 
  function set(v: PerformanceData) {
@@ -37,12 +36,12 @@ export function createPerformanceStore(): PerformanceStore {
    lastPoint = undefined;
    temp = {
      start: performance.now()
-    }
+    };
  }
 
  function stopRun() {
    if (currentRun && temp) {
-      currentRun["total"] = [performance.now() - temp.start];
+      currentRun['total'] = [performance.now() - temp.start];
      data.push(currentRun);
      data = data.slice(-100);
      currentRun = undefined;
@@ -69,7 +68,6 @@ export function createPerformanceStore(): PerformanceStore {
  }
 
  function mergeData(newData: PerformanceData[number]) {
-
    let r = currentRun;
    if (!r) return;
 
@@ -99,5 +97,5 @@ export function createPerformanceStore(): PerformanceStore {
    endPoint,
    mergeData,
    get
-  }
+  };
 }

@@ -13,12 +13,12 @@ export function createWasmWrapper(buffer: ArrayBuffer) {
     host_log_panic: (ptr: number, len: number) => {
       if (!exports) return;
       const view = new Uint8Array(exports.memory.buffer, ptr, len);
-      console.error("RUST PANIC:", new TextDecoder().decode(view));
+      console.error('RUST PANIC:', new TextDecoder().decode(view));
     },
     host_log: (ptr: number, len: number) => {
       if (!exports) return;
       const view = new Uint8Array(exports.memory.buffer, ptr, len);
-      console.log("RUST:", new TextDecoder().decode(view));
+      console.log('RUST:', new TextDecoder().decode(view));
     }
   }
 };
@@ -43,7 +43,7 @@ export function createWasmWrapper(buffer: ArrayBuffer) {
   }
 
   function get_definition() {
-    const sections = WebAssembly.Module.customSections(module, "nodarium_definition");
+    const sections = WebAssembly.Module.customSections(module, 'nodarium_definition');
     if (sections.length > 0) {
       const decoder = new TextDecoder();
       const jsonString = decoder.decode(sections[0]);
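
The changes above are mechanical formatter output: double quotes become single quotes, missing semicolons are added, long one-liners are wrapped, blank lines at block boundaries and duplicate blank lines are dropped, trailing commas in argument and parameter lists are removed, and export/import lists are alphabetized. A Prettier setup roughly like the following would produce most of this; it is a hypothetical sketch for illustration, not a file included in this commit, and the import sorting in particular suggests an additional ESLint rule or Prettier sort-imports plugin.

// prettier.config.mjs — hypothetical sketch, not part of this commit
/** @type {import('prettier').Config} */
export default {
  semi: true, // add the missing semicolons seen throughout the diff
  singleQuote: true, // "vitest" -> 'vitest'
  trailingComma: 'none', // matches the removed trailing commas in parameter/argument lists
  arrowParens: 'avoid' // consistent with `inputs.forEach(input => { ... })` being left unchanged
};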