Compare commits
7 Commits
f80aa2dcfc
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
01bc9a8d15
|
|||
|
ddcd638937
|
|||
|
044e516ed3
|
|||
|
5c39d8add2
|
|||
|
f7c89046d1
|
|||
|
64b811f330
|
|||
|
77593fe3b4
|
@@ -7,6 +7,7 @@ const alicePublicKey = await alice.getPublicKey();
|
||||
const bobPublicKey = await bob.getPublicKey();
|
||||
|
||||
const keyStart = performance.now();
|
||||
|
||||
const aliceSharedSecret = await alice.getSharedSecret(bobPublicKey);
|
||||
const bobSharedSecret = await bob.getSharedSecret(alicePublicKey);
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { AESKey } from '../src/crypto/aes-key.js';
|
||||
import { StorageMemory, EncryptedStorage, type BaseStorage } from '../src/storage/index.js';
|
||||
import { BTreeCache, KvCache } from '../src/cache/index.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
@@ -49,7 +50,13 @@ function fmtOps(ops: number): string {
|
||||
/**
|
||||
* Run a full suite of benchmarks against a given storage instance.
|
||||
*/
|
||||
async function benchmarkStorage(label: string, storage: BaseStorage<Doc>, docs: Doc[]) {
|
||||
async function benchmarkStorage(
|
||||
label: string,
|
||||
storage: BaseStorage<Doc>,
|
||||
docs: Doc[],
|
||||
options: { supportsRangeOps?: boolean; hasAgeIndex?: boolean } = {},
|
||||
) {
|
||||
const { supportsRangeOps = true, hasAgeIndex = false } = options;
|
||||
const count = docs.length;
|
||||
console.log(`\n${'='.repeat(60)}`);
|
||||
console.log(` ${label} (${count.toLocaleString()} documents)`);
|
||||
@@ -67,32 +74,63 @@ async function benchmarkStorage(label: string, storage: BaseStorage<Doc>, docs:
|
||||
});
|
||||
console.log(` find() ${findAllMs.toFixed(2)}ms (${fmtOps((count / findAllMs) * 1000)} docs/sec)`);
|
||||
|
||||
// --- Find by indexed field (single-key lookup, repeated) ---
|
||||
// --- Find by indexed field (equality) ---
|
||||
const lookupCount = Math.min(count, 1_000);
|
||||
const findIndexedMs = await time(async () => {
|
||||
for (let i = 0; i < lookupCount; i++) {
|
||||
await storage.findOne({ id: `id-${i}` } as Partial<Doc>);
|
||||
await storage.findOne({ id: `id-${i}` });
|
||||
}
|
||||
});
|
||||
console.log(` findOne indexed ${findIndexedMs.toFixed(2)}ms (${fmtOps((lookupCount / findIndexedMs) * 1000)} ops/sec) [${lookupCount} lookups]`);
|
||||
|
||||
// --- Find by non-indexed field (full scan, repeated) ---
|
||||
// --- Find by non-indexed field (full scan) ---
|
||||
const scanCount = Math.min(count, 1_000);
|
||||
const findScanMs = await time(async () => {
|
||||
for (let i = 0; i < scanCount; i++) {
|
||||
await storage.findOne({ email: `user-${i}@test.com` } as Partial<Doc>);
|
||||
await storage.findOne({ email: `user-${i}@test.com` });
|
||||
}
|
||||
});
|
||||
console.log(` findOne scan ${findScanMs.toFixed(2)}ms (${fmtOps((scanCount / findScanMs) * 1000)} ops/sec) [${scanCount} lookups]`);
|
||||
|
||||
// --- Range queries ---
|
||||
if (supportsRangeOps) {
|
||||
// Wide range: 20% selectivity (10 out of 50 age values).
|
||||
const rangeCount = Math.min(count, 100);
|
||||
let rangeWideTotal = 0;
|
||||
const findRangeWideMs = await time(async () => {
|
||||
for (let i = 0; i < rangeCount; i++) {
|
||||
const results = await storage.find({ age: { $gte: 30, $lt: 40 } });
|
||||
rangeWideTotal += results.length;
|
||||
}
|
||||
});
|
||||
const indexLabel = hasAgeIndex ? 'B+Tree' : 'scan';
|
||||
console.log(` find wide [${indexLabel}] ${findRangeWideMs.toFixed(2)}ms (${fmtOps((rangeCount / findRangeWideMs) * 1000)} ops/sec) [${rangeCount}x, ~${Math.round(rangeWideTotal / rangeCount)} hits, 20% sel.]`);
|
||||
|
||||
// Narrow range: 2% selectivity (1 out of 50 age values).
|
||||
let rangeNarrowTotal = 0;
|
||||
const findRangeNarrowMs = await time(async () => {
|
||||
for (let i = 0; i < rangeCount; i++) {
|
||||
const results = await storage.find({ age: { $gte: 42, $lt: 43 } });
|
||||
rangeNarrowTotal += results.length;
|
||||
}
|
||||
});
|
||||
console.log(` find narrow [${indexLabel}] ${findRangeNarrowMs.toFixed(2)}ms (${fmtOps((rangeCount / findRangeNarrowMs) * 1000)} ops/sec) [${rangeCount}x, ~${Math.round(rangeNarrowTotal / rangeCount)} hits, 2% sel.]`);
|
||||
|
||||
// --- Combined equality + operator ---
|
||||
const comboCount = Math.min(count, 1_000);
|
||||
const findComboMs = await time(async () => {
|
||||
for (let i = 0; i < comboCount; i++) {
|
||||
await storage.find({ id: `id-${i}`, age: { $gte: 20 } });
|
||||
}
|
||||
});
|
||||
console.log(` find idx+operator ${findComboMs.toFixed(2)}ms (${fmtOps((comboCount / findComboMs) * 1000)} ops/sec) [${comboCount} queries]`);
|
||||
}
|
||||
|
||||
// --- Update by indexed field ---
|
||||
const updateCount = Math.min(count, 1_000);
|
||||
const updateMs = await time(async () => {
|
||||
for (let i = 0; i < updateCount; i++) {
|
||||
await storage.updateOne(
|
||||
{ id: `id-${i}` } as Partial<Doc>,
|
||||
{ age: 99 } as Partial<Doc>,
|
||||
);
|
||||
await storage.updateOne({ id: `id-${i}` }, { name: `updated-${i}` });
|
||||
}
|
||||
});
|
||||
console.log(` updateOne indexed ${updateMs.toFixed(2)}ms (${fmtOps((updateCount / updateMs) * 1000)} ops/sec) [${updateCount} updates]`);
|
||||
@@ -101,7 +139,7 @@ async function benchmarkStorage(label: string, storage: BaseStorage<Doc>, docs:
|
||||
const deleteCount = Math.min(count, 1_000);
|
||||
const deleteMs = await time(async () => {
|
||||
for (let i = 0; i < deleteCount; i++) {
|
||||
await storage.deleteOne({ id: `id-${i}` } as Partial<Doc>);
|
||||
await storage.deleteOne({ id: `id-${i}` });
|
||||
}
|
||||
});
|
||||
console.log(` deleteOne indexed ${deleteMs.toFixed(2)}ms (${fmtOps((deleteCount / deleteMs) * 1000)} ops/sec) [${deleteCount} deletes]`);
|
||||
@@ -116,7 +154,7 @@ async function benchmarkStorage(label: string, storage: BaseStorage<Doc>, docs:
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// StorageMemory — indexed vs non-indexed
|
||||
// StorageMemory — B+ Tree range queries vs full scan
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const DOC_COUNTS = [1_000, 10_000, 50_000];
|
||||
@@ -124,32 +162,55 @@ const DOC_COUNTS = [1_000, 10_000, 50_000];
|
||||
for (const count of DOC_COUNTS) {
|
||||
const docs = generateDocs(count);
|
||||
|
||||
// Indexes on id, name, AND age with explicit B+ Tree cache.
|
||||
const indexedWithAgeBTree = StorageMemory.from<Doc>(
|
||||
['id', 'name', 'age'],
|
||||
new BTreeCache(),
|
||||
);
|
||||
await benchmarkStorage('StorageMemory (B+Tree cache, indexed: id,name,age)', indexedWithAgeBTree, docs, { hasAgeIndex: true });
|
||||
|
||||
// Same indexes, but KV cache (no range support).
|
||||
const indexedWithAgeKv = StorageMemory.from<Doc>(
|
||||
['id', 'name', 'age'],
|
||||
new KvCache(),
|
||||
);
|
||||
await benchmarkStorage('StorageMemory (KV cache, indexed: id,name,age)', indexedWithAgeKv, docs);
|
||||
|
||||
// Indexes on id, name only — range queries on age fall back to full scan.
|
||||
const indexed = StorageMemory.from<Doc>(['id', 'name']);
|
||||
await benchmarkStorage('StorageMemory (indexed: id,name)', indexed, docs);
|
||||
|
||||
// No indexes at all.
|
||||
const noIndex = StorageMemory.from<Doc>();
|
||||
await benchmarkStorage('StorageMemory (no indexes)', noIndex, docs);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// EncryptedStorage — crypto overhead dominates, so use smaller counts
|
||||
// EncryptedStorage
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const ENCRYPTED_DOC_COUNTS = [100, 1_000, 10_000];
|
||||
const ENCRYPTED_DOC_COUNTS = [100, 1_000];
|
||||
const encryptionKey = await AESKey.fromSeed('benchmark-key');
|
||||
|
||||
for (const count of ENCRYPTED_DOC_COUNTS) {
|
||||
const docs = generateDocs(count);
|
||||
|
||||
// Encrypted + indexed backing store.
|
||||
const encBase = StorageMemory.from<Record<string, string>>(['id', 'name']);
|
||||
const encrypted = EncryptedStorage.from<Doc>(encBase, encryptionKey);
|
||||
await benchmarkStorage('EncryptedStorage (indexed backing store)', encrypted, docs);
|
||||
// Indexed + plaintextKeys (age) — range queries on age use B+ Tree via backing store.
|
||||
const encBaseA = StorageMemory.from<Record<string, any>>(['id', 'name', 'age']);
|
||||
const encA = EncryptedStorage.from<Doc>(encBaseA, encryptionKey, {
|
||||
plaintextKeys: ['age'],
|
||||
});
|
||||
await benchmarkStorage('Encrypted (indexed+age, plaintextKeys: age)', encA, docs, { hasAgeIndex: true });
|
||||
|
||||
// Encrypted + no-index backing store.
|
||||
const encBaseNoIdx = StorageMemory.from<Record<string, string>>();
|
||||
const encryptedNoIdx = EncryptedStorage.from<Doc>(encBaseNoIdx, encryptionKey);
|
||||
await benchmarkStorage('EncryptedStorage (no indexes)', encryptedNoIdx, docs);
|
||||
// Indexed, fully encrypted — no range ops.
|
||||
const encBaseB = StorageMemory.from<Record<string, any>>(['id', 'name']);
|
||||
const encB = EncryptedStorage.from<Doc>(encBaseB, encryptionKey);
|
||||
await benchmarkStorage('Encrypted (indexed, fully encrypted)', encB, docs, { supportsRangeOps: false });
|
||||
|
||||
// No indexes, fully encrypted — worst case.
|
||||
const encBaseC = StorageMemory.from<Record<string, any>>();
|
||||
const encC = EncryptedStorage.from<Doc>(encBaseC, encryptionKey);
|
||||
await benchmarkStorage('Encrypted (no indexes, fully encrypted)', encC, docs, { supportsRangeOps: false });
|
||||
}
|
||||
|
||||
console.log('\nDone.\n');
|
||||
|
||||
@@ -6,8 +6,13 @@
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"format": "prettier --write .",
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
|
||||
"test": "vitest --run",
|
||||
"test:watch": "vitest",
|
||||
|
||||
"benchmark:sha256": "tsx benchmarks/sha256.ts",
|
||||
"benchmark:diffie-helman": "tsx benchmarks/diffie-helman.ts",
|
||||
"benchmark:encryption": "tsx benchmarks/sekp256k1.ts",
|
||||
"benchmark:storage": "tsx benchmarks/storage.ts"
|
||||
},
|
||||
"keywords": [],
|
||||
|
||||
79
src/cache/b-tree-cache.ts
vendored
Normal file
79
src/cache/b-tree-cache.ts
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
import { BPlusTree, type BPlusTreeEntry } from 'src/utils/btree.js';
|
||||
|
||||
import { BaseCache, type CacheRangeOptions } from './base-cache.js';
|
||||
|
||||
function tupleCompare(a: any[], b: any[]): number {
|
||||
const len = Math.min(a.length, b.length);
|
||||
for (let i = 0; i < len; i++) {
|
||||
if (a[i] < b[i]) return -1;
|
||||
if (a[i] > b[i]) return 1;
|
||||
}
|
||||
return a.length - b.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* B+ tree-backed cache implementation.
|
||||
*
|
||||
* - Equality: O(log n)
|
||||
* - Range: O(log n + k)
|
||||
*/
|
||||
export class BTreeCache extends BaseCache {
|
||||
private trees = new Map<string, BPlusTree<any, number>>();
|
||||
|
||||
registerIndex(indexName: string, fields: string[]): void {
|
||||
if (this.trees.has(indexName)) return;
|
||||
const comparator = fields.length > 1 ? tupleCompare : undefined;
|
||||
this.trees.set(indexName, new BPlusTree<any, number>(32, comparator));
|
||||
}
|
||||
|
||||
clearIndex(indexName: string): void {
|
||||
const tree = this.trees.get(indexName);
|
||||
if (!tree) return;
|
||||
tree.clear();
|
||||
}
|
||||
|
||||
insert(indexName: string, key: any, internalKey: number): void {
|
||||
const tree = this.trees.get(indexName);
|
||||
if (!tree) return;
|
||||
tree.insert(key, internalKey);
|
||||
}
|
||||
|
||||
delete(indexName: string, key: any, internalKey: number): void {
|
||||
const tree = this.trees.get(indexName);
|
||||
if (!tree) return;
|
||||
tree.delete(key, internalKey);
|
||||
}
|
||||
|
||||
get(indexName: string, key: any): Iterable<number> {
|
||||
const tree = this.trees.get(indexName);
|
||||
if (!tree) return [];
|
||||
return tree.get(key) ?? [];
|
||||
}
|
||||
|
||||
range(
|
||||
indexName: string,
|
||||
min?: any,
|
||||
max?: any,
|
||||
options: CacheRangeOptions = {},
|
||||
): Iterable<number> | null {
|
||||
const tree = this.trees.get(indexName);
|
||||
if (!tree) return [];
|
||||
const entries = tree.range(min, max, options);
|
||||
return this.flattenEntryKeys(entries);
|
||||
}
|
||||
|
||||
createChild(): BaseCache {
|
||||
return new BTreeCache();
|
||||
}
|
||||
|
||||
private flattenEntryKeys(entries: BPlusTreeEntry<any, number>[]): number[] {
|
||||
const result: number[] = [];
|
||||
for (const entry of entries) {
|
||||
for (const key of entry.values) {
|
||||
result.push(key);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
37
src/cache/base-cache.ts
vendored
Normal file
37
src/cache/base-cache.ts
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
/**
 * Options controlling whether the endpoints of a `range` query are
 * included in the results.
 */
export type CacheRangeOptions = {
  /** Include keys equal to `min` (i.e. >= rather than >). */
  lowerInclusive?: boolean;
  /** Include keys equal to `max` (i.e. <= rather than <). */
  upperInclusive?: boolean;
};

/**
 * Shared cache abstraction used by storage adapters for secondary indexes.
 *
 * Implementations may support only equality (`get`) or both equality and
 * range queries (`range`). Returning `null` from `range` indicates the cache
 * cannot serve that range query efficiently.
 */
export abstract class BaseCache {
  /** Register a (possibly compound) index over `fields` under `indexName`. */
  abstract registerIndex(indexName: string, fields: string[]): void;

  /** Remove all entries stored under `indexName`. */
  abstract clearIndex(indexName: string): void;

  /** Record that `internalKey` is indexed under `key` for `indexName`. */
  abstract insert(indexName: string, key: any, internalKey: number): void;

  /** Remove the `key` -> `internalKey` association from `indexName`. */
  abstract delete(indexName: string, key: any, internalKey: number): void;

  /** All internal keys indexed under exactly `key`. */
  abstract get(indexName: string, key: any): Iterable<number>;

  /**
   * All internal keys whose index key lies between `min` and `max`
   * (bound inclusivity per `options`). Returns `null` when this cache
   * cannot serve range queries, so the caller can fall back to a scan.
   */
  abstract range(
    indexName: string,
    min?: any,
    max?: any,
    options?: CacheRangeOptions,
  ): Iterable<number> | null;

  /**
   * Create a new empty cache instance of the same concrete type.
   * Used by deriveChild() to preserve cache strategy across children.
   */
  abstract createChild(): BaseCache;
}
|
||||
|
||||
4
src/cache/index.ts
vendored
Normal file
4
src/cache/index.ts
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
// Barrel module re-exporting the cache abstraction and its implementations.
export * from './base-cache.js';
export * from './b-tree-cache.js';
export * from './kv-cache.js';
|
||||
|
||||
69
src/cache/kv-cache.ts
vendored
Normal file
69
src/cache/kv-cache.ts
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
import { BaseCache, type CacheRangeOptions } from './base-cache.js';
|
||||
|
||||
/**
|
||||
* Hash-map cache implementation.
|
||||
*
|
||||
* - Equality: O(1) average
|
||||
* - Range: unsupported (returns null -> caller falls back to scan)
|
||||
*/
|
||||
export class KvCache extends BaseCache {
|
||||
private indexes = new Map<string, Map<string, Set<number>>>();
|
||||
|
||||
registerIndex(indexName: string, _fields: string[]): void {
|
||||
if (!this.indexes.has(indexName)) {
|
||||
this.indexes.set(indexName, new Map());
|
||||
}
|
||||
}
|
||||
|
||||
clearIndex(indexName: string): void {
|
||||
this.indexes.get(indexName)?.clear();
|
||||
}
|
||||
|
||||
insert(indexName: string, key: any, internalKey: number): void {
|
||||
const index = this.indexes.get(indexName);
|
||||
if (!index) return;
|
||||
|
||||
const keyStr = this.encodeKey(key);
|
||||
const set = index.get(keyStr) ?? new Set<number>();
|
||||
set.add(internalKey);
|
||||
index.set(keyStr, set);
|
||||
}
|
||||
|
||||
delete(indexName: string, key: any, internalKey: number): void {
|
||||
const index = this.indexes.get(indexName);
|
||||
if (!index) return;
|
||||
|
||||
const keyStr = this.encodeKey(key);
|
||||
const set = index.get(keyStr);
|
||||
if (!set) return;
|
||||
|
||||
set.delete(internalKey);
|
||||
if (set.size === 0) {
|
||||
index.delete(keyStr);
|
||||
}
|
||||
}
|
||||
|
||||
get(indexName: string, key: any): Iterable<number> {
|
||||
const index = this.indexes.get(indexName);
|
||||
if (!index) return [];
|
||||
return index.get(this.encodeKey(key)) ?? [];
|
||||
}
|
||||
|
||||
range(
|
||||
_indexName: string,
|
||||
_min?: any,
|
||||
_max?: any,
|
||||
_options?: CacheRangeOptions,
|
||||
): Iterable<number> | null {
|
||||
return null;
|
||||
}
|
||||
|
||||
createChild(): BaseCache {
|
||||
return new KvCache();
|
||||
}
|
||||
|
||||
private encodeKey(value: any): string {
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,24 @@
|
||||
* Only allow specific parameters, unless prefixed with 'x-'
|
||||
*/
|
||||
|
||||
/**
|
||||
* What I *think* this is:
|
||||
* Deps: List of transports
|
||||
*
|
||||
 * Receiving:
|
||||
 * 1. Create from a list of transports and a list of items.
|
||||
* 2. Starts listening on all of the transports and returns a URL (maybe a promise too? Not sure whether it should be a promise or event emitter. Maybe both?)
|
||||
 * 3. Once it receives a message, decrypt it and resolve the promise with the message.
|
||||
*
|
||||
* Sending:
|
||||
* 1. Create from a list of transports and the URL.
|
||||
* 2. Encrypt the message
|
||||
* 3. Send the encrypted message to all of the transports that are in both the requestURL and the list of transports.
|
||||
*
|
||||
* Possibilities:
|
||||
* - Non-ephemeral. Keep listeners alive to allow for multiple requests
|
||||
*/
|
||||
|
||||
import { z } from 'zod/v4';
|
||||
|
||||
import { PublicKey } from 'src/crypto/index.js';
|
||||
|
||||
@@ -19,6 +19,86 @@ export type FindOptions = {
|
||||
*/
|
||||
export type IndexDefinition = string[] | string[][];
|
||||
|
||||
/**
|
||||
* MongoDB-style comparison and string operators for a single field value.
|
||||
*
|
||||
* - Comparison operators ($eq, $ne, $lt, $lte, $gt, $gte) are available for
|
||||
* all value types.
|
||||
* - String operators ($startsWith, $contains) are only available when the
|
||||
* field value type extends `string`.
|
||||
* - Field-level $not wraps another operator set, inverting its result:
|
||||
* `{ age: { $not: { $gte: 18 } } }` matches documents where age < 18.
|
||||
*/
|
||||
export type ComparisonOperators<V> = {
|
||||
$eq?: V;
|
||||
$ne?: V;
|
||||
$lt?: V;
|
||||
$lte?: V;
|
||||
$gt?: V;
|
||||
$gte?: V;
|
||||
$not?: ComparisonOperators<V>;
|
||||
} & ([V] extends [string] ? { $startsWith?: string; $contains?: string } : {});
|
||||
|
||||
/**
|
||||
* A filter value for a single field — either a plain value (equality shorthand)
|
||||
* or an object of comparison operators.
|
||||
*/
|
||||
export type FieldFilter<V> = V | ComparisonOperators<V>;
|
||||
|
||||
/**
 * Keys that represent top-level logical operators in a filter,
 * as opposed to document field names.
 */
const LOGICAL_KEYS = new Set(['$and', '$or', '$not']);

/**
 * Returns true when `key` is a top-level logical operator ($and, $or, $not)
 * rather than a document field name.
 *
 * Lets filter-walking code separate logical combinators from per-field
 * conditions when processing a `Filter` object.
 */
export function isLogicalKey(key: string): boolean {
  return LOGICAL_KEYS.has(key);
}
|
||||
|
||||
/**
|
||||
* Query filter that supports equality shorthand, comparison operators,
|
||||
* and top-level logical operators ($and, $or, $not).
|
||||
*
|
||||
* @example
|
||||
* // Equality shorthand
|
||||
* { name: 'foo' }
|
||||
*
|
||||
* // Comparison operators
|
||||
* { age: { $gte: 18, $lt: 65 } }
|
||||
*
|
||||
* // String operators (type-safe — only on string fields)
|
||||
* { name: { $startsWith: 'A' } }
|
||||
*
|
||||
* // Logical operators
|
||||
* { $or: [{ name: 'foo' }, { age: { $gte: 18 } }] }
|
||||
* { $not: { name: 'bar' } }
|
||||
* { $and: [{ age: { $gte: 18 } }, { name: { $startsWith: 'A' } }] }
|
||||
*/
|
||||
export type Filter<T extends Record<string, any>> = {
|
||||
[K in keyof T]?: FieldFilter<T[K]>;
|
||||
} & {
|
||||
$and?: Filter<T>[];
|
||||
$or?: Filter<T>[];
|
||||
$not?: Filter<T>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Detect whether a filter value is an operator object (e.g. `{ $lt: 50 }`)
|
||||
* rather than a plain value. Guards against Date and Array which are objects
|
||||
* but represent document values, not operators.
|
||||
*/
|
||||
export function isOperatorObject(value: unknown): value is ComparisonOperators<any> {
|
||||
return value !== null
|
||||
&& typeof value === 'object'
|
||||
&& !Array.isArray(value)
|
||||
&& !(value instanceof Date)
|
||||
&& Object.keys(value).some((k) => k.startsWith('$'));
|
||||
}
|
||||
|
||||
export type StorageEvent<T = Record<string, any>> = {
|
||||
insert: {
|
||||
value: T;
|
||||
@@ -54,7 +134,7 @@ export abstract class BaseStorage<
|
||||
* Find a single document that matches the filter
|
||||
* @param filter MongoDB-like query filter
|
||||
*/
|
||||
async findOne(filter?: Partial<T>): Promise<T | null> {
|
||||
async findOne(filter?: Filter<T>): Promise<T | null> {
|
||||
const results = await this.find(filter);
|
||||
return results.length > 0 ? results[0] : null;
|
||||
}
|
||||
@@ -64,7 +144,7 @@ export abstract class BaseStorage<
|
||||
* @param filter MongoDB-like query filter
|
||||
* @param options Query options (limit, skip, sort)
|
||||
*/
|
||||
abstract find(filter?: Partial<T>, options?: FindOptions): Promise<T[]>;
|
||||
abstract find(filter?: Filter<T>, options?: FindOptions): Promise<T[]>;
|
||||
|
||||
/**
|
||||
* Update a document that matches the filter
|
||||
@@ -72,7 +152,7 @@ export abstract class BaseStorage<
|
||||
* @param update Document or fields to update
|
||||
* @returns True if a document was updated, false otherwise
|
||||
*/
|
||||
async updateOne(filter: Partial<T>, update?: Partial<T>): Promise<boolean> {
|
||||
async updateOne(filter: Filter<T>, update?: Partial<T>): Promise<boolean> {
|
||||
const results = await this.updateMany(filter, update, { limit: 1 });
|
||||
return results > 0;
|
||||
}
|
||||
@@ -85,7 +165,7 @@ export abstract class BaseStorage<
|
||||
* @returns Number of documents updated
|
||||
*/
|
||||
abstract updateMany(
|
||||
filter: Partial<T>,
|
||||
filter: Filter<T>,
|
||||
update: Partial<T>,
|
||||
options?: Partial<FindOptions>,
|
||||
): Promise<number>;
|
||||
@@ -95,7 +175,7 @@ export abstract class BaseStorage<
|
||||
* @param filter Query to match the document to delete
|
||||
* @returns True if a document was deleted, false otherwise
|
||||
*/
|
||||
async deleteOne(filter: Partial<T>): Promise<boolean> {
|
||||
async deleteOne(filter: Filter<T>): Promise<boolean> {
|
||||
const results = await this.deleteMany(filter, { limit: 1 });
|
||||
return results > 0;
|
||||
}
|
||||
@@ -107,8 +187,8 @@ export abstract class BaseStorage<
|
||||
* @returns Number of documents deleted
|
||||
*/
|
||||
abstract deleteMany(
|
||||
filter: Partial<T>,
|
||||
options: Partial<FindOptions>,
|
||||
filter: Filter<T>,
|
||||
options?: Partial<FindOptions>,
|
||||
): Promise<number>;
|
||||
|
||||
/**
|
||||
|
||||
@@ -3,15 +3,36 @@ import { Packr } from 'msgpackr';
|
||||
import { AESKey } from 'src/crypto/aes-key.js';
|
||||
import { Bytes } from 'src/crypto/bytes.js';
|
||||
|
||||
import { BaseStorage, type FindOptions } from './base-storage.js';
|
||||
import {
|
||||
BaseStorage,
|
||||
type ComparisonOperators,
|
||||
type FindOptions,
|
||||
type Filter,
|
||||
isOperatorObject,
|
||||
isLogicalKey,
|
||||
} from './base-storage.js';
|
||||
|
||||
import { encodeExtendedJson, decodeExtendedJson, encodeExtendedJsonObject, decodeExtendedJsonObject } from 'src/utils/ext-json.js';
|
||||
import { encodeExtendedJsonObject, decodeExtendedJsonObject } from 'src/utils/ext-json.js';
|
||||
|
||||
export type EncryptedStorageOptions = {
|
||||
/**
|
||||
* Fields that should be stored in plaintext (not encrypted).
|
||||
* These fields retain their original types in the backing store, which
|
||||
* allows comparison operators ($lt, $gt, etc.) to work on them.
|
||||
* All other fields are encrypted.
|
||||
*/
|
||||
plaintextKeys?: string[];
|
||||
};
|
||||
|
||||
export class EncryptedStorage<
|
||||
T extends Record<string, any> = Record<string, any>,
|
||||
> extends BaseStorage<T> {
|
||||
static from<T>(storage: BaseStorage<Record<string, string>>, key: AESKey) {
|
||||
return new EncryptedStorage<T>(storage, key);
|
||||
static from<T>(
|
||||
storage: BaseStorage<Record<string, any>>,
|
||||
key: AESKey,
|
||||
options?: EncryptedStorageOptions,
|
||||
) {
|
||||
return new EncryptedStorage<T>(storage, key, options);
|
||||
}
|
||||
|
||||
private readonly msgpackr = new Packr({
|
||||
@@ -33,34 +54,32 @@ export class EncryptedStorage<
|
||||
*/
|
||||
private readonly decryptCache = new Map<string, any>();
|
||||
|
||||
/** Set of field names that are stored in plaintext (not encrypted). */
|
||||
private readonly plaintextKeys: Set<string>;
|
||||
|
||||
constructor(
|
||||
private readonly storage: BaseStorage<Record<string, string>>,
|
||||
private readonly storage: BaseStorage<Record<string, any>>,
|
||||
private readonly key: AESKey,
|
||||
options?: EncryptedStorageOptions,
|
||||
) {
|
||||
super();
|
||||
|
||||
// Forward events from the underlying storage, decrypting the data
|
||||
this.storage.on('insert', async (event) => {
|
||||
// De-crypt the value before emitting the event.
|
||||
const decryptedValue = await this.convertToDecrypted(event.value as Record<string, string>);
|
||||
this.plaintextKeys = new Set(options?.plaintextKeys ?? []);
|
||||
|
||||
// Re-emit the insert event with the original payload.
|
||||
// Forward events from the underlying storage, converting data back.
|
||||
this.storage.on('insert', async (event) => {
|
||||
const decryptedValue = await this.convertFromStorage(event.value);
|
||||
this.emit('insert', { value: decryptedValue });
|
||||
});
|
||||
|
||||
this.storage.on('update', async (event) => {
|
||||
// Decrypt both old and new values before re-emitting.
|
||||
const decryptedOldValue = await this.convertToDecrypted(event.oldValue as Record<string, string>);
|
||||
const decryptedValue = await this.convertToDecrypted(event.value as Record<string, string>);
|
||||
|
||||
const decryptedOldValue = await this.convertFromStorage(event.oldValue);
|
||||
const decryptedValue = await this.convertFromStorage(event.value);
|
||||
this.emit('update', { oldValue: decryptedOldValue, value: decryptedValue });
|
||||
});
|
||||
|
||||
this.storage.on('delete', async (event) => {
|
||||
// De-crypt the value before emitting the event.
|
||||
const decryptedValue = await this.convertToDecrypted(event.value as Record<string, string>);
|
||||
|
||||
// Re-emit the delete event with the original payload.
|
||||
const decryptedValue = await this.convertFromStorage(event.value);
|
||||
this.emit('delete', { value: decryptedValue });
|
||||
});
|
||||
|
||||
@@ -70,53 +89,83 @@ export class EncryptedStorage<
|
||||
}
|
||||
|
||||
async insertMany(documents: Array<T>): Promise<void> {
|
||||
// Encrypt all documents in parallel.
|
||||
const encrypted = await Promise.all(
|
||||
documents.map((doc) => this.convertToEncrypted(doc)),
|
||||
const converted = await Promise.all(
|
||||
documents.map((doc) => this.convertForStorage(doc)),
|
||||
);
|
||||
await this.storage.insertMany(encrypted);
|
||||
await this.storage.insertMany(converted);
|
||||
}
|
||||
|
||||
async find(filter?: Partial<T>, options?: FindOptions): Promise<T[]> {
|
||||
const encryptedFilter = filter ? await this.convertToEncrypted(filter) : undefined;
|
||||
const documents = await this.storage.find(encryptedFilter, options);
|
||||
async find(filter?: Filter<T>, options?: FindOptions): Promise<T[]> {
|
||||
const convertedFilter = filter
|
||||
? await this.convertFilterForStorage(filter)
|
||||
: undefined;
|
||||
const documents = await this.storage.find(convertedFilter, options);
|
||||
return Promise.all(
|
||||
documents.map(async (document) => this.convertToDecrypted(document)),
|
||||
documents.map((doc) => this.convertFromStorage(doc)),
|
||||
);
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
filter: Filter<T>,
|
||||
update: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
const encryptedFilter = await this.convertToEncrypted(filter);
|
||||
const encryptedUpdate = await this.convertToEncrypted(update);
|
||||
return this.storage.updateMany(encryptedFilter, encryptedUpdate, options);
|
||||
const convertedFilter = await this.convertFilterForStorage(filter);
|
||||
const convertedUpdate = await this.convertForStorage(update);
|
||||
return this.storage.updateMany(convertedFilter, convertedUpdate, options);
|
||||
}
|
||||
|
||||
async deleteMany(
|
||||
filter: Partial<T>,
|
||||
filter: Filter<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
const encryptedFilter = await this.convertToEncrypted(filter);
|
||||
return this.storage.deleteMany(encryptedFilter, options);
|
||||
const convertedFilter = await this.convertFilterForStorage(filter);
|
||||
return this.storage.deleteMany(convertedFilter, options);
|
||||
}
|
||||
|
||||
deriveChild<C>(path: string): BaseStorage<C> {
|
||||
return EncryptedStorage.from(this.storage.deriveChild(path), this.key);
|
||||
return EncryptedStorage.from<C>(
|
||||
this.storage.deriveChild(path),
|
||||
this.key,
|
||||
{ plaintextKeys: [...this.plaintextKeys] },
|
||||
);
|
||||
}
|
||||
|
||||
private async convertToEncrypted(
|
||||
// ---------------------------------------------------------------------------
|
||||
// Storage conversion — documents (insert/update values)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Convert a document for storage. Encrypted fields are encrypted;
|
||||
* plaintext fields are passed through with value formatting only.
|
||||
*/
|
||||
private async convertForStorage(
|
||||
document: Partial<T>,
|
||||
): Promise<Record<string, string>> {
|
||||
const encrypted: Record<string, string> = {};
|
||||
): Promise<Record<string, any>> {
|
||||
const result: Record<string, any> = {};
|
||||
const formattedDocument = this.formatDocumentForEncryption(document);
|
||||
const entries = Object.entries(formattedDocument);
|
||||
|
||||
// Encrypt all fields in parallel, using the cache when possible.
|
||||
const results = await Promise.all(
|
||||
entries.map(async ([key, value]) => {
|
||||
// Split into plaintext and encrypted fields.
|
||||
const plaintextEntries: Array<[string, any]> = [];
|
||||
const encryptedEntries: Array<[string, any]> = [];
|
||||
|
||||
for (const entry of entries) {
|
||||
if (this.plaintextKeys.has(entry[0])) {
|
||||
plaintextEntries.push(entry);
|
||||
} else {
|
||||
encryptedEntries.push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
// Plaintext fields pass through directly.
|
||||
for (const [key, value] of plaintextEntries) {
|
||||
result[key] = value;
|
||||
}
|
||||
|
||||
// Encrypt fields in parallel, with memoization.
|
||||
const encrypted = await Promise.all(
|
||||
encryptedEntries.map(async ([key, value]) => {
|
||||
const bin = this.msgpackr.pack(value);
|
||||
const cacheKey = Bytes.from(bin).toBase64();
|
||||
|
||||
@@ -131,21 +180,36 @@ export class EncryptedStorage<
|
||||
}),
|
||||
);
|
||||
|
||||
for (const [key, ciphertext] of results) {
|
||||
encrypted[key] = ciphertext;
|
||||
for (const [key, ciphertext] of encrypted) {
|
||||
result[key] = ciphertext;
|
||||
}
|
||||
|
||||
return encrypted;
|
||||
return result;
|
||||
}
|
||||
|
||||
private async convertToDecrypted(
|
||||
document: Record<string, string>,
|
||||
/**
|
||||
* Convert a stored document back to its original form. Encrypted fields
|
||||
* are decrypted; plaintext fields are passed through with value formatting.
|
||||
*/
|
||||
private async convertFromStorage(
|
||||
document: Record<string, any>,
|
||||
): Promise<T> {
|
||||
const entries = Object.entries(document);
|
||||
|
||||
// Decrypt all fields in parallel, using the cache when possible.
|
||||
const results = await Promise.all(
|
||||
entries.map(async ([key, ciphertext]) => {
|
||||
const plaintextEntries: Array<[string, any]> = [];
|
||||
const encryptedEntries: Array<[string, any]> = [];
|
||||
|
||||
for (const entry of entries) {
|
||||
if (this.plaintextKeys.has(entry[0])) {
|
||||
plaintextEntries.push(entry);
|
||||
} else {
|
||||
encryptedEntries.push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
// Decrypt encrypted fields in parallel, with memoization.
|
||||
const decrypted = await Promise.all(
|
||||
encryptedEntries.map(async ([key, ciphertext]) => {
|
||||
let value = this.decryptCache.get(ciphertext);
|
||||
if (value === undefined) {
|
||||
const bin = await this.key.decrypt(Bytes.fromBase64(ciphertext));
|
||||
@@ -156,70 +220,159 @@ export class EncryptedStorage<
|
||||
}),
|
||||
);
|
||||
|
||||
const decrypted: Record<string, any> = {};
|
||||
for (const [key, value] of results) {
|
||||
decrypted[key] = value;
|
||||
const result: Record<string, any> = {};
|
||||
|
||||
for (const [key, value] of plaintextEntries) {
|
||||
result[key] = value;
|
||||
}
|
||||
for (const [key, value] of decrypted) {
|
||||
result[key] = value;
|
||||
}
|
||||
|
||||
const decodedDocument = this.formatDocumentFromDecryption(decrypted);
|
||||
const decodedDocument = this.formatDocumentFromDecryption(result);
|
||||
return decodedDocument as T;
|
||||
}
|
||||
|
||||
private formatDocumentForEncryption(document: any): any {
|
||||
// First, iterate through each key and value in the document and format the value for encryption.
|
||||
const formattedDocument: any = {};
|
||||
// ---------------------------------------------------------------------------
|
||||
// Storage conversion — filters (may contain operator objects)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Convert a query filter for storage. Handles plain equality values,
|
||||
* operator objects, and top-level logical operators ($and, $or, $not).
|
||||
*
|
||||
* - Plaintext fields: values and operator objects pass through as-is.
|
||||
* - Encrypted fields: plain values are encrypted. Operator objects throw
|
||||
* because range/string comparisons are meaningless on ciphertext.
|
||||
* - Logical operators: sub-filters are recursively converted.
|
||||
*/
|
||||
private async convertFilterForStorage(
|
||||
filter: Filter<T>,
|
||||
): Promise<Filter<Record<string, any>>> {
|
||||
const result: Record<string, any> = {};
|
||||
const entries = Object.entries(filter);
|
||||
|
||||
// Recursively convert logical operator sub-filters.
|
||||
if (filter.$and) {
|
||||
result.$and = await Promise.all(
|
||||
filter.$and.map((f) => this.convertFilterForStorage(f)),
|
||||
);
|
||||
}
|
||||
if (filter.$or) {
|
||||
result.$or = await Promise.all(
|
||||
filter.$or.map((f) => this.convertFilterForStorage(f)),
|
||||
);
|
||||
}
|
||||
if (filter.$not) {
|
||||
result.$not = await this.convertFilterForStorage(filter.$not);
|
||||
}
|
||||
|
||||
const encryptionTasks: Array<Promise<readonly [string, any]>> = [];
|
||||
|
||||
for (const [key, value] of entries) {
|
||||
// Logical operator keys are already handled above.
|
||||
if (isLogicalKey(key)) continue;
|
||||
|
||||
if (this.plaintextKeys.has(key)) {
|
||||
// Plaintext field — pass through (including operator objects).
|
||||
result[key] = isOperatorObject(value)
|
||||
? this.formatOperatorValuesForStorage(value)
|
||||
: this.formatValueForEncryption(value);
|
||||
} else if (isOperatorObject(value)) {
|
||||
// Encrypted field with an operator — not supported.
|
||||
throw new Error(
|
||||
`Operators ($lt, $gt, $startsWith, $contains, $not, etc.) cannot be used on encrypted field '${key}'. ` +
|
||||
`Add '${key}' to plaintextKeys if you need operator queries on this field.`,
|
||||
);
|
||||
} else {
|
||||
// Encrypted field with a plain equality value — encrypt it.
|
||||
const formatted = this.formatValueForEncryption(value);
|
||||
encryptionTasks.push(
|
||||
(async () => {
|
||||
const bin = this.msgpackr.pack(formatted);
|
||||
const cacheKey = Bytes.from(bin).toBase64();
|
||||
|
||||
let ciphertext = this.encryptCache.get(cacheKey);
|
||||
if (ciphertext === undefined) {
|
||||
const encryptedValue = await this.key.encrypt(bin, true);
|
||||
ciphertext = encryptedValue.toBase64();
|
||||
this.encryptCache.set(cacheKey, ciphertext);
|
||||
}
|
||||
|
||||
return [key, ciphertext] as const;
|
||||
})(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const encryptedResults = await Promise.all(encryptionTasks);
|
||||
for (const [key, ciphertext] of encryptedResults) {
|
||||
result[key] = ciphertext;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize operator object values for plaintext fields so they use the
|
||||
* same storage representation as document values (e.g. Date).
|
||||
*/
|
||||
private formatOperatorValuesForStorage(
|
||||
ops: ComparisonOperators<any>,
|
||||
): ComparisonOperators<any> {
|
||||
const result: Record<string, any> = {};
|
||||
for (const [op, value] of Object.entries(ops)) {
|
||||
if (op === '$not' && isOperatorObject(value)) {
|
||||
result[op] = this.formatOperatorValuesForStorage(value);
|
||||
continue;
|
||||
}
|
||||
result[op] = this.formatValueForEncryption(value);
|
||||
}
|
||||
return result as ComparisonOperators<any>;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Value formatting
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
private formatDocumentForEncryption(document: any): any {
|
||||
const formattedDocument: any = {};
|
||||
for (const [key, value] of Object.entries(document)) {
|
||||
formattedDocument[key] = this.formatValueForEncryption(value);
|
||||
}
|
||||
|
||||
// Then, encode the document to extended JSON.
|
||||
const encodedDocument = encodeExtendedJsonObject(formattedDocument);
|
||||
|
||||
return encodedDocument;
|
||||
return encodeExtendedJsonObject(formattedDocument);
|
||||
}
|
||||
|
||||
private formatDocumentFromDecryption(document: any): any {
|
||||
// First, decode the document from extended JSON.
|
||||
const decodedDocument = decodeExtendedJsonObject(document);
|
||||
|
||||
// Then, iterate through each key and value in the document and format the value from decryption.
|
||||
for (const [key, value] of Object.entries(decodedDocument)) {
|
||||
decodedDocument[key] = this.formatValueFromDecryption(value);
|
||||
}
|
||||
|
||||
return decodedDocument;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a value before encryption. Converts types that msgpackr
|
||||
* doesn't natively support (e.g. Date) into serialisable forms.
|
||||
*/
|
||||
private formatValueForEncryption(value: any): any {
|
||||
// msgpackr doesnt support Date, so we need to convert it to a string.
|
||||
if (value instanceof Date) {
|
||||
return `<Date: ${value.getTime()}>`;
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore a value after decryption. Reverses the transformations
|
||||
* applied by `formatValueForEncryption`.
|
||||
*/
|
||||
private formatValueFromDecryption(value: any): any {
|
||||
// msgpackr doesnt support Date, so we need to convert it to a Date.
|
||||
if (typeof value === 'string') {
|
||||
// Check if this value matches an Extended JSON encoded date.
|
||||
// TODO: Do this without a regex for performance reasons.
|
||||
// const dateMatch = value.match(/<Date: (?<time>[0-9]+)>/);
|
||||
|
||||
// Without regex
|
||||
if (value.startsWith('<Date: ') && value.endsWith('>')) {
|
||||
const time = value.slice(7, -1);
|
||||
return new Date(parseInt(time));
|
||||
}
|
||||
|
||||
// If it does, convert it to a Date.
|
||||
// if (dateMatch) {
|
||||
// const { time } = dateMatch.groups!;
|
||||
// return new Date(parseInt(time));
|
||||
// }
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
import { BaseStorage, FindOptions, type IndexDefinition } from './base-storage.js';
|
||||
import {
|
||||
BaseStorage,
|
||||
FindOptions,
|
||||
type IndexDefinition,
|
||||
type Filter,
|
||||
type ComparisonOperators,
|
||||
isOperatorObject,
|
||||
isLogicalKey,
|
||||
} from './base-storage.js';
|
||||
import { BaseCache, BTreeCache } from 'src/cache/index.js';
|
||||
|
||||
/**
|
||||
* Key prefix separator used to namespace documents within localStorage.
|
||||
@@ -18,7 +27,14 @@ const MANIFEST_SUFFIX = '__keys__';
|
||||
const COUNTER_SUFFIX = '__next__';
|
||||
|
||||
/**
|
||||
* Separator used when joining multiple field values into a single index key.
|
||||
* Suffix for the mutation/version marker. This value increments on every
|
||||
* write operation so other tabs can detect index-staleness even when the
|
||||
* manifest key set itself is unchanged (e.g. updateMany).
|
||||
*/
|
||||
const VERSION_SUFFIX = '__version__';
|
||||
|
||||
/**
|
||||
* Separator used when joining field names to create the index map key.
|
||||
*/
|
||||
const INDEX_KEY_SEP = '\x00';
|
||||
|
||||
@@ -43,9 +59,9 @@ function normalizeIndexes(indexes?: IndexDefinition): string[][] {
|
||||
* tracks all internal keys so that read operations avoid scanning every
|
||||
* key in localStorage.
|
||||
*
|
||||
* Optional indexes provide fast lookups when a query filter matches
|
||||
* an index exactly. Indexes are held in memory and rebuilt only when a
|
||||
* cross-tab manifest change is detected.
|
||||
* Optional indexes are backed by B+ Trees, providing O(log n) equality
|
||||
* lookups and O(log n + k) range queries. Indexes are held in memory and
|
||||
* rebuilt only when a cross-tab manifest change is detected.
|
||||
*
|
||||
* Because localStorage is synchronous and string-only, all values are
|
||||
* JSON-serialised on write and parsed on read.
|
||||
@@ -59,8 +75,9 @@ export class StorageLocalStorage<
|
||||
static from<T extends Record<string, any>>(
|
||||
prefix = 'qs',
|
||||
indexes?: IndexDefinition,
|
||||
cache?: BaseCache,
|
||||
): StorageLocalStorage<T> {
|
||||
return new StorageLocalStorage<T>(prefix, indexes);
|
||||
return new StorageLocalStorage<T>(prefix, indexes, cache);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -79,17 +96,13 @@ export class StorageLocalStorage<
|
||||
* valid and we skip the expensive rebuild.
|
||||
*/
|
||||
private lastManifestRaw: string;
|
||||
private lastVersionRaw: string;
|
||||
|
||||
/** The normalized index definitions supplied at construction time. */
|
||||
private indexDefs: string[][];
|
||||
|
||||
/**
|
||||
* Secondary index maps (same structure as StorageMemory).
|
||||
* Outer key = index name (joined field names).
|
||||
* Inner key = index value (joined field values from a document).
|
||||
* Inner value = set of internal numeric keys.
|
||||
*/
|
||||
private indexes: Map<string, Map<string, Set<number>>>;
|
||||
/** Secondary index cache (B-Tree or KV implementation). */
|
||||
private cache: BaseCache;
|
||||
|
||||
/** Lazily-created child storage instances. */
|
||||
private children: Map<string, StorageLocalStorage<any>>;
|
||||
@@ -97,18 +110,21 @@ export class StorageLocalStorage<
|
||||
constructor(
|
||||
private readonly prefix: string = 'qs',
|
||||
indexes?: IndexDefinition,
|
||||
cache?: BaseCache,
|
||||
) {
|
||||
super();
|
||||
|
||||
this.children = new Map();
|
||||
this.indexDefs = normalizeIndexes(indexes);
|
||||
this.indexes = new Map();
|
||||
this.cache = cache ?? new BTreeCache();
|
||||
for (const fields of this.indexDefs) {
|
||||
this.indexes.set(fields.join(INDEX_KEY_SEP), new Map());
|
||||
const name = fields.join(INDEX_KEY_SEP);
|
||||
this.cache.registerIndex(name, fields);
|
||||
}
|
||||
|
||||
// Bootstrap from localStorage.
|
||||
this.lastManifestRaw = '';
|
||||
this.lastVersionRaw = '';
|
||||
this.manifest = new Set();
|
||||
this.nextKey = 0;
|
||||
this.refreshManifest();
|
||||
@@ -132,23 +148,24 @@ export class StorageLocalStorage<
|
||||
this.persistManifest();
|
||||
}
|
||||
|
||||
async find(filter?: Partial<T>, options?: FindOptions): Promise<T[]> {
|
||||
async find(filter?: Filter<T>, options?: FindOptions): Promise<T[]> {
|
||||
this.refreshManifest();
|
||||
|
||||
let results: T[];
|
||||
const indexedKeys = this.resolveIndexKeys(filter);
|
||||
const resolution = this.resolveIndexKeys(filter);
|
||||
|
||||
if (resolution !== null) {
|
||||
const { keys, resolvedFields } = resolution;
|
||||
const needsVerification = this.filterNeedsVerification(filter, resolvedFields);
|
||||
|
||||
if (indexedKeys !== null) {
|
||||
// Use the index to narrow which documents we read from localStorage.
|
||||
results = [];
|
||||
for (const key of indexedKeys) {
|
||||
for (const key of keys) {
|
||||
const raw = localStorage.getItem(this.docKey(key));
|
||||
if (raw === null) continue;
|
||||
const doc = JSON.parse(raw) as T;
|
||||
if (this.matchesFilter(doc, filter)) {
|
||||
if (needsVerification && !this.matchesFilter(doc, filter)) continue;
|
||||
results.push(doc);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Full scan over all documents in the manifest.
|
||||
results = [];
|
||||
@@ -175,7 +192,7 @@ export class StorageLocalStorage<
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
filter: Filter<T>,
|
||||
update: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
@@ -207,7 +224,7 @@ export class StorageLocalStorage<
|
||||
}
|
||||
|
||||
async deleteMany(
|
||||
filter: Partial<T>,
|
||||
filter: Filter<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
this.refreshManifest();
|
||||
@@ -239,7 +256,7 @@ export class StorageLocalStorage<
|
||||
const childPrefix = `${this.prefix}${KEY_SEP}${path}`;
|
||||
this.children.set(
|
||||
path,
|
||||
new StorageLocalStorage<C>(childPrefix, this.indexDefs),
|
||||
new StorageLocalStorage<C>(childPrefix, this.indexDefs, this.cache.createChild()),
|
||||
);
|
||||
}
|
||||
return this.children.get(path) as StorageLocalStorage<C>;
|
||||
@@ -250,26 +267,94 @@ export class StorageLocalStorage<
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Checks whether a document satisfies every field in the filter.
|
||||
* Checks whether a document satisfies a filter.
|
||||
*
|
||||
* Handles top-level logical operators ($and, $or, $not) first via
|
||||
* recursion, then evaluates remaining field-level conditions.
|
||||
*/
|
||||
private matchesFilter(item: T, filter?: Partial<T>): boolean {
|
||||
private matchesFilter(item: T, filter?: Filter<T>): boolean {
|
||||
if (!filter || Object.keys(filter).length === 0) return true;
|
||||
|
||||
// Top-level logical operators.
|
||||
if (filter.$and && !filter.$and.every((f) => this.matchesFilter(item, f))) return false;
|
||||
if (filter.$or && !filter.$or.some((f) => this.matchesFilter(item, f))) return false;
|
||||
if (filter.$not && this.matchesFilter(item, filter.$not)) return false;
|
||||
|
||||
// Field-level conditions (skip logical operator keys).
|
||||
for (const [key, value] of Object.entries(filter)) {
|
||||
if (isLogicalKey(key)) continue;
|
||||
if (isOperatorObject(value)) {
|
||||
if (!this.matchesOperators(item[key], value)) return false;
|
||||
} else {
|
||||
if (item[key] !== value) return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate a set of comparison / string operators against a single field value.
|
||||
* All operators must pass for the field to match.
|
||||
*/
|
||||
private matchesOperators(fieldValue: any, ops: ComparisonOperators<any>): boolean {
|
||||
if (ops.$eq !== undefined && fieldValue !== ops.$eq) return false;
|
||||
if (ops.$ne !== undefined && fieldValue === ops.$ne) return false;
|
||||
if (ops.$lt !== undefined && !(fieldValue < ops.$lt)) return false;
|
||||
if (ops.$lte !== undefined && !(fieldValue <= ops.$lte)) return false;
|
||||
if (ops.$gt !== undefined && !(fieldValue > ops.$gt)) return false;
|
||||
if (ops.$gte !== undefined && !(fieldValue >= ops.$gte)) return false;
|
||||
|
||||
if (ops.$startsWith !== undefined) {
|
||||
if (typeof fieldValue !== 'string' || !fieldValue.startsWith(ops.$startsWith)) return false;
|
||||
}
|
||||
if (ops.$contains !== undefined) {
|
||||
if (typeof fieldValue !== 'string' || !fieldValue.includes(ops.$contains)) return false;
|
||||
}
|
||||
|
||||
// Field-level $not: invert the enclosed operator set.
|
||||
if (ops.$not !== undefined) {
|
||||
if (this.matchesOperators(fieldValue, ops.$not)) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether candidate documents returned by index resolution still
|
||||
* require full filter verification.
|
||||
*/
|
||||
private filterNeedsVerification(
|
||||
filter: Filter<T> | undefined,
|
||||
resolvedFields: string[],
|
||||
): boolean {
|
||||
if (!filter) return false;
|
||||
const filterKeys = Object.keys(filter);
|
||||
const hasLogicalOps = filterKeys.some(isLogicalKey);
|
||||
return hasLogicalOps
|
||||
|| filterKeys.some((k) => !isLogicalKey(k) && !resolvedFields.includes(k));
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect all [internalKey, document] pairs that match a filter.
|
||||
* Uses an index when possible, otherwise falls back to a full scan.
|
||||
*/
|
||||
private collectMatches(filter?: Partial<T>): Array<[number, T]> {
|
||||
const indexKeys = this.resolveIndexKeys(filter);
|
||||
const keysToScan = indexKeys ?? this.manifest;
|
||||
private collectMatches(filter?: Filter<T>): Array<[number, T]> {
|
||||
const resolution = this.resolveIndexKeys(filter);
|
||||
const results: Array<[number, T]> = [];
|
||||
|
||||
for (const key of keysToScan) {
|
||||
if (resolution !== null) {
|
||||
const { keys, resolvedFields } = resolution;
|
||||
const needsVerification = this.filterNeedsVerification(filter, resolvedFields);
|
||||
|
||||
for (const key of keys) {
|
||||
const raw = localStorage.getItem(this.docKey(key));
|
||||
if (raw === null) continue;
|
||||
const doc = JSON.parse(raw) as T;
|
||||
if (needsVerification && !this.matchesFilter(doc, filter)) continue;
|
||||
results.push([key, doc]);
|
||||
}
|
||||
} else {
|
||||
for (const key of this.manifest) {
|
||||
const raw = localStorage.getItem(this.docKey(key));
|
||||
if (raw === null) continue;
|
||||
const doc = JSON.parse(raw) as T;
|
||||
@@ -277,6 +362,7 @@ export class StorageLocalStorage<
|
||||
results.push([key, doc]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
@@ -300,82 +386,203 @@ export class StorageLocalStorage<
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Build the index value string for a given document and set of fields.
|
||||
* Returns `null` if any field is missing from the document.
|
||||
* Build the B+ Tree key for a document and a set of index fields.
|
||||
* - Single-field indexes return the raw field value.
|
||||
* - Compound indexes return an array of raw field values.
|
||||
* Returns `null` if any required field is missing from the document.
|
||||
*/
|
||||
private buildIndexValue(doc: Record<string, any>, fields: string[]): string | null {
|
||||
const parts: string[] = [];
|
||||
private buildIndexKey(doc: Record<string, any>, fields: string[]): any | null {
|
||||
if (fields.length === 1) {
|
||||
if (!(fields[0] in doc)) return null;
|
||||
return doc[fields[0]];
|
||||
}
|
||||
|
||||
const parts: any[] = [];
|
||||
for (const field of fields) {
|
||||
if (!(field in doc)) return null;
|
||||
parts.push(String(doc[field]));
|
||||
parts.push(doc[field]);
|
||||
}
|
||||
return parts.join(INDEX_KEY_SEP);
|
||||
return parts;
|
||||
}
|
||||
|
||||
/** Register a document in all applicable indexes. */
|
||||
private addToIndexes(internalKey: number, doc: T): void {
|
||||
for (const fields of this.indexDefs) {
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(doc, fields);
|
||||
if (indexValue === null) continue;
|
||||
const indexKey = this.buildIndexKey(doc, fields);
|
||||
if (indexKey === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
let bucket = indexMap.get(indexValue);
|
||||
if (!bucket) {
|
||||
bucket = new Set();
|
||||
indexMap.set(indexValue, bucket);
|
||||
}
|
||||
bucket.add(internalKey);
|
||||
const name = fields.join(INDEX_KEY_SEP);
|
||||
this.cache.insert(name, indexKey, internalKey);
|
||||
}
|
||||
}
|
||||
|
||||
/** Remove a document from all applicable indexes. */
|
||||
private removeFromIndexes(internalKey: number, doc: T): void {
|
||||
for (const fields of this.indexDefs) {
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(doc, fields);
|
||||
if (indexValue === null) continue;
|
||||
const indexKey = this.buildIndexKey(doc, fields);
|
||||
if (indexKey === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
const bucket = indexMap.get(indexValue);
|
||||
if (bucket) {
|
||||
bucket.delete(internalKey);
|
||||
if (bucket.size === 0) indexMap.delete(indexValue);
|
||||
}
|
||||
const name = fields.join(INDEX_KEY_SEP);
|
||||
this.cache.delete(name, indexKey, internalKey);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to resolve candidate internal keys from the indexes.
|
||||
* Returns `null` if no index can serve the query.
|
||||
*
|
||||
* Supports three resolution strategies:
|
||||
* 1. Equality lookup via B+ Tree `.get()` — O(log n)
|
||||
* 2. Range scan via B+ Tree `.range()` — O(log n + k)
|
||||
* 3. Compound equality — B+ Tree `.get()` with a tuple key
|
||||
*/
|
||||
private resolveIndexKeys(filter?: Partial<T>): Set<number> | null {
|
||||
private resolveIndexKeys(
|
||||
filter?: Filter<T>,
|
||||
): { keys: Iterable<number>; resolvedFields: string[] } | null {
|
||||
if (!filter) return null;
|
||||
const filterKeys = Object.keys(filter);
|
||||
if (filterKeys.length === 0) return null;
|
||||
|
||||
for (const fields of this.indexDefs) {
|
||||
if (!fields.every((f) => f in filter)) continue;
|
||||
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(filter, fields);
|
||||
if (indexValue === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
const bucket = indexMap.get(indexValue);
|
||||
return bucket ?? new Set();
|
||||
if (fields.length === 1) {
|
||||
// --- Single-field index ---
|
||||
const field = fields[0];
|
||||
if (!(field in filter)) continue;
|
||||
|
||||
const filterValue = (filter as any)[field];
|
||||
|
||||
if (isOperatorObject(filterValue)) {
|
||||
const keys = this.resolveOperatorViaTree(indexName, filterValue);
|
||||
if (keys !== null) return { keys, resolvedFields: [field] };
|
||||
continue;
|
||||
}
|
||||
|
||||
// Plain equality.
|
||||
return { keys: this.cache.get(indexName, filterValue), resolvedFields: [field] };
|
||||
} else {
|
||||
// --- Compound index — all fields must be plain equality ---
|
||||
if (!fields.every((f) => f in filter && !isOperatorObject((filter as any)[f]))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tupleKey = fields.map((f) => (filter as any)[f]);
|
||||
return { keys: this.cache.get(indexName, tupleKey), resolvedFields: [...fields] };
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rebuild all in-memory index maps by reading every document from
|
||||
* Try to resolve an operator filter against a single-field B+ Tree index.
|
||||
* Returns a flat array of matching internal keys, or null if the
|
||||
* operators can't be efficiently served by the tree.
|
||||
*
|
||||
* Supported acceleration:
|
||||
* - `$eq` → point lookup via `.get()`
|
||||
* - `$gt/$gte/$lt/$lte` → range scan via `.range()`
|
||||
* - `$startsWith` → converted to a range scan on the prefix
|
||||
* - `$ne`, `$contains`, `$not` → cannot use index, returns null
|
||||
*/
|
||||
private resolveOperatorViaTree(
|
||||
indexName: string,
|
||||
ops: ComparisonOperators<any>,
|
||||
): Iterable<number> | null {
|
||||
// Operators that prevent efficient index use.
|
||||
if (ops.$ne !== undefined || ops.$contains !== undefined || ops.$not !== undefined) return null;
|
||||
|
||||
if (ops.$eq !== undefined) {
|
||||
// If $eq is combined with other operators, this path does not fully
|
||||
// resolve the predicate. Let caller fall back to verification/full scan.
|
||||
if (
|
||||
ops.$gt !== undefined
|
||||
|| ops.$gte !== undefined
|
||||
|| ops.$lt !== undefined
|
||||
|| ops.$lte !== undefined
|
||||
|| ops.$startsWith !== undefined
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
return this.cache.get(indexName, ops.$eq);
|
||||
}
|
||||
|
||||
// $startsWith is converted to a range scan: "abc" → ["abc", "abd").
|
||||
if (ops.$startsWith !== undefined) {
|
||||
const prefix = ops.$startsWith;
|
||||
if (prefix.length === 0) return null;
|
||||
const upper = prefix.slice(0, -1)
|
||||
+ String.fromCharCode(prefix.charCodeAt(prefix.length - 1) + 1);
|
||||
const entries = this.cache.range(indexName, prefix, upper, {
|
||||
lowerInclusive: true,
|
||||
upperInclusive: false,
|
||||
});
|
||||
return entries;
|
||||
}
|
||||
|
||||
// Extract range bounds from the remaining operators.
|
||||
// If strict/non-strict variants are both provided, use the stricter bound.
|
||||
let min: any = undefined;
|
||||
let max: any = undefined;
|
||||
let lowerInclusive = true;
|
||||
let upperInclusive = false;
|
||||
|
||||
if (ops.$gt !== undefined && ops.$gte !== undefined) {
|
||||
if (ops.$gt > ops.$gte) {
|
||||
min = ops.$gt;
|
||||
lowerInclusive = false;
|
||||
} else if (ops.$gt < ops.$gte) {
|
||||
min = ops.$gte;
|
||||
lowerInclusive = true;
|
||||
} else {
|
||||
min = ops.$gt;
|
||||
lowerInclusive = false;
|
||||
}
|
||||
} else if (ops.$gt !== undefined) {
|
||||
min = ops.$gt;
|
||||
lowerInclusive = false;
|
||||
} else if (ops.$gte !== undefined) {
|
||||
min = ops.$gte;
|
||||
lowerInclusive = true;
|
||||
}
|
||||
|
||||
if (ops.$lt !== undefined && ops.$lte !== undefined) {
|
||||
if (ops.$lt < ops.$lte) {
|
||||
max = ops.$lt;
|
||||
upperInclusive = false;
|
||||
} else if (ops.$lt > ops.$lte) {
|
||||
max = ops.$lte;
|
||||
upperInclusive = true;
|
||||
} else {
|
||||
max = ops.$lt;
|
||||
upperInclusive = false;
|
||||
}
|
||||
} else if (ops.$lt !== undefined) {
|
||||
max = ops.$lt;
|
||||
upperInclusive = false;
|
||||
} else if (ops.$lte !== undefined) {
|
||||
max = ops.$lte;
|
||||
upperInclusive = true;
|
||||
}
|
||||
|
||||
if (min === undefined && max === undefined) return null;
|
||||
if (min !== undefined && max !== undefined) {
|
||||
if (min > max) return [];
|
||||
if (min === max && (!lowerInclusive || !upperInclusive)) return [];
|
||||
}
|
||||
|
||||
return this.cache.range(indexName, min, max, { lowerInclusive, upperInclusive });
|
||||
}
|
||||
|
||||
/**
|
||||
* Rebuild all in-memory index B+ Trees by reading every document from
|
||||
* localStorage. Called only when a cross-tab manifest change is detected.
|
||||
*/
|
||||
private rebuildIndexes(): void {
|
||||
for (const [, indexMap] of this.indexes) {
|
||||
indexMap.clear();
|
||||
for (const fields of this.indexDefs) {
|
||||
const name = fields.join(INDEX_KEY_SEP);
|
||||
this.cache.clearIndex(name);
|
||||
}
|
||||
|
||||
for (const key of this.manifest) {
|
||||
@@ -405,6 +612,11 @@ export class StorageLocalStorage<
|
||||
return `${this.prefix}${KEY_SEP}${COUNTER_SUFFIX}`;
|
||||
}
|
||||
|
||||
/** Build the localStorage key used to persist the mutation version. */
|
||||
private versionKey(): string {
|
||||
return `${this.prefix}${KEY_SEP}${VERSION_SUFFIX}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Re-read the manifest from localStorage into memory.
|
||||
* Called at the start of every public method so that cross-tab writes
|
||||
@@ -415,23 +627,41 @@ export class StorageLocalStorage<
|
||||
*/
|
||||
private refreshManifest(): void {
|
||||
const raw = localStorage.getItem(this.manifestKey()) ?? '[]';
|
||||
const versionRaw = localStorage.getItem(this.versionKey()) ?? '0';
|
||||
|
||||
if (raw === this.lastManifestRaw) return;
|
||||
if (raw === this.lastManifestRaw && versionRaw === this.lastVersionRaw) return;
|
||||
|
||||
this.lastManifestRaw = raw;
|
||||
let parsedKeys: number[] = [];
|
||||
let parsedOk = true;
|
||||
|
||||
try {
|
||||
const keys: number[] = JSON.parse(raw);
|
||||
this.manifest = new Set(keys);
|
||||
parsedKeys = JSON.parse(raw);
|
||||
} catch {
|
||||
parsedOk = false;
|
||||
this.manifest = new Set();
|
||||
}
|
||||
|
||||
if (parsedOk) {
|
||||
this.manifest = new Set(parsedKeys);
|
||||
this.lastManifestRaw = raw;
|
||||
}
|
||||
this.lastVersionRaw = versionRaw;
|
||||
|
||||
// Restore the counter from localStorage.
|
||||
const counterRaw = localStorage.getItem(this.counterKey());
|
||||
this.nextKey = counterRaw !== null ? Number(counterRaw) : 0;
|
||||
if (counterRaw !== null) {
|
||||
this.nextKey = Number(counterRaw);
|
||||
} else if (this.manifest.size > 0) {
|
||||
let max = -1;
|
||||
for (const key of this.manifest) {
|
||||
if (key > max) max = key;
|
||||
}
|
||||
this.nextKey = max + 1;
|
||||
} else {
|
||||
this.nextKey = 0;
|
||||
}
|
||||
|
||||
// Manifest changed — indexes are potentially stale.
|
||||
// Manifest or version changed — indexes are potentially stale.
|
||||
if (this.indexDefs.length > 0) {
|
||||
this.rebuildIndexes();
|
||||
}
|
||||
@@ -442,11 +672,16 @@ export class StorageLocalStorage<
|
||||
*/
|
||||
private persistManifest(): void {
|
||||
const raw = JSON.stringify([...this.manifest]);
|
||||
const nextVersion = Number(localStorage.getItem(this.versionKey()) ?? '0') + 1;
|
||||
const versionRaw = String(nextVersion);
|
||||
|
||||
localStorage.setItem(this.manifestKey(), raw);
|
||||
localStorage.setItem(this.counterKey(), String(this.nextKey));
|
||||
localStorage.setItem(this.versionKey(), versionRaw);
|
||||
|
||||
// Keep the cached raw string in sync so the next refreshManifest()
|
||||
// recognises this as "our own write" and skips the rebuild.
|
||||
// Keep cached values in sync so the next refreshManifest() recognises
|
||||
// this as our own write and skips unnecessary rebuild work.
|
||||
this.lastManifestRaw = raw;
|
||||
this.lastVersionRaw = versionRaw;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { BaseStorage, FindOptions } from './base-storage.js';
|
||||
import { BaseStorage, FindOptions, type Filter } from './base-storage.js';
|
||||
import { StorageMemory } from './storage-memory.js';
|
||||
|
||||
/**
|
||||
@@ -7,11 +7,17 @@ import { StorageMemory } from './storage-memory.js';
|
||||
* All read operations will use system memory - all write operations will use the provided adapter.
|
||||
*/
|
||||
export class StorageMemorySynced<T extends Record<string, any> = Record<string, any>> extends BaseStorage<T> {
|
||||
private isPrimed: boolean;
|
||||
private primePromise: Promise<void> | null;
|
||||
|
||||
constructor(
|
||||
private inMemoryCache: StorageMemory<T>,
|
||||
private store: BaseStorage<T>,
|
||||
isPrimed = false,
|
||||
) {
|
||||
super();
|
||||
this.isPrimed = isPrimed;
|
||||
this.primePromise = null;
|
||||
|
||||
// Hook into all write operations so that we can sync the In-Memory cache.
|
||||
this.store.on('insert', async (payload) => {
|
||||
@@ -44,6 +50,23 @@ export class StorageMemorySynced<T extends Record<string, any> = Record<string,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the in-memory cache has been initialized from the backing store.
|
||||
* This is especially important for derived children, whose caches start empty.
|
||||
*/
|
||||
private async ensurePrimed(): Promise<void> {
|
||||
if (this.isPrimed) return;
|
||||
if (!this.primePromise) {
|
||||
this.primePromise = (async () => {
|
||||
await this.inMemoryCache.deleteMany({});
|
||||
const allDocuments = await this.store.find();
|
||||
await this.inMemoryCache.insertMany(allDocuments);
|
||||
this.isPrimed = true;
|
||||
})();
|
||||
}
|
||||
await this.primePromise;
|
||||
}
|
||||
|
||||
static async create<T extends Record<string, any>>(store: BaseStorage<T>) {
|
||||
// Instantiate in-memory cache and the backing store.
|
||||
const inMemoryCache = new StorageMemory<T>();
|
||||
@@ -58,6 +81,7 @@ export class StorageMemorySynced<T extends Record<string, any> = Record<string,
|
||||
}
|
||||
|
||||
// Return our instance of this store.
|
||||
memorySyncedStore.isPrimed = true;
|
||||
return memorySyncedStore;
|
||||
}
|
||||
|
||||
@@ -65,19 +89,22 @@ export class StorageMemorySynced<T extends Record<string, any> = Record<string,
|
||||
await this.store.insertMany(documents);
|
||||
}
|
||||
|
||||
async find(filter?: Partial<T>, options?: FindOptions): Promise<T[]> {
|
||||
async find(filter?: Filter<T>, options?: FindOptions): Promise<T[]> {
|
||||
await this.ensurePrimed();
|
||||
return await this.inMemoryCache.find(filter, options);
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
filter: Filter<T>,
|
||||
update: Partial<T>,
|
||||
options: FindOptions = {} as FindOptions
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
await this.ensurePrimed();
|
||||
return await this.store.updateMany(filter, update, options);
|
||||
}
|
||||
|
||||
async deleteMany(filter: Partial<T>, options: FindOptions = {} as FindOptions): Promise<number> {
|
||||
async deleteMany(filter: Filter<T>, options: Partial<FindOptions> = {}): Promise<number> {
|
||||
await this.ensurePrimed();
|
||||
return await this.store.deleteMany(filter, options);
|
||||
}
|
||||
|
||||
@@ -85,7 +112,11 @@ export class StorageMemorySynced<T extends Record<string, any> = Record<string,
|
||||
const childStore = this.store.deriveChild<C>(path);
|
||||
const childMemory = this.inMemoryCache.deriveChild<C>(path);
|
||||
|
||||
if (!(childMemory instanceof StorageMemory)) {
|
||||
throw new Error('Expected derived in-memory cache to be a StorageMemory instance');
|
||||
}
|
||||
|
||||
// Create a new synced storage for the child
|
||||
return new StorageMemorySynced<C>(childMemory as StorageMemory<C>, childStore);
|
||||
return new StorageMemorySynced<C>(childMemory, childStore);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,16 @@
|
||||
import { BaseStorage, FindOptions, type IndexDefinition } from './base-storage.js';
|
||||
import {
|
||||
BaseStorage,
|
||||
FindOptions,
|
||||
type IndexDefinition,
|
||||
type Filter,
|
||||
type ComparisonOperators,
|
||||
isOperatorObject,
|
||||
isLogicalKey,
|
||||
} from './base-storage.js';
|
||||
import { BaseCache, BTreeCache } from 'src/cache/index.js';
|
||||
|
||||
/**
|
||||
* Separator used when joining multiple field values into a single index key.
|
||||
* Chosen to be unlikely to appear in real field values.
|
||||
* Separator used when joining field names to create the index map key.
|
||||
*/
|
||||
const INDEX_KEY_SEP = '\x00';
|
||||
|
||||
@@ -27,16 +35,17 @@ function normalizeIndexes(indexes?: IndexDefinition): string[][] {
|
||||
*
|
||||
* @remarks
|
||||
* Documents are keyed internally by an auto-incrementing numeric key.
|
||||
* Optional indexes provide O(1) lookups when a query filter matches
|
||||
* an index exactly.
|
||||
* Optional indexes are backed by B+ Trees, providing O(log n) equality
|
||||
* lookups and O(log n + k) range queries.
|
||||
*/
|
||||
export class StorageMemory<
|
||||
T extends Record<string, any> = Record<string, any>,
|
||||
> extends BaseStorage<T> {
|
||||
static from<T extends Record<string, any>>(
|
||||
indexes?: IndexDefinition,
|
||||
cache?: BaseCache,
|
||||
): StorageMemory<T> {
|
||||
return new StorageMemory<T>(indexes);
|
||||
return new StorageMemory<T>(indexes, cache);
|
||||
}
|
||||
|
||||
/** Auto-incrementing counter used to generate internal keys. */
|
||||
@@ -45,13 +54,8 @@ export class StorageMemory<
|
||||
/** Primary document store keyed by an opaque internal key. */
|
||||
private store: Map<number, T>;
|
||||
|
||||
/**
|
||||
* Secondary index maps.
|
||||
* Outer key = index name (joined field names).
|
||||
* Inner key = index value (joined field values from a document).
|
||||
* Inner value = set of internal keys that share this index value.
|
||||
*/
|
||||
private indexes: Map<string, Map<string, Set<number>>>;
|
||||
/** Secondary index cache (B-Tree or KV implementation). */
|
||||
private cache: BaseCache;
|
||||
|
||||
/** The normalized index definitions supplied at construction time. */
|
||||
private indexDefs: string[][];
|
||||
@@ -59,17 +63,17 @@ export class StorageMemory<
|
||||
/** Lazily-created child storage instances. */
|
||||
private children: Map<string, StorageMemory<any>>;
|
||||
|
||||
constructor(indexes?: IndexDefinition) {
|
||||
constructor(indexes?: IndexDefinition, cache?: BaseCache) {
|
||||
super();
|
||||
|
||||
this.store = new Map();
|
||||
this.children = new Map();
|
||||
this.indexDefs = normalizeIndexes(indexes);
|
||||
|
||||
// Initialise an empty map for each index definition.
|
||||
this.indexes = new Map();
|
||||
this.cache = cache ?? new BTreeCache();
|
||||
for (const fields of this.indexDefs) {
|
||||
this.indexes.set(fields.join(INDEX_KEY_SEP), new Map());
|
||||
const name = fields.join(INDEX_KEY_SEP);
|
||||
this.cache.registerIndex(name, fields);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,7 +90,7 @@ export class StorageMemory<
|
||||
}
|
||||
}
|
||||
|
||||
async find(filter?: Partial<T>, options?: FindOptions): Promise<T[]> {
|
||||
async find(filter?: Filter<T>, options?: FindOptions): Promise<T[]> {
|
||||
let results: T[];
|
||||
|
||||
// Attempt to satisfy the query via an index.
|
||||
@@ -118,7 +122,7 @@ export class StorageMemory<
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
filter: Filter<T>,
|
||||
update: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
@@ -147,7 +151,7 @@ export class StorageMemory<
|
||||
}
|
||||
|
||||
async deleteMany(
|
||||
filter: Partial<T>,
|
||||
filter: Filter<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
const rowsToDelete = this.collectMatches(filter);
|
||||
@@ -171,7 +175,7 @@ export class StorageMemory<
|
||||
|
||||
deriveChild<C>(path: string): BaseStorage<C> {
|
||||
if (!this.children.has(path)) {
|
||||
this.children.set(path, new StorageMemory<C>(this.indexDefs));
|
||||
this.children.set(path, new StorageMemory<C>(this.indexDefs, this.cache.createChild()));
|
||||
}
|
||||
return this.children.get(path) as StorageMemory<C>;
|
||||
}
|
||||
@@ -181,38 +185,93 @@ export class StorageMemory<
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Checks whether a document satisfies every field in the filter.
|
||||
* An empty or undefined filter matches everything.
|
||||
* Checks whether a document satisfies a filter.
|
||||
*
|
||||
* Handles top-level logical operators ($and, $or, $not) first via
|
||||
* recursion, then evaluates remaining field-level conditions.
|
||||
*/
|
||||
private matchesFilter(item: T, filter?: Partial<T>): boolean {
|
||||
private matchesFilter(item: T, filter?: Filter<T>): boolean {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Top-level logical operators.
|
||||
if (filter.$and && !filter.$and.every((f) => this.matchesFilter(item, f))) return false;
|
||||
if (filter.$or && !filter.$or.some((f) => this.matchesFilter(item, f))) return false;
|
||||
if (filter.$not && this.matchesFilter(item, filter.$not)) return false;
|
||||
|
||||
// Field-level conditions (skip logical operator keys).
|
||||
for (const [key, value] of Object.entries(filter)) {
|
||||
if (item[key] !== value) {
|
||||
return false;
|
||||
if (isLogicalKey(key)) continue;
|
||||
if (isOperatorObject(value)) {
|
||||
if (!this.matchesOperators(item[key], value)) return false;
|
||||
} else {
|
||||
if (item[key] !== value) return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate a set of comparison / string operators against a single field value.
|
||||
* All operators must pass for the field to match.
|
||||
*/
|
||||
private matchesOperators(fieldValue: any, ops: ComparisonOperators<any>): boolean {
|
||||
if (ops.$eq !== undefined && fieldValue !== ops.$eq) return false;
|
||||
if (ops.$ne !== undefined && fieldValue === ops.$ne) return false;
|
||||
if (ops.$lt !== undefined && !(fieldValue < ops.$lt)) return false;
|
||||
if (ops.$lte !== undefined && !(fieldValue <= ops.$lte)) return false;
|
||||
if (ops.$gt !== undefined && !(fieldValue > ops.$gt)) return false;
|
||||
if (ops.$gte !== undefined && !(fieldValue >= ops.$gte)) return false;
|
||||
|
||||
if (ops.$startsWith !== undefined) {
|
||||
if (typeof fieldValue !== 'string' || !fieldValue.startsWith(ops.$startsWith)) return false;
|
||||
}
|
||||
if (ops.$contains !== undefined) {
|
||||
if (typeof fieldValue !== 'string' || !fieldValue.includes(ops.$contains)) return false;
|
||||
}
|
||||
|
||||
// Field-level $not: invert the enclosed operator set.
|
||||
if (ops.$not !== undefined) {
|
||||
if (this.matchesOperators(fieldValue, ops.$not)) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether candidate documents returned by index resolution still
|
||||
* require full filter verification.
|
||||
*/
|
||||
private filterNeedsVerification(
|
||||
filter: Filter<T> | undefined,
|
||||
resolvedFields: string[],
|
||||
): boolean {
|
||||
if (!filter) return false;
|
||||
const filterKeys = Object.keys(filter);
|
||||
const hasLogicalOps = filterKeys.some(isLogicalKey);
|
||||
return hasLogicalOps
|
||||
|| filterKeys.some((k) => !isLogicalKey(k) && !resolvedFields.includes(k));
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect all [internalKey, document] pairs that match a filter.
|
||||
* Uses an index when possible, otherwise falls back to a full scan.
|
||||
*/
|
||||
private collectMatches(filter?: Partial<T>): Array<[number, T]> {
|
||||
const indexKeys = this.resolveIndexKeys(filter);
|
||||
private collectMatches(filter?: Filter<T>): Array<[number, T]> {
|
||||
const resolution = this.resolveIndexKeys(filter);
|
||||
|
||||
if (resolution !== null) {
|
||||
const { keys, resolvedFields } = resolution;
|
||||
const needsVerification = this.filterNeedsVerification(filter, resolvedFields);
|
||||
|
||||
if (indexKeys !== null) {
|
||||
// We have candidate internal keys from the index — fetch and verify.
|
||||
const results: Array<[number, T]> = [];
|
||||
for (const key of indexKeys) {
|
||||
for (const key of keys) {
|
||||
const doc = this.store.get(key);
|
||||
if (doc && this.matchesFilter(doc, filter)) {
|
||||
if (!doc) continue;
|
||||
if (needsVerification && !this.matchesFilter(doc, filter)) continue;
|
||||
results.push([key, doc]);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
@@ -246,97 +305,216 @@ export class StorageMemory<
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Build the index value string for a given document and set of fields.
|
||||
* Returns `null` if any of the fields are missing from the document,
|
||||
* since we can't meaningfully index a partial key.
|
||||
* Build the B+ Tree key for a document and a set of index fields.
|
||||
* - Single-field indexes return the raw field value.
|
||||
* - Compound indexes return an array of raw field values.
|
||||
* Returns `null` if any required field is missing from the document.
|
||||
*/
|
||||
private buildIndexValue(doc: Record<string, any>, fields: string[]): string | null {
|
||||
const parts: string[] = [];
|
||||
private buildIndexKey(doc: Record<string, any>, fields: string[]): any | null {
|
||||
if (fields.length === 1) {
|
||||
if (!(fields[0] in doc)) return null;
|
||||
return doc[fields[0]];
|
||||
}
|
||||
|
||||
const parts: any[] = [];
|
||||
for (const field of fields) {
|
||||
if (!(field in doc)) return null;
|
||||
parts.push(String(doc[field]));
|
||||
parts.push(doc[field]);
|
||||
}
|
||||
return parts.join(INDEX_KEY_SEP);
|
||||
return parts;
|
||||
}
|
||||
|
||||
/** Register a document in all applicable indexes. */
|
||||
private addToIndexes(internalKey: number, doc: T): void {
|
||||
for (const fields of this.indexDefs) {
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(doc, fields);
|
||||
if (indexValue === null) continue;
|
||||
const indexKey = this.buildIndexKey(doc, fields);
|
||||
if (indexKey === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
let bucket = indexMap.get(indexValue);
|
||||
if (!bucket) {
|
||||
bucket = new Set();
|
||||
indexMap.set(indexValue, bucket);
|
||||
}
|
||||
bucket.add(internalKey);
|
||||
const name = fields.join(INDEX_KEY_SEP);
|
||||
this.cache.insert(name, indexKey, internalKey);
|
||||
}
|
||||
}
|
||||
|
||||
/** Remove a document from all applicable indexes. */
|
||||
private removeFromIndexes(internalKey: number, doc: T): void {
|
||||
for (const fields of this.indexDefs) {
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(doc, fields);
|
||||
if (indexValue === null) continue;
|
||||
const indexKey = this.buildIndexKey(doc, fields);
|
||||
if (indexKey === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
const bucket = indexMap.get(indexValue);
|
||||
if (bucket) {
|
||||
bucket.delete(internalKey);
|
||||
if (bucket.size === 0) indexMap.delete(indexValue);
|
||||
}
|
||||
const name = fields.join(INDEX_KEY_SEP);
|
||||
this.cache.delete(name, indexKey, internalKey);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to resolve a set of candidate internal keys from the indexes.
|
||||
* Returns `null` if no index can serve the query.
|
||||
*
|
||||
* An index is used when the filter fields are a superset of (or equal to)
|
||||
* an index's fields — meaning the index value can be fully constructed
|
||||
* from the filter.
|
||||
* Result of an index resolution attempt.
|
||||
* `keys` is an iterable of candidate internal keys.
|
||||
* `resolvedFields` lists the filter fields fully satisfied by the index,
|
||||
* so callers can skip re-verifying those conditions in matchesFilter.
|
||||
*/
|
||||
private resolveIndexKeys(filter?: Partial<T>): Set<number> | null {
|
||||
private resolveIndexKeys(
|
||||
filter?: Filter<T>,
|
||||
): { keys: Iterable<number>; resolvedFields: string[] } | null {
|
||||
if (!filter) return null;
|
||||
const filterKeys = Object.keys(filter);
|
||||
if (filterKeys.length === 0) return null;
|
||||
|
||||
for (const fields of this.indexDefs) {
|
||||
// Every field in the index must be present in the filter.
|
||||
if (!fields.every((f) => f in filter)) continue;
|
||||
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(filter, fields);
|
||||
if (indexValue === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
const bucket = indexMap.get(indexValue);
|
||||
return bucket ?? new Set();
|
||||
if (fields.length === 1) {
|
||||
// --- Single-field index ---
|
||||
const field = fields[0];
|
||||
if (!(field in filter)) continue;
|
||||
|
||||
const filterValue = (filter as any)[field];
|
||||
|
||||
if (isOperatorObject(filterValue)) {
|
||||
const keys = this.resolveOperatorViaTree(indexName, filterValue);
|
||||
if (keys !== null) return { keys, resolvedFields: [field] };
|
||||
continue;
|
||||
}
|
||||
|
||||
// Plain equality.
|
||||
return { keys: this.cache.get(indexName, filterValue), resolvedFields: [field] };
|
||||
} else {
|
||||
// --- Compound index — all fields must be plain equality ---
|
||||
if (!fields.every((f) => f in filter && !isOperatorObject((filter as any)[f]))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tupleKey = fields.map((f) => (filter as any)[f]);
|
||||
return { keys: this.cache.get(indexName, tupleKey), resolvedFields: [...fields] };
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to resolve an operator filter against a single-field B+ Tree index.
|
||||
* Returns a flat array of matching internal keys, or null if the
|
||||
* operators can't be efficiently served by the tree.
|
||||
*
|
||||
* Supported acceleration:
|
||||
* - `$eq` → point lookup via `.get()`
|
||||
* - `$gt/$gte/$lt/$lte` → range scan via `.range()`
|
||||
* - `$startsWith` → converted to a range scan on the prefix
|
||||
* - `$ne`, `$contains`, `$not` → cannot use index, returns null
|
||||
*/
|
||||
private resolveOperatorViaTree(
|
||||
indexName: string,
|
||||
ops: ComparisonOperators<any>,
|
||||
): Iterable<number> | null {
|
||||
// Operators that prevent efficient index use.
|
||||
if (ops.$ne !== undefined || ops.$contains !== undefined || ops.$not !== undefined) return null;
|
||||
|
||||
// $eq is a point lookup.
|
||||
if (ops.$eq !== undefined) {
|
||||
// If $eq is combined with other operators, this path does not fully
|
||||
// resolve the predicate. Let caller fall back to verification/full scan.
|
||||
if (
|
||||
ops.$gt !== undefined
|
||||
|| ops.$gte !== undefined
|
||||
|| ops.$lt !== undefined
|
||||
|| ops.$lte !== undefined
|
||||
|| ops.$startsWith !== undefined
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
return this.cache.get(indexName, ops.$eq);
|
||||
}
|
||||
|
||||
// $startsWith is converted to a range scan: "abc" → ["abc", "abd").
|
||||
if (ops.$startsWith !== undefined) {
|
||||
const prefix = ops.$startsWith;
|
||||
if (prefix.length === 0) return null;
|
||||
const upper = prefix.slice(0, -1)
|
||||
+ String.fromCharCode(prefix.charCodeAt(prefix.length - 1) + 1);
|
||||
const entries = this.cache.range(indexName, prefix, upper, {
|
||||
lowerInclusive: true,
|
||||
upperInclusive: false,
|
||||
});
|
||||
return entries;
|
||||
}
|
||||
|
||||
// Extract range bounds from the remaining operators.
|
||||
// If strict/non-strict variants are both provided, use the stricter bound.
|
||||
let min: any = undefined;
|
||||
let max: any = undefined;
|
||||
let lowerInclusive = true;
|
||||
let upperInclusive = false;
|
||||
|
||||
if (ops.$gt !== undefined && ops.$gte !== undefined) {
|
||||
if (ops.$gt > ops.$gte) {
|
||||
min = ops.$gt;
|
||||
lowerInclusive = false;
|
||||
} else if (ops.$gt < ops.$gte) {
|
||||
min = ops.$gte;
|
||||
lowerInclusive = true;
|
||||
} else {
|
||||
min = ops.$gt;
|
||||
lowerInclusive = false;
|
||||
}
|
||||
} else if (ops.$gt !== undefined) {
|
||||
min = ops.$gt;
|
||||
lowerInclusive = false;
|
||||
} else if (ops.$gte !== undefined) {
|
||||
min = ops.$gte;
|
||||
lowerInclusive = true;
|
||||
}
|
||||
|
||||
if (ops.$lt !== undefined && ops.$lte !== undefined) {
|
||||
if (ops.$lt < ops.$lte) {
|
||||
max = ops.$lt;
|
||||
upperInclusive = false;
|
||||
} else if (ops.$lt > ops.$lte) {
|
||||
max = ops.$lte;
|
||||
upperInclusive = true;
|
||||
} else {
|
||||
max = ops.$lt;
|
||||
upperInclusive = false;
|
||||
}
|
||||
} else if (ops.$lt !== undefined) {
|
||||
max = ops.$lt;
|
||||
upperInclusive = false;
|
||||
} else if (ops.$lte !== undefined) {
|
||||
max = ops.$lte;
|
||||
upperInclusive = true;
|
||||
}
|
||||
|
||||
if (min === undefined && max === undefined) return null;
|
||||
if (min !== undefined && max !== undefined) {
|
||||
if (min > max) return [];
|
||||
if (min === max && (!lowerInclusive || !upperInclusive)) return [];
|
||||
}
|
||||
|
||||
return this.cache.range(indexName, min, max, { lowerInclusive, upperInclusive });
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to answer a `find` query entirely through an index.
|
||||
* Returns `null` when no index can serve the filter, signalling
|
||||
* the caller to fall back to a full scan.
|
||||
*
|
||||
* When the index covers every field in the filter, matchesFilter
|
||||
* is skipped entirely — the B+ Tree has already ensured the
|
||||
* conditions are met.
|
||||
*/
|
||||
private findViaIndex(filter?: Partial<T>): T[] | null {
|
||||
const keys = this.resolveIndexKeys(filter);
|
||||
if (keys === null) return null;
|
||||
private findViaIndex(filter?: Filter<T>): T[] | null {
|
||||
const resolution = this.resolveIndexKeys(filter);
|
||||
if (resolution === null) return null;
|
||||
|
||||
const { keys, resolvedFields } = resolution;
|
||||
const needsVerification = this.filterNeedsVerification(filter, resolvedFields);
|
||||
|
||||
const results: T[] = [];
|
||||
for (const key of keys) {
|
||||
const doc = this.store.get(key);
|
||||
if (doc && this.matchesFilter(doc, filter)) {
|
||||
if (!doc) continue;
|
||||
if (needsVerification && !this.matchesFilter(doc, filter)) continue;
|
||||
results.push(doc);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,17 +17,17 @@ export class EphemeralTransport {
|
||||
return response;
|
||||
}
|
||||
|
||||
async receive<T extends ZodType>(url: string, schema: T): Promise<output<T>>;
|
||||
async receive(url: string): Promise<unknown>;
|
||||
async receive(url: string, schema?: ZodType): Promise<unknown> {
|
||||
async receive<T extends ZodType | undefined>(url: string, schema?: T): Promise<T extends undefined ? unknown : output<T>> {
|
||||
const transport = await this.transports(url);
|
||||
const message = await transport.waitFor('message', () => true);
|
||||
transport.disconnect();
|
||||
|
||||
if (schema) {
|
||||
return schema.parse(message);
|
||||
// TODO: Figure out how the hell to fix this assertion. It shouldnt be needed, but TS is being a bitch.
|
||||
return schema.parse(message) as T extends undefined ? unknown : output<T>;
|
||||
}
|
||||
|
||||
return message;
|
||||
// TODO: Figure out how the hell to fix this assertion. It shouldnt be needed, but TS is being a bitch.
|
||||
return message as T extends undefined ? unknown : output<T>;
|
||||
}
|
||||
}
|
||||
|
||||
553
src/utils/btree.ts
Normal file
553
src/utils/btree.ts
Normal file
@@ -0,0 +1,553 @@
|
||||
/**
|
||||
* Generic comparator function. Returns negative if a < b, positive if a > b, 0 if equal.
|
||||
*/
|
||||
export type Comparator<K> = (a: K, b: K) => number;
|
||||
|
||||
/**
|
||||
* Options for range queries.
|
||||
*/
|
||||
export type RangeOptions = {
|
||||
/** Whether the lower bound is inclusive (default: true). */
|
||||
lowerInclusive?: boolean;
|
||||
/** Whether the upper bound is inclusive (default: false). */
|
||||
upperInclusive?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* A single entry returned by range queries and iteration.
|
||||
*/
|
||||
export type BPlusTreeEntry<K, V> = {
|
||||
key: K;
|
||||
values: Set<V>;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Node types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Internal (non-leaf) node. Stores keys that guide searches and pointers
|
||||
* to child nodes. Does NOT store values — all values live in leaves.
|
||||
*/
|
||||
class InternalNode<K, V> {
|
||||
keys: K[] = [];
|
||||
children: Array<InternalNode<K, V> | LeafNode<K, V>> = [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Leaf node. Stores key/value-set pairs and maintains a doubly-linked
|
||||
* list across all leaves for efficient range scans.
|
||||
*/
|
||||
class LeafNode<K, V> {
|
||||
keys: K[] = [];
|
||||
values: Array<Set<V>> = [];
|
||||
next: LeafNode<K, V> | null = null;
|
||||
prev: LeafNode<K, V> | null = null;
|
||||
}
|
||||
|
||||
type Node<K, V> = InternalNode<K, V> | LeafNode<K, V>;
|
||||
|
||||
function isLeaf<K, V>(node: Node<K, V>): node is LeafNode<K, V> {
|
||||
return node instanceof LeafNode;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Default comparator
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Default comparator using native `<` / `>` operators.
|
||||
* Works correctly for numbers, strings, and Dates.
|
||||
*/
|
||||
function defaultComparator<K>(a: K, b: K): number {
|
||||
if (a < b) return -1;
|
||||
if (a > b) return 1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// B+ Tree
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* In-memory B+ Tree with duplicate-key support.
|
||||
*
|
||||
* Each unique key maps to a `Set<V>`, allowing multiple values to share
|
||||
* the same key (e.g. many documents with the same indexed field value).
|
||||
*
|
||||
* Leaf nodes are linked in a doubly-linked list so range scans are O(k)
|
||||
* after the initial O(log n) descent.
|
||||
*
|
||||
* @typeParam K - Key type (must be comparable via the provided comparator)
|
||||
* @typeParam V - Value type stored in each key's Set
|
||||
*/
|
||||
export class BPlusTree<K, V> {
|
||||
/** Maximum number of keys per node. A node splits when it exceeds this. */
|
||||
private readonly maxKeys: number;
|
||||
|
||||
/** Minimum number of keys a non-root node must hold after deletion. */
|
||||
private readonly minKeys: number;
|
||||
|
||||
private readonly compare: Comparator<K>;
|
||||
|
||||
private root: Node<K, V>;
|
||||
|
||||
/** Total number of individual values across all keys. */
|
||||
private _size = 0;
|
||||
|
||||
constructor(order = 32, comparator?: Comparator<K>) {
|
||||
if (order < 3) throw new Error('B+ Tree order must be at least 3');
|
||||
this.maxKeys = order - 1;
|
||||
this.minKeys = Math.ceil(order / 2) - 1;
|
||||
this.compare = comparator ?? defaultComparator;
|
||||
this.root = new LeafNode<K, V>();
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Public API
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/** Total number of individual values stored in the tree. */
|
||||
get size(): number {
|
||||
return this._size;
|
||||
}
|
||||
|
||||
/** Remove all entries from the tree. */
|
||||
clear(): void {
|
||||
this.root = new LeafNode<K, V>();
|
||||
this._size = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert a value under the given key. If the key already exists the
|
||||
* value is added to its Set; otherwise a new key entry is created.
|
||||
*/
|
||||
insert(key: K, value: V): void {
|
||||
const leaf = this.findLeaf(key);
|
||||
const idx = this.leafKeyIndex(leaf, key);
|
||||
|
||||
if (idx < leaf.keys.length && this.compare(leaf.keys[idx], key) === 0) {
|
||||
// Key exists — add to its value set.
|
||||
const before = leaf.values[idx].size;
|
||||
leaf.values[idx].add(value);
|
||||
this._size += leaf.values[idx].size - before;
|
||||
} else {
|
||||
// New key — splice into position.
|
||||
leaf.keys.splice(idx, 0, key);
|
||||
leaf.values.splice(idx, 0, new Set([value]));
|
||||
this._size++;
|
||||
}
|
||||
|
||||
// Split if the leaf overflows.
|
||||
if (leaf.keys.length > this.maxKeys) {
|
||||
this.splitLeaf(leaf);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Look up all values associated with the exact key.
|
||||
* Returns `undefined` if the key is not present.
|
||||
*/
|
||||
get(key: K): Set<V> | undefined {
|
||||
const leaf = this.findLeaf(key);
|
||||
const idx = this.leafKeyIndex(leaf, key);
|
||||
if (idx < leaf.keys.length && this.compare(leaf.keys[idx], key) === 0) {
|
||||
return leaf.values[idx];
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a value (or all values) for the given key.
|
||||
*
|
||||
* - If `value` is provided, only that value is removed from the key's Set.
|
||||
* The key entry is removed when its Set becomes empty.
|
||||
* - If `value` is omitted, the entire key entry (with all values) is removed.
|
||||
*
|
||||
* @returns `true` if something was removed, `false` if the key/value wasn't found.
|
||||
*/
|
||||
delete(key: K, value?: V): boolean {
|
||||
const leaf = this.findLeaf(key);
|
||||
const idx = this.leafKeyIndex(leaf, key);
|
||||
|
||||
if (idx >= leaf.keys.length || this.compare(leaf.keys[idx], key) !== 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (value !== undefined) {
|
||||
const set = leaf.values[idx];
|
||||
if (!set.has(value)) return false;
|
||||
set.delete(value);
|
||||
this._size--;
|
||||
|
||||
if (set.size > 0) return true;
|
||||
// Set empty — fall through to remove the key entry entirely.
|
||||
} else {
|
||||
this._size -= leaf.values[idx].size;
|
||||
}
|
||||
|
||||
leaf.keys.splice(idx, 1);
|
||||
leaf.values.splice(idx, 1);
|
||||
|
||||
// Rebalance if needed (skip for root leaf).
|
||||
if (leaf !== this.root && leaf.keys.length < this.minKeys) {
|
||||
this.rebalanceLeaf(leaf);
|
||||
}
|
||||
|
||||
// Shrink tree height if the root internal node has a single child.
|
||||
if (!isLeaf(this.root) && this.root.children.length === 1) {
|
||||
this.root = this.root.children[0];
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Range query. Returns all entries whose keys fall within `[min, max]`
|
||||
* (bounds configurable via `opts`).
|
||||
*
|
||||
* - Omit `min` for an unbounded lower end.
|
||||
* - Omit `max` for an unbounded upper end.
|
||||
* - Omit both to iterate the entire tree in key order.
|
||||
*
|
||||
* Default bounds: lower inclusive, upper exclusive (half-open interval).
|
||||
*/
|
||||
range(
|
||||
min?: K,
|
||||
max?: K,
|
||||
opts?: RangeOptions,
|
||||
): BPlusTreeEntry<K, V>[] {
|
||||
const lowerInc = opts?.lowerInclusive ?? true;
|
||||
const upperInc = opts?.upperInclusive ?? false;
|
||||
|
||||
const results: BPlusTreeEntry<K, V>[] = [];
|
||||
|
||||
// Find the starting leaf.
|
||||
let leaf: LeafNode<K, V>;
|
||||
let startIdx: number;
|
||||
|
||||
if (min !== undefined) {
|
||||
leaf = this.findLeaf(min);
|
||||
startIdx = this.leafKeyIndex(leaf, min);
|
||||
// Adjust for exclusive lower bound.
|
||||
if (!lowerInc && startIdx < leaf.keys.length && this.compare(leaf.keys[startIdx], min) === 0) {
|
||||
startIdx++;
|
||||
}
|
||||
} else {
|
||||
leaf = this.firstLeaf();
|
||||
startIdx = 0;
|
||||
}
|
||||
|
||||
// Walk the leaf chain collecting matching entries.
|
||||
let currentLeaf: LeafNode<K, V> | null = leaf;
|
||||
let i = startIdx;
|
||||
|
||||
while (currentLeaf) {
|
||||
while (i < currentLeaf.keys.length) {
|
||||
const key = currentLeaf.keys[i];
|
||||
|
||||
if (max !== undefined) {
|
||||
const cmp = this.compare(key, max);
|
||||
if (cmp > 0 || (cmp === 0 && !upperInc)) {
|
||||
return results;
|
||||
}
|
||||
}
|
||||
|
||||
results.push({ key, values: currentLeaf.values[i] });
|
||||
i++;
|
||||
}
|
||||
|
||||
currentLeaf = currentLeaf.next;
|
||||
i = 0;
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate over all entries in key order.
|
||||
*/
|
||||
*entries(): IterableIterator<BPlusTreeEntry<K, V>> {
|
||||
let leaf: LeafNode<K, V> | null = this.firstLeaf();
|
||||
while (leaf) {
|
||||
for (let i = 0; i < leaf.keys.length; i++) {
|
||||
yield { key: leaf.keys[i], values: leaf.values[i] };
|
||||
}
|
||||
leaf = leaf.next;
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Tree navigation
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Descend to the leaf node that should contain the given key.
|
||||
*/
|
||||
private findLeaf(key: K): LeafNode<K, V> {
|
||||
let node: Node<K, V> = this.root;
|
||||
while (!isLeaf(node)) {
|
||||
const internal = node as InternalNode<K, V>;
|
||||
let childIdx = internal.keys.length;
|
||||
for (let i = 0; i < internal.keys.length; i++) {
|
||||
if (this.compare(key, internal.keys[i]) < 0) {
|
||||
childIdx = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
node = internal.children[childIdx];
|
||||
}
|
||||
return node;
|
||||
}
|
||||
|
||||
/** Get the leftmost leaf in the tree. */
|
||||
private firstLeaf(): LeafNode<K, V> {
|
||||
let node: Node<K, V> = this.root;
|
||||
while (!isLeaf(node)) {
|
||||
node = (node as InternalNode<K, V>).children[0];
|
||||
}
|
||||
return node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Binary search within a leaf for the insertion position of `key`.
|
||||
* Returns the index of the first key >= `key`.
|
||||
*/
|
||||
private leafKeyIndex(leaf: LeafNode<K, V>, key: K): number {
|
||||
let lo = 0;
|
||||
let hi = leaf.keys.length;
|
||||
while (lo < hi) {
|
||||
const mid = (lo + hi) >>> 1;
|
||||
if (this.compare(leaf.keys[mid], key) < 0) {
|
||||
lo = mid + 1;
|
||||
} else {
|
||||
hi = mid;
|
||||
}
|
||||
}
|
||||
return lo;
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Splitting
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/**
 * Split an overflowing leaf node. The right half becomes a new leaf,
 * and a copy of its first key is promoted to the parent.
 */
private splitLeaf(leaf: LeafNode<K, V>): void {
  // Right-biased split point: the left leaf keeps ceil(n/2) entries.
  const mid = Math.ceil(leaf.keys.length / 2);
  const newLeaf = new LeafNode<K, V>();

  // splice() simultaneously removes the right half from `leaf` and
  // hands it to `newLeaf`, keeping keys and values aligned by index.
  newLeaf.keys = leaf.keys.splice(mid);
  newLeaf.values = leaf.values.splice(mid);

  // Maintain the doubly-linked list.
  newLeaf.next = leaf.next;
  newLeaf.prev = leaf;
  if (leaf.next) leaf.next.prev = newLeaf;
  leaf.next = newLeaf;

  // B+ tree leaf split: the separator is a *copy* of the new leaf's
  // first key — the key itself stays in the leaf level.
  const promotedKey = newLeaf.keys[0];
  this.insertIntoParent(leaf, promotedKey, newLeaf);
}
|
||||
|
||||
/**
 * Split an overflowing internal node. The middle key is pushed up
 * to the parent (not copied — it's removed from this level).
 */
private splitInternal(node: InternalNode<K, V>): void {
  const mid = Math.floor(node.keys.length / 2);
  const promotedKey = node.keys[mid];

  const newNode = new InternalNode<K, V>();
  // Everything to the right of the promoted key moves to the new node;
  // children are split one position further right so each node keeps
  // exactly keys.length + 1 children.
  newNode.keys = node.keys.splice(mid + 1);
  newNode.children = node.children.splice(mid + 1);
  node.keys.splice(mid, 1); // remove the promoted key

  this.insertIntoParent(node, promotedKey, newNode);
}
|
||||
|
||||
/**
 * Insert a promoted key and new right child into the parent of `left`.
 * If `left` is the root, a new root is created.
 */
private insertIntoParent(
  left: Node<K, V>,
  key: K,
  right: Node<K, V>,
): void {
  // Splitting the root grows the tree by one level: the new root holds
  // only the promoted key with the two halves as its children.
  if (left === this.root) {
    const newRoot = new InternalNode<K, V>();
    newRoot.keys = [key];
    newRoot.children = [left, right];
    this.root = newRoot;
    return;
  }

  // NOTE(review): findParent walks the whole tree (no parent pointers),
  // so each split costs a top-down search — confirm acceptable.
  const parent = this.findParent(this.root, left) as InternalNode<K, V>;
  const idx = parent.children.indexOf(left);

  // The new separator goes at `left`'s slot; the new child just after it.
  parent.keys.splice(idx, 0, key);
  parent.children.splice(idx + 1, 0, right);

  // Propagate the split upward if the parent now overflows.
  if (parent.keys.length > this.maxKeys) {
    this.splitInternal(parent);
  }
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Rebalancing (deletion)
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/**
 * Rebalance a leaf that has fewer than `minKeys` entries after deletion.
 * Tries to borrow from a sibling first; if neither sibling can spare
 * a key, merges with a sibling.
 */
private rebalanceLeaf(leaf: LeafNode<K, V>): void {
  // NOTE(review): the cast assumes `leaf` is never the root here
  // (findParent returns null for the root) — confirm callers guard this.
  const parent = this.findParent(this.root, leaf) as InternalNode<K, V>;
  const idx = parent.children.indexOf(leaf);

  // Try borrowing from the right sibling.
  if (idx < parent.children.length - 1) {
    const rightSibling = parent.children[idx + 1] as LeafNode<K, V>;
    if (rightSibling.keys.length > this.minKeys) {
      // Move the sibling's smallest entry onto our right edge, then
      // refresh the separator to the sibling's new first key.
      leaf.keys.push(rightSibling.keys.shift()!);
      leaf.values.push(rightSibling.values.shift()!);
      parent.keys[idx] = rightSibling.keys[0];
      return;
    }
  }

  // Try borrowing from the left sibling.
  if (idx > 0) {
    const leftSibling = parent.children[idx - 1] as LeafNode<K, V>;
    if (leftSibling.keys.length > this.minKeys) {
      // Move the sibling's largest entry onto our left edge, then the
      // separator becomes our new first key.
      leaf.keys.unshift(leftSibling.keys.pop()!);
      leaf.values.unshift(leftSibling.values.pop()!);
      parent.keys[idx - 1] = leaf.keys[0];
      return;
    }
  }

  // Merge with a sibling. Always merge the right node into the left so
  // the leaf chain stays ordered; separatorIdx is the left node's slot.
  if (idx < parent.children.length - 1) {
    this.mergeLeaves(leaf, parent.children[idx + 1] as LeafNode<K, V>, parent, idx);
  } else {
    this.mergeLeaves(parent.children[idx - 1] as LeafNode<K, V>, leaf, parent, idx - 1);
  }
}
|
||||
|
||||
/**
 * Merge `right` leaf into `left` leaf and remove the separator key
 * from the parent.
 */
private mergeLeaves(
  left: LeafNode<K, V>,
  right: LeafNode<K, V>,
  parent: InternalNode<K, V>,
  separatorIdx: number,
): void {
  left.keys.push(...right.keys);
  left.values.push(...right.values);

  // Fix linked list pointers. `right` becomes unreachable afterwards,
  // so its own prev/next are intentionally left untouched.
  left.next = right.next;
  if (right.next) right.next.prev = left;

  // Remove the separator key and right child from the parent.
  parent.keys.splice(separatorIdx, 1);
  parent.children.splice(separatorIdx + 1, 1);

  // Recursively rebalance the parent if needed. The root is exempt —
  // it may hold fewer than minKeys.
  if (parent !== this.root && parent.keys.length < this.minKeys) {
    this.rebalanceInternal(parent);
  }
}
|
||||
|
||||
/**
 * Rebalance an internal node that has too few keys after a merge.
 *
 * Mirrors rebalanceLeaf, but borrowing is a rotation through the
 * parent: the separator key moves down into this node and the
 * sibling's adjacent key replaces it.
 */
private rebalanceInternal(node: InternalNode<K, V>): void {
  // NOTE(review): assumes `node` is not the root (callers check) —
  // otherwise findParent returns null and the cast is unsound.
  const parent = this.findParent(this.root, node) as InternalNode<K, V>;
  const idx = parent.children.indexOf(node);

  // Try borrowing from the right sibling.
  if (idx < parent.children.length - 1) {
    const rightSibling = parent.children[idx + 1] as InternalNode<K, V>;
    if (rightSibling.keys.length > this.minKeys) {
      // Rotate left: separator moves down, sibling's first key moves up,
      // and the sibling's first child comes along.
      node.keys.push(parent.keys[idx]);
      parent.keys[idx] = rightSibling.keys.shift()!;
      node.children.push(rightSibling.children.shift()!);
      return;
    }
  }

  // Try borrowing from the left sibling.
  if (idx > 0) {
    const leftSibling = parent.children[idx - 1] as InternalNode<K, V>;
    if (leftSibling.keys.length > this.minKeys) {
      // Rotate right: mirror image of the case above.
      node.keys.unshift(parent.keys[idx - 1]);
      parent.keys[idx - 1] = leftSibling.keys.pop()!;
      node.children.unshift(leftSibling.children.pop()!);
      return;
    }
  }

  // Merge with a sibling (always right node into left node).
  if (idx < parent.children.length - 1) {
    const rightSibling = parent.children[idx + 1] as InternalNode<K, V>;
    this.mergeInternal(node, rightSibling, parent, idx);
  } else {
    const leftSibling = parent.children[idx - 1] as InternalNode<K, V>;
    this.mergeInternal(leftSibling, node, parent, idx - 1);
  }
}
|
||||
|
||||
/**
 * Merge two internal nodes by pulling down the separator key from the
 * parent and concatenating children.
 */
private mergeInternal(
  left: InternalNode<K, V>,
  right: InternalNode<K, V>,
  parent: InternalNode<K, V>,
  separatorIdx: number,
): void {
  // The separator comes down between the two key runs, keeping the
  // invariant children.length === keys.length + 1 on the merged node.
  left.keys.push(parent.keys[separatorIdx]);
  left.keys.push(...right.keys);
  left.children.push(...right.children);

  parent.keys.splice(separatorIdx, 1);
  parent.children.splice(separatorIdx + 1, 1);

  // Underflow may cascade up to (but never including) the root.
  if (parent !== this.root && parent.keys.length < this.minKeys) {
    this.rebalanceInternal(parent);
  }
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Utilities
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Walk the tree from `current` downward to find the parent of `target`.
|
||||
* Returns `null` if `target` is the root or not found.
|
||||
*/
|
||||
private findParent(
|
||||
current: Node<K, V>,
|
||||
target: Node<K, V>,
|
||||
): InternalNode<K, V> | null {
|
||||
if (isLeaf(current)) return null;
|
||||
const internal = current as InternalNode<K, V>;
|
||||
|
||||
for (const child of internal.children) {
|
||||
if (child === target) return internal;
|
||||
const found = this.findParent(child, target);
|
||||
if (found) return found;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
export * from './event-emitter.js';
|
||||
export * from './exponential-backoff.js';
|
||||
export * from './sse-session.js';
|
||||
export * from './btree.js';
|
||||
|
||||
58
tests/storage/storage-regressions.test.ts
Normal file
58
tests/storage/storage-regressions.test.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import { AESKey } from 'src/crypto/aes-key.js';
|
||||
|
||||
import { EncryptedStorage } from 'src/storage/encrypted-storage.js';
|
||||
import { StorageMemory } from 'src/storage/storage-memory.js';
|
||||
import { StorageMemorySynced } from 'src/storage/storage-memory-synced.js';
|
||||
|
||||
// Regression suite: each test pins a previously-observed storage bug so
// it cannot silently reappear.
describe('storage regressions', () => {
  it('does not treat $eq as fully resolved when mixed with other operators', async () => {
    const storage = StorageMemory.from<{ age: number }>(['age']);
    await storage.insertMany([{ age: 25 }, { age: 35 }]);

    // Both operators must hold: $eq 25 AND $gt 30 is unsatisfiable,
    // so the result set must be empty.
    const results = await storage.find({ age: { $eq: 25, $gt: 30 } });
    expect(results).toEqual([]);
  });

  it('normalizes conflicting lower/upper bounds to strictest constraints', async () => {
    const storage = StorageMemory.from<{ age: number }>(['age']);
    await storage.insertMany([{ age: 30 }, { age: 31 }, { age: 32 }]);

    // $gt 30 is stricter than $gte 30 — 30 itself must be excluded.
    const lower = await storage.find({ age: { $gt: 30, $gte: 30 } });
    expect(lower.map((d) => d.age).sort((a, b) => a - b)).toEqual([31, 32]);

    // $lt 32 is stricter than $lte 32 — 32 itself must be excluded.
    const upper = await storage.find({ age: { $lt: 32, $lte: 32 } });
    expect(upper.map((d) => d.age).sort((a, b) => a - b)).toEqual([30, 31]);
  });

  it('formats plaintext operator values in encrypted filters', async () => {
    type Doc = { createdAt: Date; name: string };
    const key = await AESKey.fromSeed('storage-regression-test-key');
    const base = StorageMemory.from<Record<string, any>>(['createdAt']);
    // `createdAt` stays plaintext so range operators can be evaluated
    // against the stored (unencrypted) value.
    const storage = EncryptedStorage.from<Doc>(base, key, { plaintextKeys: ['createdAt'] });

    await storage.insertOne({
      createdAt: new Date('2024-01-02T00:00:00.000Z'),
      name: 'alice',
    });

    // A Date inside an operator object must match the stored document.
    const results = await storage.find({
      createdAt: { $gte: new Date('2024-01-01T00:00:00.000Z') },
    });
    expect(results).toHaveLength(1);
    expect(results[0].name).toBe('alice');
  });

  it('primes derived child cache in StorageMemorySynced', async () => {
    const store = StorageMemory.from<Record<string, any>>();
    const child = store.deriveChild<{ value: number }>('child');
    await child.insertOne({ value: 42 });

    // A child derived AFTER syncing must still see pre-existing data.
    const synced = await StorageMemorySynced.create(store);
    const syncedChild = synced.deriveChild<{ value: number }>('child');
    const results = await syncedChild.find();

    expect(results).toEqual([{ value: 42 }]);
  });
});
|
||||
392
tests/utils/btree.test.ts
Normal file
392
tests/utils/btree.test.ts
Normal file
@@ -0,0 +1,392 @@
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import { BPlusTree } from '../../src/utils/btree.js';
|
||||
|
||||
// Unit tests for the B+ tree index structure. Keys map to Sets of
// values (multi-map semantics); `size` counts key/value pairs.
describe('BPlusTree', () => {
  let tree: BPlusTree<number, string>;

  beforeEach(() => {
    tree = new BPlusTree<number, string>();
  });

  // -------------------------------------------------------------------------
  // Construction
  // -------------------------------------------------------------------------

  describe('constructor', () => {
    it('should create an empty tree', () => {
      expect(tree.size).toBe(0);
    });

    it('should reject order < 3', () => {
      // Order 2 cannot satisfy B+ tree invariants.
      expect(() => new BPlusTree(2)).toThrow('order must be at least 3');
    });
  });

  // -------------------------------------------------------------------------
  // Insert & Get
  // -------------------------------------------------------------------------

  describe('insert and get', () => {
    it('should insert and retrieve a single entry', () => {
      tree.insert(10, 'a');
      expect(tree.get(10)).toEqual(new Set(['a']));
      expect(tree.size).toBe(1);
    });

    it('should handle multiple distinct keys', () => {
      tree.insert(10, 'a');
      tree.insert(20, 'b');
      tree.insert(5, 'c');
      expect(tree.get(10)).toEqual(new Set(['a']));
      expect(tree.get(20)).toEqual(new Set(['b']));
      expect(tree.get(5)).toEqual(new Set(['c']));
      expect(tree.size).toBe(3);
    });

    it('should return undefined for missing keys', () => {
      tree.insert(10, 'a');
      expect(tree.get(99)).toBeUndefined();
    });

    it('should accumulate duplicate keys into a Set', () => {
      tree.insert(10, 'a');
      tree.insert(10, 'b');
      tree.insert(10, 'c');
      expect(tree.get(10)).toEqual(new Set(['a', 'b', 'c']));
      expect(tree.size).toBe(3);
    });

    it('should not double-count duplicate values for the same key', () => {
      // Set semantics: inserting the same (key, value) twice is a no-op
      // for size.
      tree.insert(10, 'a');
      tree.insert(10, 'a');
      expect(tree.get(10)).toEqual(new Set(['a']));
      expect(tree.size).toBe(1);
    });
  });

  // -------------------------------------------------------------------------
  // Delete
  // -------------------------------------------------------------------------

  describe('delete', () => {
    it('should delete a specific value from a key', () => {
      tree.insert(10, 'a');
      tree.insert(10, 'b');
      expect(tree.delete(10, 'a')).toBe(true);
      expect(tree.get(10)).toEqual(new Set(['b']));
      expect(tree.size).toBe(1);
    });

    it('should remove the key entry when its last value is deleted', () => {
      tree.insert(10, 'a');
      expect(tree.delete(10, 'a')).toBe(true);
      expect(tree.get(10)).toBeUndefined();
      expect(tree.size).toBe(0);
    });

    it('should delete all values for a key when value is omitted', () => {
      tree.insert(10, 'a');
      tree.insert(10, 'b');
      expect(tree.delete(10)).toBe(true);
      expect(tree.get(10)).toBeUndefined();
      expect(tree.size).toBe(0);
    });

    it('should return false for non-existent key', () => {
      expect(tree.delete(99)).toBe(false);
    });

    it('should return false for non-existent value', () => {
      tree.insert(10, 'a');
      expect(tree.delete(10, 'z')).toBe(false);
      expect(tree.size).toBe(1);
    });
  });

  // -------------------------------------------------------------------------
  // Range queries
  // -------------------------------------------------------------------------

  describe('range', () => {
    beforeEach(() => {
      // Keys 0..99, one value per key.
      for (let i = 0; i < 100; i++) {
        tree.insert(i, `v${i}`);
      }
    });

    it('should return all entries when no bounds given', () => {
      const result = tree.range();
      expect(result.length).toBe(100);
      expect(result[0].key).toBe(0);
      expect(result[99].key).toBe(99);
    });

    it('should return entries in key order', () => {
      const keys = tree.range().map((e) => e.key);
      for (let i = 1; i < keys.length; i++) {
        expect(keys[i]).toBeGreaterThan(keys[i - 1]);
      }
    });

    it('should respect lower bound (inclusive by default)', () => {
      const result = tree.range(50);
      expect(result.length).toBe(50);
      expect(result[0].key).toBe(50);
    });

    it('should respect upper bound (exclusive by default)', () => {
      const result = tree.range(undefined, 10);
      expect(result.length).toBe(10);
      expect(result[result.length - 1].key).toBe(9);
    });

    it('should support inclusive upper bound', () => {
      const result = tree.range(undefined, 10, { upperInclusive: true });
      expect(result.length).toBe(11);
      expect(result[result.length - 1].key).toBe(10);
    });

    it('should support exclusive lower bound', () => {
      const result = tree.range(50, undefined, { lowerInclusive: false });
      expect(result.length).toBe(49);
      expect(result[0].key).toBe(51);
    });

    it('should handle combined bounds', () => {
      // [20, 30) — half-open by default.
      const result = tree.range(20, 30);
      expect(result.length).toBe(10);
      expect(result[0].key).toBe(20);
      expect(result[result.length - 1].key).toBe(29);
    });

    it('should return empty array for no-result range', () => {
      const result = tree.range(200, 300);
      expect(result).toEqual([]);
    });

    it('should return empty for inverted bounds', () => {
      const result = tree.range(50, 10);
      expect(result).toEqual([]);
    });
  });

  // -------------------------------------------------------------------------
  // Edge cases
  // -------------------------------------------------------------------------

  describe('edge cases', () => {
    it('should handle get on empty tree', () => {
      expect(tree.get(1)).toBeUndefined();
    });

    it('should handle range on empty tree', () => {
      expect(tree.range()).toEqual([]);
      expect(tree.range(1, 2)).toEqual([]);
    });

    it('should handle delete on empty tree', () => {
      expect(tree.delete(1)).toBe(false);
    });

    it('should handle insert-then-delete-all back to empty', () => {
      for (let i = 0; i < 50; i++) {
        tree.insert(i, `v${i}`);
      }
      for (let i = 0; i < 50; i++) {
        expect(tree.delete(i, `v${i}`)).toBe(true);
      }
      expect(tree.size).toBe(0);
      expect(tree.range()).toEqual([]);
      // Verify we can still insert after emptying.
      tree.insert(1, 'new');
      expect(tree.get(1)).toEqual(new Set(['new']));
    });
  });

  // -------------------------------------------------------------------------
  // Clear
  // -------------------------------------------------------------------------

  describe('clear', () => {
    it('should reset the tree to empty', () => {
      for (let i = 0; i < 100; i++) tree.insert(i, `v${i}`);
      expect(tree.size).toBe(100);
      tree.clear();
      expect(tree.size).toBe(0);
      expect(tree.get(0)).toBeUndefined();
      expect(tree.range()).toEqual([]);
    });
  });

  // -------------------------------------------------------------------------
  // Entries iterator
  // -------------------------------------------------------------------------

  describe('entries', () => {
    it('should yield all entries in key order', () => {
      // Inserted out of order; iteration must still be sorted.
      tree.insert(30, 'c');
      tree.insert(10, 'a');
      tree.insert(20, 'b');

      const result = [...tree.entries()];
      expect(result.map((e) => e.key)).toEqual([10, 20, 30]);
    });

    it('should yield nothing for empty tree', () => {
      expect([...tree.entries()]).toEqual([]);
    });
  });

  // -------------------------------------------------------------------------
  // Large dataset
  // -------------------------------------------------------------------------

  describe('large dataset', () => {
    const N = 10_000;

    it('should correctly store and retrieve N items', () => {
      for (let i = 0; i < N; i++) {
        tree.insert(i, `v${i}`);
      }
      expect(tree.size).toBe(N);

      // Spot-check some values.
      expect(tree.get(0)).toEqual(new Set(['v0']));
      expect(tree.get(N - 1)).toEqual(new Set([`v${N - 1}`]));
      expect(tree.get(Math.floor(N / 2))).toEqual(new Set([`v${Math.floor(N / 2)}`]));
    });

    it('should produce correct range results on large dataset', () => {
      for (let i = 0; i < N; i++) {
        tree.insert(i, `v${i}`);
      }

      const result = tree.range(5000, 5010);
      expect(result.length).toBe(10);
      expect(result[0].key).toBe(5000);
      expect(result[9].key).toBe(5009);
    });

    it('should survive inserting and deleting many items', () => {
      for (let i = 0; i < N; i++) {
        tree.insert(i, `v${i}`);
      }

      // Delete the first half — exercises borrow/merge paths heavily.
      for (let i = 0; i < N / 2; i++) {
        expect(tree.delete(i, `v${i}`)).toBe(true);
      }

      expect(tree.size).toBe(N / 2);
      expect(tree.get(0)).toBeUndefined();
      expect(tree.get(N / 2)).toEqual(new Set([`v${N / 2}`]));

      // Remaining range should start at N/2.
      const remaining = tree.range();
      expect(remaining.length).toBe(N / 2);
      expect(remaining[0].key).toBe(N / 2);
    });
  });

  // -------------------------------------------------------------------------
  // Custom comparator
  // -------------------------------------------------------------------------

  describe('custom comparator', () => {
    it('should support reverse ordering', () => {
      // Second constructor argument is the comparator.
      const reverseTree = new BPlusTree<number, string>(32, (a, b) => b - a);
      reverseTree.insert(1, 'a');
      reverseTree.insert(2, 'b');
      reverseTree.insert(3, 'c');

      const entries = [...reverseTree.entries()];
      expect(entries.map((e) => e.key)).toEqual([3, 2, 1]);
    });
  });

  // -------------------------------------------------------------------------
  // Node splitting (small order to force splits)
  // -------------------------------------------------------------------------

  describe('node splitting with small order', () => {
    let smallTree: BPlusTree<number, string>;

    beforeEach(() => {
      smallTree = new BPlusTree<number, string>(4);
    });

    it('should handle splits correctly', () => {
      // Order 4 means max 3 keys per node — splits after the 4th insert.
      for (let i = 0; i < 20; i++) {
        smallTree.insert(i, `v${i}`);
      }
      expect(smallTree.size).toBe(20);

      // All values should be retrievable.
      for (let i = 0; i < 20; i++) {
        expect(smallTree.get(i)).toEqual(new Set([`v${i}`]));
      }
    });

    it('should maintain sorted order after many splits', () => {
      // Insert in random order to stress split logic.
      const values = Array.from({ length: 50 }, (_, i) => i);
      // Fisher–Yates shuffle.
      for (let i = values.length - 1; i > 0; i--) {
        const j = Math.floor(Math.random() * (i + 1));
        [values[i], values[j]] = [values[j], values[i]];
      }

      for (const v of values) {
        smallTree.insert(v, `v${v}`);
      }

      const entries = [...smallTree.entries()];
      const keys = entries.map((e) => e.key);
      expect(keys).toEqual([...keys].sort((a, b) => a - b));
    });

    it('should handle delete with merging at small order', () => {
      for (let i = 0; i < 20; i++) {
        smallTree.insert(i, `v${i}`);
      }

      // Delete enough to trigger merges.
      for (let i = 0; i < 15; i++) {
        expect(smallTree.delete(i, `v${i}`)).toBe(true);
      }

      expect(smallTree.size).toBe(5);

      // Remaining keys should be intact.
      for (let i = 15; i < 20; i++) {
        expect(smallTree.get(i)).toEqual(new Set([`v${i}`]));
      }
    });
  });

  // -------------------------------------------------------------------------
  // String keys
  // -------------------------------------------------------------------------

  describe('string keys', () => {
    it('should work with string keys using default comparator', () => {
      const strTree = new BPlusTree<string, number>();
      strTree.insert('banana', 1);
      strTree.insert('apple', 2);
      strTree.insert('cherry', 3);

      const entries = [...strTree.entries()];
      expect(entries.map((e) => e.key)).toEqual(['apple', 'banana', 'cherry']);
      expect(strTree.get('banana')).toEqual(new Set([1]));
    });

    it('should support string range queries', () => {
      const strTree = new BPlusTree<string, number>();
      const words = ['apple', 'banana', 'cherry', 'date', 'elderberry', 'fig'];
      words.forEach((w, i) => strTree.insert(w, i));

      // Half-open range: 'elderberry' itself is excluded.
      const result = strTree.range('banana', 'elderberry');
      expect(result.map((e) => e.key)).toEqual(['banana', 'cherry', 'date']);
    });
  });
});
|
||||
Reference in New Issue
Block a user