Don't assume id on storage objects
This commit is contained in:
@@ -1,36 +1,189 @@
|
||||
import { BaseStorage, FindOptions } from './base-storage.js';
|
||||
import { BaseStorage, FindOptions, type IndexDefinition } from './base-storage.js';
|
||||
|
||||
/**
|
||||
* Separator used when joining multiple field values into a single index key.
|
||||
* Chosen to be unlikely to appear in real field values.
|
||||
*/
|
||||
const INDEX_KEY_SEP = '\x00';
|
||||
|
||||
/**
|
||||
* Normalize an IndexDefinition into a canonical `string[][]` form.
|
||||
* A flat `string[]` like `['id', 'name']` becomes `[['id'], ['name']]`.
|
||||
* An already-nested `string[][]` is returned as-is.
|
||||
*/
|
||||
function normalizeIndexes(indexes?: IndexDefinition): string[][] {
|
||||
if (!indexes || indexes.length === 0) return [];
|
||||
|
||||
// If the first element is a string, treat the whole array as shorthand.
|
||||
if (typeof indexes[0] === 'string') {
|
||||
return (indexes as string[]).map((field) => [field]);
|
||||
}
|
||||
|
||||
return indexes as string[][];
|
||||
}
|
||||
|
||||
/**
|
||||
* Implementation of BaseStore using Memory as the storage backend.
|
||||
*
|
||||
* @remarks
|
||||
 * This implementation can be used for testing and caching of expensive operations.
|
||||
* Documents are keyed internally by an auto-incrementing numeric key.
|
||||
* Optional indexes provide O(1) lookups when a query filter matches
|
||||
* an index exactly.
|
||||
*/
|
||||
export class StorageMemory<
|
||||
T extends Record<string, any> = Record<string, any>,
|
||||
> extends BaseStorage<T> {
|
||||
// TODO: Eventually this may accept indexes as an argument.
|
||||
static from<T extends Record<string, any>>(): StorageMemory<T> {
|
||||
return new StorageMemory<T>();
|
||||
static from<T extends Record<string, any>>(
|
||||
indexes?: IndexDefinition,
|
||||
): StorageMemory<T> {
|
||||
return new StorageMemory<T>(indexes);
|
||||
}
|
||||
|
||||
private store: Map<string, T>;
|
||||
/** Auto-incrementing counter used to generate internal keys. */
|
||||
private nextKey = 0;
|
||||
|
||||
/** Primary document store keyed by an opaque internal key. */
|
||||
private store: Map<number, T>;
|
||||
|
||||
/**
|
||||
* Secondary index maps.
|
||||
* Outer key = index name (joined field names).
|
||||
* Inner key = index value (joined field values from a document).
|
||||
* Inner value = set of internal keys that share this index value.
|
||||
*/
|
||||
private indexes: Map<string, Map<string, Set<number>>>;
|
||||
|
||||
/** The normalized index definitions supplied at construction time. */
|
||||
private indexDefs: string[][];
|
||||
|
||||
/** Lazily-created child storage instances. */
|
||||
private children: Map<string, StorageMemory<any>>;
|
||||
|
||||
constructor() {
|
||||
constructor(indexes?: IndexDefinition) {
|
||||
super();
|
||||
|
||||
this.store = new Map();
|
||||
this.children = new Map();
|
||||
this.indexDefs = normalizeIndexes(indexes);
|
||||
|
||||
// Initialise an empty map for each index definition.
|
||||
this.indexes = new Map();
|
||||
for (const fields of this.indexDefs) {
|
||||
this.indexes.set(fields.join(INDEX_KEY_SEP), new Map());
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Abstract method implementations
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async insertMany(documents: Array<T>): Promise<void> {
|
||||
for (const document of documents) {
|
||||
this.store.set(document.id, document);
|
||||
const key = this.nextKey++;
|
||||
this.store.set(key, document);
|
||||
this.addToIndexes(key, document);
|
||||
this.emit('insert', { value: document });
|
||||
}
|
||||
}
|
||||
|
||||
async find(filter?: Partial<T>, options?: FindOptions): Promise<T[]> {
|
||||
let results: T[];
|
||||
|
||||
// Attempt to satisfy the query via an index.
|
||||
const indexed = this.findViaIndex(filter);
|
||||
|
||||
if (indexed !== null) {
|
||||
results = indexed;
|
||||
} else {
|
||||
// Fall back to a full scan.
|
||||
results = [];
|
||||
for (const [, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
results.push(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Apply sort before skip/limit so the window is deterministic.
|
||||
if (options?.sort) {
|
||||
results = this.applySorting(results, options.sort);
|
||||
}
|
||||
|
||||
const startIndex = options?.skip ?? 0;
|
||||
const endIndex = options?.limit
|
||||
? startIndex + options.limit
|
||||
: results.length;
|
||||
|
||||
return results.slice(startIndex, endIndex);
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
update: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
const itemsToUpdate = this.collectMatches(filter);
|
||||
|
||||
const startIndex = options.skip ?? 0;
|
||||
const endIndex = options.limit
|
||||
? startIndex + options.limit
|
||||
: itemsToUpdate.length;
|
||||
const itemsToProcess = itemsToUpdate.slice(startIndex, endIndex);
|
||||
|
||||
let updated = 0;
|
||||
for (const [key, oldValue] of itemsToProcess) {
|
||||
const updatedValue = { ...oldValue, ...update } as T;
|
||||
|
||||
// Re-index: remove old entries, store new doc, add new entries.
|
||||
this.removeFromIndexes(key, oldValue);
|
||||
this.store.set(key, updatedValue);
|
||||
this.addToIndexes(key, updatedValue);
|
||||
|
||||
this.emit('update', { oldValue, value: updatedValue });
|
||||
updated++;
|
||||
}
|
||||
|
||||
return updated;
|
||||
}
|
||||
|
||||
async deleteMany(
|
||||
filter: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
const rowsToDelete = this.collectMatches(filter);
|
||||
|
||||
const startIndex = options.skip ?? 0;
|
||||
const endIndex = options.limit
|
||||
? startIndex + options.limit
|
||||
: rowsToDelete.length;
|
||||
const rowsToProcess = rowsToDelete.slice(startIndex, endIndex);
|
||||
|
||||
let deleted = 0;
|
||||
for (const [key, value] of rowsToProcess) {
|
||||
this.removeFromIndexes(key, value);
|
||||
this.store.delete(key);
|
||||
this.emit('delete', { value });
|
||||
deleted++;
|
||||
}
|
||||
|
||||
return deleted;
|
||||
}
|
||||
|
||||
deriveChild<C>(path: string): BaseStorage<C> {
|
||||
if (!this.children.has(path)) {
|
||||
this.children.set(path, new StorageMemory<C>(this.indexDefs));
|
||||
}
|
||||
return this.children.get(path) as StorageMemory<C>;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Private helpers — filtering
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Checks whether a document satisfies every field in the filter.
|
||||
* An empty or undefined filter matches everything.
|
||||
*/
|
||||
private matchesFilter(item: T, filter?: Partial<T>): boolean {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return true;
|
||||
@@ -44,84 +197,146 @@ export class StorageMemory<
|
||||
return true;
|
||||
}
|
||||
|
||||
async find(filter?: Partial<T>, options?: FindOptions): Promise<T[]> {
|
||||
const results: T[] = [];
|
||||
for (const [, value] of this.store) {
|
||||
/**
|
||||
* Collect all [internalKey, document] pairs that match a filter.
|
||||
* Uses an index when possible, otherwise falls back to a full scan.
|
||||
*/
|
||||
private collectMatches(filter?: Partial<T>): Array<[number, T]> {
|
||||
const indexKeys = this.resolveIndexKeys(filter);
|
||||
|
||||
if (indexKeys !== null) {
|
||||
// We have candidate internal keys from the index — fetch and verify.
|
||||
const results: Array<[number, T]> = [];
|
||||
for (const key of indexKeys) {
|
||||
const doc = this.store.get(key);
|
||||
if (doc && this.matchesFilter(doc, filter)) {
|
||||
results.push([key, doc]);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
// Full scan.
|
||||
const results: Array<[number, T]> = [];
|
||||
for (const [key, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
results.push(value);
|
||||
results.push([key, value]);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
update: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
let updated = 0;
|
||||
const itemsToUpdate: Array<[string, T]> = [];
|
||||
|
||||
// Collect all matching items
|
||||
for (const [key, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
itemsToUpdate.push([key, value]);
|
||||
/**
|
||||
* Sort an array of documents according to a sort specification.
|
||||
* Keys map to `1` (ascending) or `-1` (descending).
|
||||
*/
|
||||
private applySorting(items: T[], sort: Record<string, 1 | -1>): T[] {
|
||||
const sortEntries = Object.entries(sort);
|
||||
return [...items].sort((a, b) => {
|
||||
for (const [key, direction] of sortEntries) {
|
||||
if (a[key] < b[key]) return -1 * direction;
|
||||
if (a[key] > b[key]) return 1 * direction;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply skip and limit
|
||||
const startIndex = options.skip || 0;
|
||||
const endIndex = options.limit
|
||||
? startIndex + options.limit
|
||||
: itemsToUpdate.length;
|
||||
const itemsToProcess = itemsToUpdate.slice(startIndex, endIndex);
|
||||
|
||||
// Update items
|
||||
for (const [key, oldValue] of itemsToProcess) {
|
||||
const updatedValue = { ...oldValue, ...update };
|
||||
this.store.set(key, updatedValue);
|
||||
this.emit('update', { value: updatedValue });
|
||||
updated++;
|
||||
}
|
||||
|
||||
return updated;
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
|
||||
async deleteMany(
|
||||
filter: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
let deleted = 0;
|
||||
const rowsToDelete: Array<T> = [];
|
||||
// ---------------------------------------------------------------------------
|
||||
// Private helpers — indexing
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Collect all matching keys
|
||||
for (const [key, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
rowsToDelete.push(value);
|
||||
}
|
||||
/**
|
||||
* Build the index value string for a given document and set of fields.
|
||||
* Returns `null` if any of the fields are missing from the document,
|
||||
* since we can't meaningfully index a partial key.
|
||||
*/
|
||||
private buildIndexValue(doc: Record<string, any>, fields: string[]): string | null {
|
||||
const parts: string[] = [];
|
||||
for (const field of fields) {
|
||||
if (!(field in doc)) return null;
|
||||
parts.push(String(doc[field]));
|
||||
}
|
||||
|
||||
// Apply skip and limit
|
||||
const startIndex = options.skip || 0;
|
||||
const endIndex = options.limit
|
||||
? startIndex + options.limit
|
||||
: rowsToDelete.length;
|
||||
const rowsToProcess = rowsToDelete.slice(startIndex, endIndex);
|
||||
|
||||
// Delete items
|
||||
for (const row of rowsToProcess) {
|
||||
this.store.delete(row.id);
|
||||
this.emit('delete', { value: row });
|
||||
deleted++;
|
||||
}
|
||||
|
||||
return deleted;
|
||||
return parts.join(INDEX_KEY_SEP);
|
||||
}
|
||||
|
||||
deriveChild<C>(path: string): BaseStorage<C> {
|
||||
if (!this.children.has(path)) {
|
||||
this.children.set(path, new StorageMemory<C>());
|
||||
/** Register a document in all applicable indexes. */
|
||||
private addToIndexes(internalKey: number, doc: T): void {
|
||||
for (const fields of this.indexDefs) {
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(doc, fields);
|
||||
if (indexValue === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
let bucket = indexMap.get(indexValue);
|
||||
if (!bucket) {
|
||||
bucket = new Set();
|
||||
indexMap.set(indexValue, bucket);
|
||||
}
|
||||
bucket.add(internalKey);
|
||||
}
|
||||
return this.children.get(path) as StorageMemory<C>;
|
||||
}
|
||||
|
||||
/** Remove a document from all applicable indexes. */
|
||||
private removeFromIndexes(internalKey: number, doc: T): void {
|
||||
for (const fields of this.indexDefs) {
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(doc, fields);
|
||||
if (indexValue === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
const bucket = indexMap.get(indexValue);
|
||||
if (bucket) {
|
||||
bucket.delete(internalKey);
|
||||
if (bucket.size === 0) indexMap.delete(indexValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to resolve a set of candidate internal keys from the indexes.
|
||||
* Returns `null` if no index can serve the query.
|
||||
*
|
||||
* An index is used when the filter fields are a superset of (or equal to)
|
||||
* an index's fields — meaning the index value can be fully constructed
|
||||
* from the filter.
|
||||
*/
|
||||
private resolveIndexKeys(filter?: Partial<T>): Set<number> | null {
|
||||
if (!filter) return null;
|
||||
const filterKeys = Object.keys(filter);
|
||||
if (filterKeys.length === 0) return null;
|
||||
|
||||
for (const fields of this.indexDefs) {
|
||||
// Every field in the index must be present in the filter.
|
||||
if (!fields.every((f) => f in filter)) continue;
|
||||
|
||||
const indexName = fields.join(INDEX_KEY_SEP);
|
||||
const indexValue = this.buildIndexValue(filter, fields);
|
||||
if (indexValue === null) continue;
|
||||
|
||||
const indexMap = this.indexes.get(indexName)!;
|
||||
const bucket = indexMap.get(indexValue);
|
||||
return bucket ?? new Set();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to answer a `find` query entirely through an index.
|
||||
* Returns `null` when no index can serve the filter, signalling
|
||||
* the caller to fall back to a full scan.
|
||||
*/
|
||||
private findViaIndex(filter?: Partial<T>): T[] | null {
|
||||
const keys = this.resolveIndexKeys(filter);
|
||||
if (keys === null) return null;
|
||||
|
||||
const results: T[] = [];
|
||||
for (const key of keys) {
|
||||
const doc = this.store.get(key);
|
||||
if (doc && this.matchesFilter(doc, filter)) {
|
||||
results.push(doc);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user