Initial commit
This commit is contained in:
80
.gitignore
vendored
Normal file
80
.gitignore
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
# Dependencies
|
||||
node_modules/
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage/
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Microbundle cache
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variables file
|
||||
.env
|
||||
.env.test
|
||||
.env.production
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# next.js build output
|
||||
.next
|
||||
|
||||
# nuxt.js build output
|
||||
.nuxt
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
.vscode-test
|
||||
|
||||
# Build output
|
||||
dist/
|
||||
build/
|
||||
4
.prettierrc
Normal file
4
.prettierrc
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"singleQuote": true,
|
||||
"semi": true
|
||||
}
|
||||
191
benchmarks/diffie-helman.ts
Normal file
191
benchmarks/diffie-helman.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
/**
 * Pure TypeScript implementation of Diffie-Hellman key exchange
 *
 * Uses MODP Group 14 (RFC 3526) parameters:
 * - 2048-bit prime modulus
 * - Generator = 2
 */

// MODP Group 14 parameters (RFC 3526).
// The 2048-bit prime modulus, assembled from hex chunks for readability.
const PRIME_2048 = BigInt(
  '0x' +
    'FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1' +
    '29024E088A67CC74020BBEA63B139B22514A08798E3404DD' +
    'EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245' +
    'E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED' +
    'EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D' +
    'C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F' +
    '83655D23DCA3AD961C62F356208552BB9ED529077096966D' +
    '670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF',
);

// Group generator g = 2, as specified by RFC 3526 for this group.
const GENERATOR = BigInt(2);
|
||||
|
||||
/**
|
||||
* Convert Uint8Array to BigInt
|
||||
*/
|
||||
function uint8ArrayToBigInt(bytes: Uint8Array): bigint {
|
||||
let result = BigInt(0);
|
||||
for (let i = 0; i < bytes.length; i++) {
|
||||
result = (result << BigInt(8)) + BigInt(bytes[i]);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert BigInt to Uint8Array of specified length
|
||||
*/
|
||||
function bigIntToUint8Array(value: bigint, byteLength: number): Uint8Array {
|
||||
const result = new Uint8Array(byteLength);
|
||||
let temp = value;
|
||||
|
||||
for (let i = byteLength - 1; i >= 0; i--) {
|
||||
result[i] = Number(temp & BigInt(0xff));
|
||||
temp = temp >> BigInt(8);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Modular exponentiation: (base^exponent) mod modulus
|
||||
* Uses binary exponentiation for efficiency
|
||||
*/
|
||||
function modPow(base: bigint, exponent: bigint, modulus: bigint): bigint {
|
||||
if (modulus === BigInt(1)) return BigInt(0);
|
||||
|
||||
let result = BigInt(1);
|
||||
base = base % modulus;
|
||||
|
||||
while (exponent > BigInt(0)) {
|
||||
if (exponent % BigInt(2) === BigInt(1)) {
|
||||
result = (result * base) % modulus;
|
||||
}
|
||||
exponent = exponent >> BigInt(1);
|
||||
base = (base * base) % modulus;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a Diffie-Hellman public key from a private key
|
||||
*
|
||||
* @param privateKey - Private key as Uint8Array (should be random and less than the prime)
|
||||
* @returns Public key as Uint8Array
|
||||
*/
|
||||
export function generatePublicKey(privateKey: Uint8Array): Uint8Array {
|
||||
const privateKeyBigInt = uint8ArrayToBigInt(privateKey);
|
||||
const publicKeyBigInt = modPow(GENERATOR, privateKeyBigInt, PRIME_2048);
|
||||
return bigIntToUint8Array(publicKeyBigInt, 256); // 2048 bits = 256 bytes
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute Diffie-Hellman shared secret
|
||||
*
|
||||
* @param privateKey - Your private key as Uint8Array
|
||||
* @param publicKey - Other party's public key as Uint8Array
|
||||
* @returns Shared secret as Uint8Array
|
||||
*/
|
||||
export function diffieHellman(
|
||||
privateKey: Uint8Array,
|
||||
publicKey: Uint8Array,
|
||||
): Uint8Array {
|
||||
const privateKeyBigInt = uint8ArrayToBigInt(privateKey);
|
||||
const publicKeyBigInt = uint8ArrayToBigInt(publicKey);
|
||||
|
||||
// Validate that the public key is valid (1 < publicKey < prime-1)
|
||||
if (
|
||||
publicKeyBigInt <= BigInt(1) ||
|
||||
publicKeyBigInt >= PRIME_2048 - BigInt(1)
|
||||
) {
|
||||
throw new Error('Invalid public key: must be between 1 and prime-1');
|
||||
}
|
||||
|
||||
// Compute shared secret: (publicKey^privateKey) mod prime
|
||||
const sharedSecretBigInt = modPow(
|
||||
publicKeyBigInt,
|
||||
privateKeyBigInt,
|
||||
PRIME_2048,
|
||||
);
|
||||
|
||||
return bigIntToUint8Array(sharedSecretBigInt, 256); // 2048 bits = 256 bytes
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a random private key suitable for Diffie-Hellman
|
||||
*
|
||||
* @returns Random private key as Uint8Array
|
||||
*/
|
||||
export function generatePrivateKey(): Uint8Array {
|
||||
// Generate a random 256-byte private key
|
||||
// In practice, you'd use crypto.getRandomValues() but since this is pure TS:
|
||||
const privateKey = new Uint8Array(256);
|
||||
|
||||
// Simple pseudo-random generation (NOT cryptographically secure)
|
||||
// In real applications, use crypto.getRandomValues() or similar
|
||||
for (let i = 0; i < privateKey.length; i++) {
|
||||
privateKey[i] = Math.floor(Math.random() * 256);
|
||||
}
|
||||
|
||||
// Ensure the private key is less than the prime by clearing the most significant bit
|
||||
privateKey[0] &= 0x7f;
|
||||
|
||||
return privateKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Complete Diffie-Hellman key exchange example
|
||||
*
|
||||
* @returns Object with generated keys and shared secret
|
||||
*/
|
||||
export function demonstrateDiffieHellman() {
|
||||
// Alice generates her key pair
|
||||
const alicePrivateKey = generatePrivateKey();
|
||||
const alicePublicKey = generatePublicKey(alicePrivateKey);
|
||||
|
||||
// Bob generates his key pair
|
||||
const bobPrivateKey = generatePrivateKey();
|
||||
const bobPublicKey = generatePublicKey(bobPrivateKey);
|
||||
|
||||
// Both parties compute the same shared secret
|
||||
const aliceSharedSecret = diffieHellman(alicePrivateKey, bobPublicKey);
|
||||
const bobSharedSecret = diffieHellman(bobPrivateKey, alicePublicKey);
|
||||
|
||||
// Verify they computed the same secret
|
||||
const secretsMatch = aliceSharedSecret.every(
|
||||
(byte, index) => byte === bobSharedSecret[index],
|
||||
);
|
||||
|
||||
return {
|
||||
alicePrivateKey,
|
||||
alicePublicKey,
|
||||
bobPrivateKey,
|
||||
bobPublicKey,
|
||||
aliceSharedSecret,
|
||||
bobSharedSecret,
|
||||
secretsMatch,
|
||||
};
|
||||
}
|
||||
|
||||
// Example usage and test
|
||||
if (require.main === module) {
|
||||
console.log('Diffie-Hellman Key Exchange Demo');
|
||||
console.log('================================');
|
||||
|
||||
const demo = demonstrateDiffieHellman();
|
||||
|
||||
console.log(
|
||||
'Alice Public Key (first 8 bytes):',
|
||||
Array.from(demo.alicePublicKey.slice(0, 8)),
|
||||
);
|
||||
console.log(
|
||||
'Bob Public Key (first 8 bytes):',
|
||||
Array.from(demo.bobPublicKey.slice(0, 8)),
|
||||
);
|
||||
console.log(
|
||||
'Shared Secret (first 8 bytes):',
|
||||
Array.from(demo.aliceSharedSecret.slice(0, 8)),
|
||||
);
|
||||
console.log('Secrets Match:', demo.secretsMatch);
|
||||
}
|
||||
7
benchmarks/rng.ts
Normal file
7
benchmarks/rng.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import crypto from 'crypto';
import { Bytes } from '../src/crypto/bytes.js';

// Draw 2048 random bytes via Node's WebCrypto-compatible
// crypto.getRandomValues (available on recent Node versions).
const rng = crypto.getRandomValues(new Uint8Array(2048));
const bytes = Bytes.from(rng);

// Print the random payload as Base64 for inspection / piping elsewhere.
console.log(bytes.toBase64());
|
||||
57
benchmarks/sekp256k1.ts
Normal file
57
benchmarks/sekp256k1.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { Bytes, User } from '../src/index.js';

// Two deterministic test users derived from fixed secrets.
const alice = await User.fromSecret('alice');
const bob = await User.fromSecret('bob');

const alicePublicKey = await alice.getPublicKey();
const bobPublicKey = await bob.getPublicKey();

// Time ECDH shared-secret derivation in both directions.
const keyStart = performance.now();
const aliceSharedSecret = await alice.getSharedSecret(bobPublicKey);
const bobSharedSecret = await bob.getSharedSecret(alicePublicKey);

const keyEnd = performance.now();

console.log(`Key generation time: ${keyEnd - keyStart}ms`);

const message = 'Hello, world!';
const count = 100_000;

// Benchmark 1: full encrypt + decrypt round trip, verifying correctness
// on every iteration.
const start = performance.now();

for (let i = 0; i < count; i++) {
  // Encrypt key with shared secret
  const encryptedKey = await aliceSharedSecret.encrypt(Bytes.fromUtf8(message));

  // Decrypt key with shared secret
  const decryptedKey = await bobSharedSecret.decrypt(encryptedKey);

  if (decryptedKey.toUtf8() !== message) {
    throw new Error('Decrypted message does not match original message');
  }
}

const end = performance.now();

const totalTimeMs = end - start;
const operationsPerSecond = (count / totalTimeMs) * 1000;

console.log(
  `Aes encrypt/decrypt per second: ${operationsPerSecond} (Total time: ${totalTimeMs}ms)`,
);

// Benchmark 2: encryption only (result intentionally unused).
const startEncrypt = performance.now();

for (let i = 0; i < count; i++) {
  // Encrypt key with shared secret
  const encryptedKey = await aliceSharedSecret.encrypt(Bytes.fromUtf8(message));
}

const endEncrypt = performance.now();

const totalTimeMsEncrypt = endEncrypt - startEncrypt;
const operationsPerSecondEncrypt = (count / totalTimeMsEncrypt) * 1000;

console.log(
  `Aes encrypt per second: ${operationsPerSecondEncrypt} (Total time: ${totalTimeMsEncrypt}ms)`,
);
|
||||
25
benchmarks/sha256.ts
Normal file
25
benchmarks/sha256.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import { sha256, binToBase58 } from '@bitauth/libauth';

// Benchmark: iteratively hash the previous digest, starting from a fixed
// UTF-8 string, so the final digest is reproducible across runs.
const data = 'Hello, world!';
const count = 1_000_000;

let bytes = new TextEncoder().encode(data);
const start = performance.now();

for (let i = 0; i < count; i++) {
  bytes = new Uint8Array(sha256.hash(bytes));
}
const end = performance.now();

const totalTimeMs = end - start;
const operationsPerSecond = (count / totalTimeMs) * 1000;

console.log(
  `libauth sha256 per second: ${operationsPerSecond} (Total time: ${totalTimeMs}ms)`,
);

console.log(binToBase58(bytes));

// Sanity check: the base58 encoding of the millionth iterated digest is a
// known constant; fail loudly if the hash implementation ever regresses.
if (binToBase58(bytes) !== '2YZjvhWqVKZgQFDnVkadwRcpJkqW6oNjiHPXxkEpq2zP') {
  throw new Error('sha256 hash is incorrect');
}
|
||||
38
benchmarks/storage.ts
Normal file
38
benchmarks/storage.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import { AESKey } from '../src/crypto/aes-key.js';
import { BaseStorage } from '../src/storage/base-storage.js';
import { StorageMemory, StorageMemorySynced, EncryptedStorage } from '../src/storage/index.js';

// Plain in-memory store.
const storage = StorageMemory.from();
// const storageSynced = StorageMemorySynced.from(storage);

const currentDate = new Date();

// Sample document; includes Date values to exercise serialization.
const data = {
  name: 'test',
  age: 20,
  email: 'test@test.com',
  password: 'test',
  createdAt: currentDate,
  updatedAt: new Date(currentDate.getTime() + 1000),
}

// Encrypted store layered over its own in-memory backend.
const storageEncryptedBase = StorageMemory.from()
const storageEncrypted = EncryptedStorage.from(storageEncryptedBase, await AESKey.fromSeed('test'));

// Listener attached to the *base* store — NOTE(review): this presumably
// observes the stored (encrypted) form of each document; confirm.
storageEncryptedBase.on('insert', (event) => {
  console.log('insert', event);
});

// Store data in storage
await storage.insertOne('test', data);
// storageSynced.insertOne('test', data);
await storageEncrypted.insertOne('test', data);

// Retrieve data from storage
const retrievedData = await storage.findOne({ name: 'test' });
// const retrievedDataSynced = await storageSynced.findOne('test');
const retrievedDataEncrypted = await storageEncrypted.findOne({ name: 'test' });

console.log(retrievedData);
// console.log(retrievedDataSynced);
console.log(retrievedDataEncrypted);
|
||||
3914
package-lock.json
generated
Normal file
3914
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
28
package.json
Normal file
28
package.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "hashpass-stack",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"format": "prettier --write .",
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"devDependencies": {
|
||||
"madge": "^8.0.0",
|
||||
"prettier": "^3.6.2",
|
||||
"tsx": "^4.20.3",
|
||||
"typescript": "^5.8.3",
|
||||
"vitest": "^3.2.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@bitauth/libauth": "^3.0.0",
|
||||
"@noble/secp256k1": "^2.3.0",
|
||||
"msgpackr": "^1.11.5",
|
||||
"zod": "^4.0.14"
|
||||
}
|
||||
}
|
||||
104
src/crypto/aes-key.ts
Normal file
104
src/crypto/aes-key.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import { sha256, utf8ToBin } from '@bitauth/libauth';
|
||||
import { Bytes } from './bytes.js';
|
||||
|
||||
// Number of sha256 rounds applied when stretching input bytes into key
// material.  NOTE(review): unsalted iterated SHA-256 is a weak KDF
// compared to PBKDF2/scrypt — confirm this is an accepted trade-off.
const ITERATIONS = 65536;

/**
 * AES-256-GCM key wrapper around the Web Crypto API.
 *
 * Encrypted output layout: 12-byte nonce || ciphertext (incl. GCM tag).
 */
export class AESKey {
  /**
   * Derive a key from a UTF-8 seed string (stretched via fromBytes).
   */
  static async fromSeed(seed: string): Promise<AESKey> {
    const binSeed = utf8ToBin(seed);

    return AESKey.fromBytes(binSeed);
  }

  /**
   * Derive a key by hashing `bytes` through ITERATIONS rounds of sha256,
   * then importing the final 32-byte digest as an AES-GCM key.
   */
  static async fromBytes(bytes: Uint8Array): Promise<AESKey> {
    let hashedBytes = bytes;
    for (let i = 0; i < ITERATIONS; i++) {
      hashedBytes = sha256.hash(hashedBytes);
    }

    // `extractable: true` so the raw key can be exported if ever needed.
    const key = await crypto.subtle.importKey(
      'raw',
      new Uint8Array(hashedBytes),
      { name: 'AES-GCM', length: 256 },
      true,
      ['encrypt', 'decrypt'],
    );

    return new AESKey(key, hashedBytes);
  }

  /**
   * Create a key from 32 cryptographically random bytes.
   * NOTE(review): the random bytes still go through the fromBytes
   * stretching loop — harmless but redundant for random input.
   */
  static async fromRandom(): Promise<AESKey> {
    const rawKey = crypto.getRandomValues(new Uint8Array(32));
    return AESKey.fromBytes(sha256.hash(rawKey));
  }

  /** Wrap an already-imported CryptoKey together with its raw bytes. */
  static async fromKey(key: CryptoKey, rawKey: Uint8Array): Promise<AESKey> {
    return new AESKey(key, rawKey);
  }

  // Raw key bytes, retained for deterministic-nonce derivation.
  private readonly raw: Bytes;

  constructor(private key: CryptoKey, rawKey: Uint8Array) {
    if (!this.key) throw new Error('Client key not initialized');
    this.raw = Bytes.from(rawKey);
  }

  /**
   * Derive a 12-byte nonce from key and plaintext:
   * sha256(sha256(key) || sha256(data)), truncated to 12 bytes.
   *
   * NOTE(review): with a deterministic nonce, encrypting the same
   * plaintext twice under the same key yields identical ciphertext,
   * revealing message equality.  Presumably intentional (dedup-style
   * use); confirm callers understand the trade-off.
   */
  private getDeterministicNonce(data: Uint8Array): Bytes {
    // Hash the raw key and data
    const keyHash = sha256.hash(this.raw);
    const dataHash = sha256.hash(data);

    // Concatenate the two hashes
    const combined = new Uint8Array(keyHash.length + dataHash.length);
    combined.set(keyHash);
    combined.set(dataHash, keyHash.length);

    // Hash the combined hash
    const combinedHash = sha256.hash(combined);

    // Take the first 12 bytes (standard AES-GCM IV size)
    return Bytes.from(combinedHash.slice(0, 12));
  }

  /**
   * Encrypt `data` with AES-256-GCM.
   *
   * @param data Plaintext bytes.
   * @param deterministic When true, derive the nonce from key+plaintext
   *   instead of randomly (see getDeterministicNonce for caveats).
   * @returns Nonce-prefixed ciphertext: 12-byte nonce || ciphertext+tag.
   */
  async encrypt(data: Uint8Array, deterministic: boolean = false): Promise<Bytes> {
    // Generate a random 12-byte nonce (or a derived one when deterministic)
    const nonce = deterministic ? this.getDeterministicNonce(data) : crypto.getRandomValues(new Uint8Array(12));

    const encrypted = await crypto.subtle.encrypt(
      {
        name: 'AES-GCM',
        iv: nonce,
        tagLength: 128,
      },
      this.key,
      data as unknown as ArrayBuffer,
    );

    // Combine nonce and encrypted data (single pre-sized allocation)
    const encryptedView = new Uint8Array(encrypted);
    const encryptedBytes = new Uint8Array(nonce.length + encryptedView.length);
    encryptedBytes.set(nonce);
    encryptedBytes.set(encryptedView, nonce.length);

    // Convert to bytes
    return Bytes.from(encryptedBytes);
  }

  /**
   * Decrypt a nonce-prefixed AES-GCM payload produced by encrypt().
   * Rejects if authentication fails (wrong key or tampered ciphertext).
   */
  async decrypt(encrypted: Bytes): Promise<Bytes> {
    // Split into the 12-byte nonce and the ciphertext+tag remainder
    const nonce = encrypted.slice(0, 12);
    const encryptedData = encrypted.slice(12);

    const decrypted = await crypto.subtle.decrypt(
      {
        name: 'AES-GCM',
        iv: nonce,
        tagLength: 128,
      },
      this.key,
      encryptedData,
    );

    return Bytes.from(new Uint8Array(decrypted));
  }
}
|
||||
160
src/crypto/bytes.ts
Normal file
160
src/crypto/bytes.ts
Normal file
@@ -0,0 +1,160 @@
|
||||
import {
|
||||
binToHex,
|
||||
hexToBin,
|
||||
isHex,
|
||||
base64ToBin,
|
||||
binToBase64,
|
||||
isBase64,
|
||||
binToUtf8,
|
||||
utf8ToBin,
|
||||
base58ToBin,
|
||||
binToBase58,
|
||||
} from '@bitauth/libauth';
|
||||
|
||||
/**
|
||||
* Bytes Primitive.
|
||||
*/
|
||||
export class Bytes extends Uint8Array {
|
||||
/**
|
||||
* Instantiate Bytes from a Uint8Array.
|
||||
*
|
||||
* @param bytes {Uint8Array} The bytes to use.
|
||||
*/
|
||||
protected constructor(bytes: Uint8Array) {
|
||||
super(bytes);
|
||||
}
|
||||
|
||||
// Override the constructor to ensure correct inheritance
|
||||
static from(uint8Array: Uint8Array): Bytes {
|
||||
return new Bytes(uint8Array);
|
||||
}
|
||||
|
||||
/**
|
||||
* Instantiates bytes from an Uint8Array.
|
||||
*
|
||||
* @param bytes The bytes as a Uint8Array.
|
||||
*
|
||||
* @throws {Error} If Base64 is invalid.
|
||||
*
|
||||
* @returns Instance of Bytes.
|
||||
*/
|
||||
public static fromBytes(bytes: Uint8Array): Bytes {
|
||||
return new this(bytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Instantiates bytes from a Base58 string.
|
||||
*
|
||||
* @param base58 The Base58 string.
|
||||
*
|
||||
* @throws {Error} If Base58 is invalid.
|
||||
*
|
||||
* @returns Instance of Bytes.
|
||||
*/
|
||||
public static fromBase58(base58: string): Bytes {
|
||||
const bytes = base58ToBin(base58);
|
||||
|
||||
if (typeof bytes === 'string') {
|
||||
throw new Error(`Cannot decode invalid base58: ${bytes}`);
|
||||
}
|
||||
|
||||
return new this(bytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Instantiates bytes from a Base64 string.
|
||||
*
|
||||
* @param base64 The Base64 string.
|
||||
*
|
||||
* @throws {Error} If Base64 is invalid.
|
||||
*
|
||||
* @returns Instance of Bytes.
|
||||
*/
|
||||
public static fromBase64(base64: string): Bytes {
|
||||
// If the Base64 is invalid, throw an error.
|
||||
if (!isBase64(base64)) {
|
||||
throw new Error(`Cannot decode invalid base64 '${base64}'`);
|
||||
}
|
||||
|
||||
return new this(base64ToBin(base64));
|
||||
}
|
||||
|
||||
/**
|
||||
* Instantiates bytes from a hex string.
|
||||
*
|
||||
* @param hex The hex string.
|
||||
*
|
||||
* @throws {Error} If hex is invalid.
|
||||
*
|
||||
* @returns Instance of Bytes.
|
||||
*/
|
||||
public static fromHex(hex: string): Bytes {
|
||||
// If the hex is invalid, throw an error.
|
||||
if (!isHex(hex)) {
|
||||
throw new Error(`Cannot decode invalid hex '${hex}'`);
|
||||
}
|
||||
|
||||
return new this(hexToBin(hex));
|
||||
}
|
||||
|
||||
/**
|
||||
* Instantiates bytes from a UTF8 string.
|
||||
*
|
||||
* @param utf8 The UTF8 string.
|
||||
*
|
||||
* @returns Instance of Bytes.
|
||||
*/
|
||||
public static fromUtf8(utf8: string): Bytes {
|
||||
return new this(utf8ToBin(utf8));
|
||||
}
|
||||
|
||||
/**
|
||||
* Return bytes as a Base58 string.
|
||||
*
|
||||
* @returns Bytes as a Base58 string.
|
||||
*/
|
||||
public toBase58(): string {
|
||||
return binToBase58(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return bytes as a Base64 string.
|
||||
*
|
||||
* @returns Bytes as a Base64 string.
|
||||
*/
|
||||
public toBase64(): string {
|
||||
return binToBase64(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return bytes as a hex string.
|
||||
*
|
||||
* @returns Bytes as a hex string.
|
||||
*/
|
||||
public toHex(): string {
|
||||
return binToHex(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return bytes as a UTF8 string
|
||||
*
|
||||
* @returns Bytes as a UTF8 string.
|
||||
*/
|
||||
public toUtf8(): string {
|
||||
return binToUtf8(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return bytes as a Uint8Array
|
||||
*
|
||||
* @returns Bytes as a Uint8Array.
|
||||
*/
|
||||
public toUint8Array(): Uint8Array {
|
||||
// NOTE: We clone the instance of Uint8Array so that we don't accidentally mutate it.
|
||||
return new Uint8Array([...this]);
|
||||
}
|
||||
|
||||
public invert(): Bytes {
|
||||
return new Bytes(Uint8Array.from([...this].reverse()));
|
||||
}
|
||||
}
|
||||
4
src/crypto/index.ts
Normal file
4
src/crypto/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export * from './aes-key.js';
|
||||
export * from './bytes.js';
|
||||
export * from './public-key.js';
|
||||
export * from './private-key.js';
|
||||
83
src/crypto/private-key.ts
Normal file
83
src/crypto/private-key.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import { getSharedSecret } from '@noble/secp256k1';
|
||||
import { sha256, secp256k1, generatePrivateKey } from '@bitauth/libauth';
|
||||
|
||||
import { AESKey } from './aes-key.js';
|
||||
import { PublicKey } from './public-key.js';
|
||||
import { Bytes } from './bytes.js';
|
||||
|
||||
// Raw backing shape for PrivateKey (32-byte secp256k1 scalar).
export type PrivateKeyRaw = {
  bytes: Uint8Array;
};

/**
 * secp256k1 private key: public-key derivation, ECDH shared-secret
 * derivation, and Schnorr sign/verify (via @bitauth/libauth and
 * @noble/secp256k1).
 */
export class PrivateKey {
  /** Derive a private key deterministically from a UTF-8 seed string. */
  static fromSeed(seed: string): PrivateKey {
    const seedBytes = new TextEncoder().encode(seed);
    return PrivateKey.fromBytes(seedBytes);
  }

  /**
   * Derive a private key from arbitrary bytes: sha256 the input and feed
   * the digest to libauth's generatePrivateKey as the entropy source.
   * NOTE(review): the callback always returns the same digest — confirm
   * against libauth's contract that repeated invocations (for the rare
   * out-of-range candidate) cannot loop.
   */
  static fromBytes(bytes: Uint8Array): PrivateKey {
    const seedHash = new Uint8Array(sha256.hash(bytes));

    const privateKey = generatePrivateKey(() => seedHash);
    return new PrivateKey({ bytes: privateKey });
  }

  /** Wrap existing raw key material without validation. */
  static fromRaw(raw: PrivateKeyRaw): PrivateKey {
    return new PrivateKey(raw);
  }

  constructor(private raw: PrivateKeyRaw) {}

  /** Derive the compressed secp256k1 public key for this private key. */
  derivePublicKey(): PublicKey {
    const publicKey = secp256k1.derivePublicKeyCompressed(this.raw.bytes);

    // libauth signals failure by returning an error string.
    if (typeof publicKey === 'string') {
      throw new Error(`Invalid public key: ${publicKey}`);
    }

    return PublicKey.fromBytes(publicKey);
  }

  /** Expose the raw backing object (not a copy). */
  toRaw(): PrivateKeyRaw {
    return this.raw;
  }

  /** Raw key bytes wrapped as Bytes. */
  toBytes(): Bytes {
    return Bytes.from(this.raw.bytes);
  }

  /**
   * ECDH: derive an AES key from this private key and a peer public key.
   * noble's getSharedSecret takes (privateKey, publicKey) in that order.
   */
  async deriveSharedSecret(publicKey: PublicKey): Promise<AESKey> {
    const sharedSecret = getSharedSecret(
      this.raw.bytes,
      publicKey.toBytes().toUint8Array(),
    );
    return AESKey.fromBytes(sharedSecret);
  }

  /**
   * Schnorr-sign a UTF-8 message.
   *
   * NOTE(review): signMessageHashSchnorr expects a 32-byte *message
   * hash*, but this passes the raw UTF-8 bytes of `message`; for inputs
   * that are not exactly 32 bytes this presumably fails (surfacing as
   * the error-string branch below).  Confirm whether the message should
   * be sha256-hashed first.
   */
  sign(message: string): Bytes {
    const messageBytes = Bytes.fromUtf8(message).toUint8Array();

    const signature = secp256k1.signMessageHashSchnorr(
      this.raw.bytes,
      messageBytes,
    );

    if (typeof signature === 'string') {
      throw new Error(`Invalid signature: ${signature}`);
    }

    return Bytes.from(signature);
  }

  /**
   * Verify a Schnorr signature over a UTF-8 message against this key's
   * *own* public key (self-verification, not a third party's).
   * Same 32-byte message-hash caveat as sign().
   */
  verify(message: string, signature: Uint8Array): boolean {
    const messageBytes = Bytes.fromUtf8(message).toUint8Array();

    const publicKey = this.derivePublicKey();

    return secp256k1.verifySignatureSchnorr(
      signature,
      publicKey.toBytes().toUint8Array(),
      messageBytes,
    );
  }
}
|
||||
60
src/crypto/public-key.ts
Normal file
60
src/crypto/public-key.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { getSharedSecret } from '@noble/secp256k1';
|
||||
|
||||
import { AESKey } from './aes-key.js';
|
||||
import { Bytes } from './bytes.js';
|
||||
import { PrivateKey } from './private-key.js';
|
||||
|
||||
export type PublicKeyRaw = {
|
||||
bytes: Uint8Array;
|
||||
};
|
||||
|
||||
export class PublicKey {
|
||||
static fromHex(string: string): PublicKey {
|
||||
const bytes = Bytes.fromHex(string);
|
||||
return PublicKey.fromBytes(bytes);
|
||||
}
|
||||
|
||||
static fromBase58(string: string): PublicKey {
|
||||
const bytes = Bytes.fromBase58(string);
|
||||
return PublicKey.fromBytes(bytes);
|
||||
}
|
||||
|
||||
static fromBase64(string: string): PublicKey {
|
||||
const bytes = Bytes.fromBase64(string);
|
||||
return PublicKey.fromBytes(bytes);
|
||||
}
|
||||
|
||||
static fromBytes(bytes: Uint8Array): PublicKey {
|
||||
return new PublicKey({ bytes });
|
||||
}
|
||||
|
||||
static fromRaw(raw: PublicKeyRaw): PublicKey {
|
||||
return new PublicKey(raw);
|
||||
}
|
||||
|
||||
constructor(private key: PublicKeyRaw) {}
|
||||
|
||||
toBytes(): Bytes {
|
||||
return Bytes.from(this.key.bytes);
|
||||
}
|
||||
|
||||
toHex(): string {
|
||||
return this.toBytes().toHex();
|
||||
}
|
||||
|
||||
toBase58(): string {
|
||||
return this.toBytes().toBase58();
|
||||
}
|
||||
|
||||
toBase64(): string {
|
||||
return this.toBytes().toBase64();
|
||||
}
|
||||
|
||||
async deriveSharedSecret(privateKey: PrivateKey): Promise<AESKey> {
|
||||
const sharedSecret = getSharedSecret(
|
||||
this.key.bytes,
|
||||
privateKey.toBytes().toUint8Array(),
|
||||
);
|
||||
return AESKey.fromBytes(sharedSecret);
|
||||
}
|
||||
}
|
||||
3
src/index.ts
Normal file
3
src/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from './crypto/index.js';
|
||||
|
||||
export * from './user/user.js';
|
||||
118
src/protocols/whisper.ts
Normal file
118
src/protocols/whisper.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
/**
|
||||
* NOTES:
|
||||
* - Random keypairs every time is probably better
|
||||
* - Transports as part of the url
|
||||
* - Requested items
|
||||
 * - version
|
||||
* - mode: t | r
|
||||
*
|
||||
* Example url:
|
||||
* whisper://<public-key>?r=[<transport-url>]&q=[<item-url>]&v=1&m=t
|
||||
*
|
||||
* Transports:
|
||||
* - Libp2p
|
||||
* - WebSocket
|
||||
* - NFC
|
||||
* - Bluetooth
|
||||
*
|
||||
* Url handling:
|
||||
* Only allow specific parameters, unless prefixed with 'x-'
|
||||
*/
|
||||
|
||||
import { z } from 'zod/v4';
|
||||
|
||||
import { PublicKey } from 'src/crypto/index.js';
|
||||
|
||||
// Parsed representation of a whisper:// URL.
export type WhisperRaw = {
  publicKey: PublicKey;
  transports: string[];
  items: string[];
  version: number;
  mode: (typeof MODES)[number];
};

// 't' = transmit, 'r' = receive (per the file-header notes above).
export const MODES = ['t', 'r'] as const;

/**
 * Whisper is a protocol handler for sending a message to another user over a shared secret.
 *
 * It is a simple protocol that uses the shared secret to encrypt the message.
 *
 * The message is encrypted with the shared secret and sent to the other user.
 *
 * The other user decrypts the message with their shared secret.
 */
export class Whisper {
  /**
   * Parse a whisper:// URL (string or URL) into a Whisper instance.
   *
   * @throws {Error} If the protocol is not `whisper:` or the query
   *   params fail schema validation.
   */
  static from(url: string | URL) {
    if (typeof url === 'string') {
      url = new URL(url);
    }

    // Make sure it's the whisper: protocol
    if (url.protocol !== 'whisper:') {
      throw new Error('Invalid protocol');
    }

    // Get the public key from the path.
    // NOTE(review): for `whisper://<hex>` the hex lands in url.host (or
    // pathname, depending on runtime URL parsing of unknown schemes) —
    // confirm `url.pathname` is correct here.
    const publicKey = PublicKey.fromHex(url.pathname);

    // Get the params.
    // NOTE(review): searchParams values are plain strings, but the
    // schema declares z.array(...) for r/q — confirm zod coerces or
    // whether repeated params need special handling.
    const params = Whisper.schema.parse(
      Object.fromEntries(url.searchParams.entries()),
    );

    return new Whisper({
      ...params,
      publicKey,
    });
  }

  constructor(private readonly raw: WhisperRaw) {}

  /**
   * Serialize back to a whisper:// URL.
   *
   * NOTE(review): `this.raw` is keyed by long names (transports, items,
   * ...) but paramsMapping maps short->long, so the lookup below yields
   * undefined keys; the mapping appears inverted — confirm and fix.
   */
  toUrl() {
    return new URL(
      `whisper://${this.raw.publicKey.toHex()}?${new URLSearchParams(Object.entries(this.raw).map(([key, value]) => [Whisper.paramsMapping[key], value])).toString()}`,
    );
  }

  /** Expose the raw parsed representation. */
  toRaw() {
    return this.raw;
  }

  // Short query-param name -> long raw-field name.
  static paramsMapping = {
    r: 'transports',
    q: 'items',
    v: 'version',
    m: 'mode',
  };

  // Query-string schema: known short keys plus arbitrary 'x-' extensions.
  static schema = z
    .object({
      r: z.array(z.string()).default([]),
      q: z.array(z.string()).default([]),
      v: z.coerce.number(),
      m: z.enum(MODES).default('t'),
    })
    .catchall(z.string())
    .check((data) => {
      // If the item is not a valid key or prefixed with 'x-' then throw an error
      if (
        !Object.keys(data.value).every(
          (key) =>
            key.startsWith('x-') ||
            Object.keys(Whisper.paramsMapping).includes(key),
        )
      ) {
        throw new Error('Invalid params');
      }
    })
    .transform((data) => {
      // Re-key the short param names to their long raw-field names.
      return {
        ...data,
        transports: data.r,
        items: data.q,
        version: data.v,
        mode: data.m,
      };
    });
}
|
||||
101
src/storage/base-storage.ts
Normal file
101
src/storage/base-storage.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
import { EventEmitter } from 'src/utils/index.js';
|
||||
|
||||
/**
 * Query options for find operations
 */
export type FindOptions = {
  // Maximum number of documents to return.
  limit: number;
  // Number of matching documents to skip before collecting results.
  skip: number;
  // Field name -> sort direction: 1 ascending, -1 descending.
  sort: { [key: string]: 1 | -1 };
};

/**
 * Event payloads emitted by storage implementations, keyed by event name.
 */
export type StorageEvent<T = Record<string, any>> = {
  // A document was inserted under `key`.
  insert: {
    key: string;
    value: T;
  };
  // The document under `key` was updated; `value` is the new document.
  update: {
    key: string;
    value: T;
  };
  // The document under `key` was removed.
  delete: {
    key: string;
  };
  // The whole store was emptied; no payload.
  clear: undefined;
};

/**
 * Abstract MongoDB-flavoured async document store.
 *
 * Implementations persist documents of shape T under string ids and emit
 * StorageEvent notifications on mutation.
 */
export abstract class BaseStorage<
  T extends Record<string, any> = Record<string, any>,
> extends EventEmitter<StorageEvent<T>> {
  /**
   * Insert a document into the store
   * @param id Unique identifier for the document
   * @param document The document to insert
   */
  abstract insertOne(id: string, document: T): Promise<void>;

  /**
   * Insert multiple documents into the store
   * @param documents Map of ID to document
   */
  abstract insertMany(documents: Map<string, T>): Promise<void>;

  /**
   * Find a single document that matches the filter
   * @param filter MongoDB-like query filter
   * @returns The first matching document, or null if none match
   */
  abstract findOne(filter?: Partial<T>): Promise<T | null>;

  /**
   * Find all documents that match the filter
   * @param filter MongoDB-like query filter
   * @param options Query options (limit, skip, sort)
   */
  abstract find(filter?: Partial<T>, options?: FindOptions): Promise<T[]>;

  /**
   * Update a document that matches the filter
   * @param filter Query to match the document to update
   * @param update Document or fields to update
   * @returns True if a document was updated, false otherwise
   */
  abstract updateOne(filter: Partial<T>, update: Partial<T>): Promise<boolean>;

  /**
   * Update all documents that match the filter
   * @param filter Query to match documents to update
   * @param update Document or fields to update
   * @param options Query options (limit, skip, sort)
   * @returns Number of documents updated
   */
  abstract updateMany(
    filter: Partial<T>,
    update: Partial<T>,
    options: Partial<FindOptions>,
  ): Promise<number>;

  /**
   * Delete a document that matches the filter
   * @param filter Query to match the document to delete
   * @returns True if a document was deleted, false otherwise
   */
  abstract deleteOne(filter: Partial<T>): Promise<boolean>;

  /**
   * Delete all documents that match the filter with options
   * @param filter Query to match documents to delete
   * @param options Query options (limit, skip, sort)
   * @returns Number of documents deleted
   */
  abstract deleteMany(
    filter: Partial<T>,
    options: Partial<FindOptions>,
  ): Promise<number>;

  /**
   * Derive a child storage from the current storage
   * @param path The path to derive the child storage from
   * @returns The child storage
   */
  abstract deriveChild<C>(path: string): BaseStorage<C>;
}
|
||||
139
src/storage/encrypted-storage.ts
Normal file
139
src/storage/encrypted-storage.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import { Packr } from 'msgpackr';
|
||||
|
||||
import { AESKey } from 'src/crypto/aes-key.js';
|
||||
import { Bytes } from 'src/crypto/bytes.js';
|
||||
|
||||
import { BaseStorage, type FindOptions } from './base-storage.js';
|
||||
|
||||
export class EncryptedStorage<
|
||||
T extends Record<string, any> = Record<string, any>,
|
||||
> extends BaseStorage<T> {
|
||||
static from<T>(storage: BaseStorage, key: AESKey) {
|
||||
return new EncryptedStorage<T>(storage, key);
|
||||
}
|
||||
|
||||
private readonly msgpackr = new Packr({
|
||||
moreTypes: true,
|
||||
structuredClone: true
|
||||
});
|
||||
|
||||
constructor(
|
||||
private readonly storage: BaseStorage,
|
||||
private readonly key: AESKey,
|
||||
) {
|
||||
super();
|
||||
|
||||
// Forward events from the underlying storage, decrypting the data
|
||||
this.storage.on('insert', async (event) => {
|
||||
const decryptedValue = await this.convertToDecrypted(event.value as Record<string, string>);
|
||||
this.emit('insert', { key: event.key, value: decryptedValue });
|
||||
});
|
||||
|
||||
this.storage.on('update', async (event) => {
|
||||
const decryptedValue = await this.convertToDecrypted(event.value as Record<string, string>);
|
||||
this.emit('update', { key: event.key, value: decryptedValue });
|
||||
});
|
||||
|
||||
this.storage.on('delete', (event) => {
|
||||
this.emit('delete', event);
|
||||
});
|
||||
|
||||
this.storage.on('clear', (event) => {
|
||||
this.emit('clear', event);
|
||||
});
|
||||
}
|
||||
|
||||
async insertOne(id: string, document: T): Promise<void> {
|
||||
const encrypted = await this.convertToEncrypted(document);
|
||||
await this.storage.insertOne(id, encrypted);
|
||||
}
|
||||
|
||||
async insertMany(documents: Map<string, T>): Promise<void> {
|
||||
const encrypted = new Map<string, Record<string, string>>();
|
||||
for (const [key, value] of documents.entries()) {
|
||||
encrypted.set(key, await this.convertToEncrypted(value));
|
||||
}
|
||||
await this.storage.insertMany(encrypted);
|
||||
}
|
||||
|
||||
async findOne(filter: Partial<T>): Promise<T | null> {
|
||||
const encryptedFilter = await this.convertToEncrypted(filter);
|
||||
|
||||
console.log('encryptedFilter', encryptedFilter);
|
||||
|
||||
const document = await this.storage.findOne(encryptedFilter);
|
||||
if (!document) return null;
|
||||
return this.convertToDecrypted(document);
|
||||
}
|
||||
|
||||
async find(filter: Partial<T>): Promise<T[]> {
|
||||
const encryptedFilter = await this.convertToEncrypted(filter);
|
||||
const documents = await this.storage.find(encryptedFilter);
|
||||
return Promise.all(
|
||||
documents.map(async (document) => this.convertToDecrypted(document)),
|
||||
);
|
||||
}
|
||||
|
||||
async updateOne(filter: Partial<T>, update: Partial<T>): Promise<boolean> {
|
||||
const encryptedFilter = await this.convertToEncrypted(filter);
|
||||
const encryptedUpdate = await this.convertToEncrypted(update);
|
||||
return this.storage.updateOne(encryptedFilter, encryptedUpdate);
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
update: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
const encryptedFilter = await this.convertToEncrypted(filter);
|
||||
const encryptedUpdate = await this.convertToEncrypted(update);
|
||||
return this.storage.updateMany(encryptedFilter, encryptedUpdate, options);
|
||||
}
|
||||
|
||||
async deleteOne(filter: Partial<T>): Promise<boolean> {
|
||||
const encryptedFilter = await this.convertToEncrypted(filter);
|
||||
return this.storage.deleteOne(encryptedFilter);
|
||||
}
|
||||
|
||||
async deleteMany(
|
||||
filter: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
const encryptedFilter = await this.convertToEncrypted(filter);
|
||||
return this.storage.deleteMany(encryptedFilter, options);
|
||||
}
|
||||
|
||||
deriveChild<C>(path: string): BaseStorage<C> {
|
||||
return EncryptedStorage.from(this.storage.deriveChild(path), this.key);
|
||||
}
|
||||
|
||||
private async convertToEncrypted(
|
||||
document: Partial<T>,
|
||||
): Promise<Record<string, string>> {
|
||||
// For each key in the document, encrypt the value. This requires us to know the type of each value, so we must include it after converting it. Maybe this can be done by converting it to an object and json stringifying it.
|
||||
// Example: { a: 1, b: 'hello' } -> { a: { type: 'number', value: 1 }, b: { type: 'string', value: 'hello' } }
|
||||
const encrypted: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(document)) {
|
||||
// Create our object to encrypt
|
||||
const bin = this.msgpackr.pack(value);
|
||||
// Encrypt it
|
||||
const encryptedValue = await this.key.encrypt(bin, true);
|
||||
encrypted[key] = encryptedValue.toBase64();
|
||||
}
|
||||
|
||||
return encrypted;
|
||||
}
|
||||
|
||||
private async convertToDecrypted(
|
||||
document: Record<string, string>,
|
||||
): Promise<T> {
|
||||
const decrypted: Record<string, any> = {};
|
||||
for (const [key, value] of Object.entries(document)) {
|
||||
const binaryString = await this.key.decrypt(Bytes.fromBase64(value));
|
||||
const object = this.msgpackr.unpack(binaryString);
|
||||
decrypted[key] = object;
|
||||
}
|
||||
|
||||
return decrypted as T;
|
||||
}
|
||||
}
|
||||
3
src/storage/index.ts
Normal file
3
src/storage/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from './storage-memory.js';
|
||||
export * from './storage-memory-synced.js';
|
||||
export * from './encrypted-storage.js';
|
||||
0
src/storage/storage-localstorage.ts
Normal file
0
src/storage/storage-localstorage.ts
Normal file
112
src/storage/storage-memory-synced.ts
Normal file
112
src/storage/storage-memory-synced.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import { BaseStorage, FindOptions } from './base-storage.js';
|
||||
import { StorageMemory } from './storage-memory.js';
|
||||
|
||||
/**
|
||||
* Storage Adapter that takes another Storage Adapter (e.g. IndexedDB) and "syncs" its contents to system memory for faster read access.
|
||||
*
|
||||
* All read operations will use system memory - all write operations will use the provided adapter.
|
||||
*/
|
||||
export class StorageMemorySynced<T extends Record<string, any> = Record<string, any>> extends BaseStorage<T> {
|
||||
constructor(
|
||||
private inMemoryCache: StorageMemory<T>,
|
||||
private store: BaseStorage<T>,
|
||||
) {
|
||||
super();
|
||||
|
||||
// Hook into all write operations so that we can sync the In-Memory cache.
|
||||
this.store.on('insert', async (payload) => {
|
||||
await this.inMemoryCache.insertOne(payload.key, payload.value);
|
||||
this.emit('insert', { key: payload.key, value: payload.value });
|
||||
});
|
||||
|
||||
this.store.on('update', async (payload) => {
|
||||
// For update events, we need to find and update the document in memory
|
||||
// Since we don't have the filter, we'll update by key
|
||||
await this.inMemoryCache.insertOne(payload.key, payload.value);
|
||||
this.emit('update', { key: payload.key, value: payload.value });
|
||||
});
|
||||
|
||||
this.store.on('delete', async (payload) => {
|
||||
// For delete events, we need to find and delete the document by key
|
||||
const allDocs = await this.inMemoryCache.find();
|
||||
for (const doc of allDocs) {
|
||||
if ('id' in doc && doc.id === payload.key) {
|
||||
const filter = {} as Partial<T>;
|
||||
(filter as any).id = payload.key;
|
||||
await this.inMemoryCache.deleteOne(filter);
|
||||
break;
|
||||
}
|
||||
}
|
||||
this.emit('delete', { key: payload.key });
|
||||
});
|
||||
|
||||
this.store.on('clear', async () => {
|
||||
// Clear all documents from memory cache
|
||||
await this.inMemoryCache.deleteMany({} as Partial<T>);
|
||||
this.emit('clear', undefined);
|
||||
});
|
||||
}
|
||||
|
||||
static async create<T extends Record<string, any>>(store: BaseStorage<T>) {
|
||||
// Instantiate in-memory cache and the backing store.
|
||||
const inMemoryCache = new StorageMemory<T>();
|
||||
|
||||
// Create instance of this store.
|
||||
const memorySyncedStore = new StorageMemorySynced<T>(inMemoryCache, store);
|
||||
|
||||
// Sync the data from the backing store into the In-Memory cache.
|
||||
const allDocuments = await store.find();
|
||||
for (const document of allDocuments) {
|
||||
if ('id' in document && typeof document.id === 'string') {
|
||||
await inMemoryCache.insertOne(document.id, document);
|
||||
}
|
||||
}
|
||||
|
||||
// Return our instance of this store.
|
||||
return memorySyncedStore;
|
||||
}
|
||||
|
||||
async insertOne(id: string, document: T): Promise<void> {
|
||||
await this.store.insertOne(id, document);
|
||||
}
|
||||
|
||||
async insertMany(documents: Map<string, T>): Promise<void> {
|
||||
await this.store.insertMany(documents);
|
||||
}
|
||||
|
||||
async findOne(filter?: Partial<T>): Promise<T | null> {
|
||||
return await this.inMemoryCache.findOne(filter);
|
||||
}
|
||||
|
||||
async find(filter?: Partial<T>, options?: FindOptions): Promise<T[]> {
|
||||
return await this.inMemoryCache.find(filter, options);
|
||||
}
|
||||
|
||||
async updateOne(filter: Partial<T>, update: Partial<T>): Promise<boolean> {
|
||||
return await this.store.updateOne(filter, update);
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
update: Partial<T>,
|
||||
options: FindOptions = {} as FindOptions
|
||||
): Promise<number> {
|
||||
return await this.store.updateMany(filter, update, options);
|
||||
}
|
||||
|
||||
async deleteOne(filter: Partial<T>): Promise<boolean> {
|
||||
return await this.store.deleteOne(filter);
|
||||
}
|
||||
|
||||
async deleteMany(filter: Partial<T>, options: FindOptions = {} as FindOptions): Promise<number> {
|
||||
return await this.store.deleteMany(filter, options);
|
||||
}
|
||||
|
||||
deriveChild<C extends Record<string, any>>(path: string): BaseStorage<C> {
|
||||
const childStore = this.store.deriveChild<C>(path);
|
||||
const childMemory = this.inMemoryCache.deriveChild<C>(path);
|
||||
|
||||
// Create a new synced storage for the child
|
||||
return new StorageMemorySynced<C>(childMemory as StorageMemory<C>, childStore);
|
||||
}
|
||||
}
|
||||
164
src/storage/storage-memory.ts
Normal file
164
src/storage/storage-memory.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import { BaseStorage, FindOptions } from './base-storage.js';
|
||||
|
||||
/**
|
||||
* Implementation of BaseStore using Memory as the storage backend.
|
||||
*
|
||||
* @remarks
|
||||
* This implementation can be used testing and caching of expensive operations.
|
||||
*/
|
||||
export class StorageMemory<
|
||||
T extends Record<string, any> = Record<string, any>,
|
||||
> extends BaseStorage<T> {
|
||||
// TODO: Eventually this may accept indexes as an argument.
|
||||
static from<T>(): StorageMemory<T> {
|
||||
return new StorageMemory<T>();
|
||||
}
|
||||
|
||||
private store: Map<string, T>;
|
||||
private children: Map<string, StorageMemory<any>>;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
this.store = new Map();
|
||||
this.children = new Map();
|
||||
}
|
||||
|
||||
async insertOne(id: string, document: T): Promise<void> {
|
||||
this.store.set(id, document);
|
||||
this.emit('insert', { key: id, value: document });
|
||||
}
|
||||
|
||||
async insertMany(documents: Map<string, T>): Promise<void> {
|
||||
for (const [key, value] of documents.entries()) {
|
||||
this.store.set(key, value);
|
||||
this.emit('insert', { key, value });
|
||||
}
|
||||
}
|
||||
|
||||
private matchesFilter(item: T, filter?: Partial<T>): boolean {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (const [key, value] of Object.entries(filter)) {
|
||||
if (item[key] !== value) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
async findOne(filter?: Partial<T>): Promise<T | null> {
|
||||
for (const [, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
async find(filter?: Partial<T>): Promise<T[]> {
|
||||
const results: T[] = [];
|
||||
for (const [, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
results.push(value);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
async updateOne(filter: Partial<T>, update: Partial<T>): Promise<boolean> {
|
||||
for (const [key, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
const updated = { ...value, ...update };
|
||||
this.store.set(key, updated);
|
||||
this.emit('update', { key, value: updated });
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async updateMany(
|
||||
filter: Partial<T>,
|
||||
update: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
let updated = 0;
|
||||
const itemsToUpdate: Array<[string, T]> = [];
|
||||
|
||||
// Collect all matching items
|
||||
for (const [key, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
itemsToUpdate.push([key, value]);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply skip and limit
|
||||
const startIndex = options.skip || 0;
|
||||
const endIndex = options.limit
|
||||
? startIndex + options.limit
|
||||
: itemsToUpdate.length;
|
||||
const itemsToProcess = itemsToUpdate.slice(startIndex, endIndex);
|
||||
|
||||
// Update items
|
||||
for (const [key, oldValue] of itemsToProcess) {
|
||||
const updatedValue = { ...oldValue, ...update };
|
||||
this.store.set(key, updatedValue);
|
||||
this.emit('update', { key, value: updatedValue });
|
||||
updated++;
|
||||
}
|
||||
|
||||
return updated;
|
||||
}
|
||||
|
||||
async deleteOne(filter: Partial<T>): Promise<boolean> {
|
||||
for (const [key, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
this.store.delete(key);
|
||||
this.emit('delete', { key });
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async deleteMany(
|
||||
filter: Partial<T>,
|
||||
options: Partial<FindOptions> = {},
|
||||
): Promise<number> {
|
||||
let deleted = 0;
|
||||
const keysToDelete: string[] = [];
|
||||
|
||||
// Collect all matching keys
|
||||
for (const [key, value] of this.store) {
|
||||
if (this.matchesFilter(value, filter)) {
|
||||
keysToDelete.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply skip and limit
|
||||
const startIndex = options.skip || 0;
|
||||
const endIndex = options.limit
|
||||
? startIndex + options.limit
|
||||
: keysToDelete.length;
|
||||
const keysToProcess = keysToDelete.slice(startIndex, endIndex);
|
||||
|
||||
// Delete items
|
||||
for (const key of keysToProcess) {
|
||||
this.store.delete(key);
|
||||
this.emit('delete', { key });
|
||||
deleted++;
|
||||
}
|
||||
|
||||
return deleted;
|
||||
}
|
||||
|
||||
deriveChild<C>(path: string): BaseStorage<C> {
|
||||
if (!this.children.has(path)) {
|
||||
this.children.set(path, new StorageMemory<C>());
|
||||
}
|
||||
return this.children.get(path) as StorageMemory<C>;
|
||||
}
|
||||
}
|
||||
0
src/storage/storage-remote-http.ts
Normal file
0
src/storage/storage-remote-http.ts
Normal file
14
src/transports/base-transport.ts
Normal file
14
src/transports/base-transport.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { EventEmitter } from '../utils/event-emitter.js';
|
||||
|
||||
export type TransportEventMap = {
|
||||
message: unknown;
|
||||
connected: boolean;
|
||||
disconnected: boolean;
|
||||
error: unknown;
|
||||
};
|
||||
|
||||
export abstract class BaseTransport extends EventEmitter<TransportEventMap> {
|
||||
abstract send(url: string, body: unknown): Promise<unknown>;
|
||||
abstract connect(): Promise<void>;
|
||||
abstract disconnect(): Promise<void>;
|
||||
}
|
||||
33
src/transports/ephemeral-transport.ts
Normal file
33
src/transports/ephemeral-transport.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { ZodType, output } from 'zod/v4';
|
||||
import type { BaseTransport } from './base-transport.js';
|
||||
|
||||
export class EphemeralTransport {
|
||||
static from(transportConstructors: (url: string) => Promise<BaseTransport>) {
|
||||
return new EphemeralTransport(transportConstructors);
|
||||
}
|
||||
|
||||
constructor(
|
||||
private readonly transports: (url: string) => Promise<BaseTransport>,
|
||||
) {}
|
||||
|
||||
async send(url: string, body: unknown): Promise<unknown> {
|
||||
const transport = await this.transports(url);
|
||||
const response = await transport.send(url, body);
|
||||
transport.disconnect();
|
||||
return response;
|
||||
}
|
||||
|
||||
async receive<T extends ZodType>(url: string, schema: T): Promise<output<T>>;
|
||||
async receive(url: string): Promise<unknown>;
|
||||
async receive(url: string, schema?: ZodType): Promise<unknown> {
|
||||
const transport = await this.transports(url);
|
||||
const message = await transport.waitFor('message', () => true);
|
||||
transport.disconnect();
|
||||
|
||||
if (schema) {
|
||||
return schema.parse(message);
|
||||
}
|
||||
|
||||
return message;
|
||||
}
|
||||
}
|
||||
0
src/transports/transport-bluetooth.ts
Normal file
0
src/transports/transport-bluetooth.ts
Normal file
45
src/transports/transport-composite.ts
Normal file
45
src/transports/transport-composite.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { BaseTransport } from './base-transport.js';
|
||||
|
||||
export class TransportComposite extends BaseTransport {
|
||||
static from(transports: BaseTransport[]) {
|
||||
return new TransportComposite(transports);
|
||||
}
|
||||
|
||||
constructor(private readonly transports: BaseTransport[]) {
|
||||
super();
|
||||
|
||||
this.transports.forEach((transport) => {
|
||||
transport.on('message', (message) => {
|
||||
this.emit('message', message);
|
||||
});
|
||||
|
||||
transport.on('error', (error) => {
|
||||
this.emit('error', error);
|
||||
});
|
||||
|
||||
transport.on('connected', (connected) => {
|
||||
this.emit('connected', connected);
|
||||
});
|
||||
|
||||
transport.on('disconnected', (disconnected) => {
|
||||
this.emit('disconnected', disconnected);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async send(url: string, body: unknown): Promise<unknown> {
|
||||
return Promise.all(
|
||||
this.transports.map((transport) => transport.send(url, body)),
|
||||
);
|
||||
}
|
||||
|
||||
async connect(): Promise<void> {
|
||||
await Promise.all(this.transports.map((transport) => transport.connect()));
|
||||
}
|
||||
|
||||
async disconnect(): Promise<void> {
|
||||
await Promise.all(
|
||||
this.transports.map((transport) => transport.disconnect()),
|
||||
);
|
||||
}
|
||||
}
|
||||
0
src/transports/transport-libp2p.ts
Normal file
0
src/transports/transport-libp2p.ts
Normal file
0
src/transports/transport-nfc.ts
Normal file
0
src/transports/transport-nfc.ts
Normal file
51
src/transports/transport-sse.ts
Normal file
51
src/transports/transport-sse.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { SSESession } from '../utils/sse-session.js';
|
||||
import { BaseTransport } from './base-transport.js';
|
||||
|
||||
export class TransportSSE extends BaseTransport {
|
||||
static async from(url: string) {
|
||||
const session = await SSESession.from(url);
|
||||
return new TransportSSE(session);
|
||||
}
|
||||
|
||||
static fromSSESession(session: SSESession) {
|
||||
return new TransportSSE(session);
|
||||
}
|
||||
|
||||
constructor(private readonly session: SSESession) {
|
||||
super();
|
||||
|
||||
console.log('created transport sse');
|
||||
|
||||
this.session.on('message', (message) => this.emit('message', message));
|
||||
this.session.on('connected', () => this.emit('connected', true));
|
||||
this.session.on('disconnected', () => this.emit('disconnected', true));
|
||||
}
|
||||
|
||||
/**
|
||||
* Send fetch request to the url, ignoring SSE client as it's not needed for this transport
|
||||
* @param url - The URL to send the message to
|
||||
* @param body - The body to send
|
||||
*/
|
||||
async send(url: string, body: unknown): Promise<unknown> {
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
|
||||
return response.json();
|
||||
}
|
||||
|
||||
/**
|
||||
* Connect the SSE session
|
||||
*/
|
||||
async connect(): Promise<void> {
|
||||
await this.session.connect();
|
||||
}
|
||||
|
||||
/**
|
||||
* Disconnect the SSE session
|
||||
*/
|
||||
async disconnect(): Promise<void> {
|
||||
this.session.close();
|
||||
}
|
||||
}
|
||||
0
src/transports/transport-websocket.ts
Normal file
0
src/transports/transport-websocket.ts
Normal file
9
src/user/service.ts
Normal file
9
src/user/service.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
|
||||
|
||||
export class Account {
|
||||
static from() {
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
25
src/user/user.ts
Normal file
25
src/user/user.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import { PrivateKey, PublicKey } from '../crypto/index.js';
|
||||
|
||||
export class User {
|
||||
static async fromSecret(secret: string) {
|
||||
return new User(secret);
|
||||
}
|
||||
|
||||
constructor(private secret: string) {}
|
||||
|
||||
async getPrivateKey() {
|
||||
return PrivateKey.fromSeed(this.secret);
|
||||
}
|
||||
|
||||
async getPublicKey() {
|
||||
return this.getPrivateKey().then((privateKey) =>
|
||||
privateKey.derivePublicKey(),
|
||||
);
|
||||
}
|
||||
|
||||
async getSharedSecret(publicKey: PublicKey) {
|
||||
return this.getPrivateKey().then((privateKey) =>
|
||||
privateKey.deriveSharedSecret(publicKey),
|
||||
);
|
||||
}
|
||||
}
|
||||
152
src/utils/event-emitter.ts
Normal file
152
src/utils/event-emitter.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
// TODO: You'll probably want to use WeakRef's here.
|
||||
|
||||
export type EventMap = Record<string, unknown>;
|
||||
|
||||
type Listener<T> = (detail: T) => void;
|
||||
|
||||
interface ListenerEntry<T> {
|
||||
listener: Listener<T>;
|
||||
wrappedListener: Listener<T>;
|
||||
debounceTime?: number;
|
||||
once?: boolean;
|
||||
}
|
||||
|
||||
export type OffCallback = () => void;
|
||||
|
||||
export class EventEmitter<T extends EventMap> {
|
||||
private listeners: Map<keyof T, Set<ListenerEntry<T[keyof T]>>> = new Map();
|
||||
|
||||
on<K extends keyof T>(
|
||||
type: K,
|
||||
listener: Listener<T[K]>,
|
||||
debounceMilliseconds?: number,
|
||||
): OffCallback {
|
||||
const wrappedListener =
|
||||
debounceMilliseconds && debounceMilliseconds > 0
|
||||
? this.debounce(listener, debounceMilliseconds)
|
||||
: listener;
|
||||
|
||||
if (!this.listeners.has(type)) {
|
||||
this.listeners.set(type, new Set());
|
||||
}
|
||||
|
||||
const listenerEntry: ListenerEntry<T[K]> = {
|
||||
listener,
|
||||
wrappedListener,
|
||||
debounceTime: debounceMilliseconds,
|
||||
};
|
||||
|
||||
this.listeners.get(type)?.add(listenerEntry as ListenerEntry<T[keyof T]>);
|
||||
|
||||
// Return an "off" callback that can be called to stop listening for events.
|
||||
return () => this.off(type, listener);
|
||||
}
|
||||
|
||||
once<K extends keyof T>(
|
||||
type: K,
|
||||
listener: Listener<T[K]>,
|
||||
debounceMilliseconds?: number,
|
||||
): OffCallback {
|
||||
const wrappedListener: Listener<T[K]> = (detail: T[K]) => {
|
||||
this.off(type, listener);
|
||||
listener(detail);
|
||||
};
|
||||
|
||||
const debouncedListener =
|
||||
debounceMilliseconds && debounceMilliseconds > 0
|
||||
? this.debounce(wrappedListener, debounceMilliseconds)
|
||||
: wrappedListener;
|
||||
|
||||
if (!this.listeners.has(type)) {
|
||||
this.listeners.set(type, new Set());
|
||||
}
|
||||
|
||||
const listenerEntry: ListenerEntry<T[K]> = {
|
||||
listener,
|
||||
wrappedListener: debouncedListener,
|
||||
debounceTime: debounceMilliseconds,
|
||||
once: true,
|
||||
};
|
||||
|
||||
this.listeners.get(type)?.add(listenerEntry as ListenerEntry<T[keyof T]>);
|
||||
|
||||
// Return an "off" callback that can be called to stop listening for events.
|
||||
return () => this.off(type, listener);
|
||||
}
|
||||
|
||||
off<K extends keyof T>(type: K, listener: Listener<T[K]>): void {
|
||||
const listeners = this.listeners.get(type);
|
||||
if (!listeners) return;
|
||||
|
||||
const listenerEntry = Array.from(listeners).find(
|
||||
(entry) =>
|
||||
entry.listener === listener || entry.wrappedListener === listener,
|
||||
);
|
||||
|
||||
if (listenerEntry) {
|
||||
listeners.delete(listenerEntry);
|
||||
}
|
||||
}
|
||||
|
||||
emit<K extends keyof T>(type: K, payload: T[K]): boolean {
|
||||
const listeners = this.listeners.get(type);
|
||||
if (!listeners) return false;
|
||||
|
||||
listeners.forEach((entry) => {
|
||||
entry.wrappedListener(payload);
|
||||
});
|
||||
|
||||
return listeners.size > 0;
|
||||
}
|
||||
|
||||
removeAllListeners(): void {
|
||||
this.listeners.clear();
|
||||
}
|
||||
|
||||
async waitFor<K extends keyof T>(
|
||||
type: K,
|
||||
predicate: (payload: T[K]) => boolean,
|
||||
timeoutMs?: number,
|
||||
): Promise<T[K]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
let timeoutId: ReturnType<typeof setTimeout> | undefined;
|
||||
|
||||
const listener = (payload: T[K]) => {
|
||||
if (predicate(payload)) {
|
||||
// Clean up
|
||||
this.off(type, listener);
|
||||
if (timeoutId !== undefined) {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
resolve(payload);
|
||||
}
|
||||
};
|
||||
|
||||
// Set up timeout if specified
|
||||
if (timeoutMs !== undefined) {
|
||||
timeoutId = setTimeout(() => {
|
||||
this.off(type, listener);
|
||||
reject(new Error(`Timeout waiting for event "${String(type)}"`));
|
||||
}, timeoutMs);
|
||||
}
|
||||
|
||||
this.on(type, listener);
|
||||
});
|
||||
}
|
||||
|
||||
private debounce<K extends keyof T>(
|
||||
func: Listener<T[K]>,
|
||||
wait: number,
|
||||
): Listener<T[K]> {
|
||||
let timeout: ReturnType<typeof setTimeout>;
|
||||
|
||||
return (detail: T[K]) => {
|
||||
if (timeout !== null) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
timeout = setTimeout(() => {
|
||||
func(detail);
|
||||
}, wait);
|
||||
};
|
||||
}
|
||||
}
|
||||
155
src/utils/exponential-backoff.ts
Normal file
155
src/utils/exponential-backoff.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
/**
|
||||
* Exponential backoff is a technique used to retry a function after a delay.
|
||||
*
|
||||
* The delay increases exponentially with each attempt, up to a maximum delay.
|
||||
*
|
||||
* The jitter is a random amount of time added to the delay to prevent thundering herd problems.
|
||||
*
|
||||
* The growth rate is the factor by which the delay increases with each attempt.
|
||||
*/
|
||||
export class ExponentialBackoff {
|
||||
/**
|
||||
* Create a new ExponentialBackoff instance
|
||||
*
|
||||
* @param config - The configuration for the exponential backoff
|
||||
* @returns The ExponentialBackoff instance
|
||||
*/
|
||||
static from(config?: Partial<ExponentialBackoffOptions>): ExponentialBackoff {
|
||||
const backoff = new ExponentialBackoff(config);
|
||||
return backoff;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run the function with exponential backoff
|
||||
*
|
||||
* @param fn - The function to run
|
||||
* @param onError - The callback to call when an error occurs
|
||||
* @param options - The configuration for the exponential backoff
|
||||
*
|
||||
* @throws The last error if the function fails and we have hit the max attempts
|
||||
*
|
||||
* @returns The result of the function
|
||||
*/
|
||||
static run<T>(
|
||||
fn: () => Promise<T>,
|
||||
onError = (_error: Error) => {},
|
||||
options?: Partial<ExponentialBackoffOptions>,
|
||||
): Promise<T> {
|
||||
const backoff = ExponentialBackoff.from(options);
|
||||
return backoff.run(fn, onError);
|
||||
}
|
||||
|
||||
private readonly options: ExponentialBackoffOptions;
|
||||
|
||||
constructor(options?: Partial<ExponentialBackoffOptions>) {
|
||||
this.options = {
|
||||
maxDelay: 10000,
|
||||
maxAttempts: 10,
|
||||
baseDelay: 1000,
|
||||
growthRate: 2,
|
||||
jitter: 0.1,
|
||||
...options,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Run the function with exponential backoff
|
||||
*
|
||||
* If the function fails but we have not hit the max attempts, the error will be passed to the onError callback
|
||||
* and the function will be retried with an exponential delay
|
||||
*
|
||||
* If the function fails and we have hit the max attempts, the last error will be thrown
|
||||
*
|
||||
* @param fn - The function to run
|
||||
* @param onError - The callback to call when an error occurs
|
||||
*
|
||||
* @throws The last error if the function fails and we have hit the max attempts
|
||||
*
|
||||
* @returns The result of the function
|
||||
*/
|
||||
async run<T>(
|
||||
fn: () => Promise<T>,
|
||||
onError = (_error: Error) => {},
|
||||
): Promise<T> {
|
||||
let lastError: Error = new Error('Exponential backoff: Max retries hit');
|
||||
|
||||
let attempt = 0;
|
||||
|
||||
while (
|
||||
attempt < this.options.maxAttempts ||
|
||||
this.options.maxAttempts == 0
|
||||
) {
|
||||
try {
|
||||
return await fn();
|
||||
} catch (error) {
|
||||
// Store the error in case we fail every attempt
|
||||
lastError = error instanceof Error ? error : new Error(`${error}`);
|
||||
onError(lastError);
|
||||
|
||||
// Wait before going to the next attempt
|
||||
const delay = this.calculateDelay(attempt);
|
||||
await new Promise((resolve) => setTimeout(resolve, delay));
|
||||
}
|
||||
|
||||
attempt++;
|
||||
}
|
||||
|
||||
// We completed the loop without ever succeeding. Throw the last error we got
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the delay before we should attempt to retry
|
||||
*
|
||||
* NOTE: The maximum delay is (maxDelay * (1 + jitter))
|
||||
*
|
||||
* @param attempt
|
||||
* @returns The time in milliseconds before another attempt should be made
|
||||
*/
|
||||
private calculateDelay(attempt: number): number {
|
||||
// Get the power of the growth rate
|
||||
const power = Math.pow(this.options.growthRate, attempt);
|
||||
|
||||
// Get the delay before jitter or limit
|
||||
const rawDelay = this.options.baseDelay * power;
|
||||
|
||||
// Cap the delay to the maximum. Do this before the jitter so jitter does not become larger than delay
|
||||
const cappedDelay = Math.min(rawDelay, this.options.maxDelay);
|
||||
|
||||
// Get the jitter direction. This will be between -1 and 1
|
||||
const jitterDirection = 2 * Math.random() - 1;
|
||||
|
||||
// Calculate the jitter
|
||||
const jitter = jitterDirection * this.options.jitter * cappedDelay;
|
||||
|
||||
// Add the jitter to the delay
|
||||
return cappedDelay + jitter;
|
||||
}
|
||||
}
|
||||
|
||||
export type ExponentialBackoffOptions = {
|
||||
/**
|
||||
* The maximum delay between attempts in milliseconds
|
||||
*/
|
||||
maxDelay: number;
|
||||
|
||||
/**
|
||||
* The maximum number of attempts. Passing 0 will result in infinite attempts.
|
||||
*/
|
||||
maxAttempts: number;
|
||||
|
||||
/**
|
||||
* The base delay between attempts in milliseconds
|
||||
*/
|
||||
baseDelay: number;
|
||||
|
||||
/**
|
||||
* The growth rate of the delay
|
||||
*/
|
||||
growthRate: number;
|
||||
|
||||
/**
|
||||
* The jitter of the delay as a percentage of growthRate
|
||||
*/
|
||||
jitter: number;
|
||||
};
|
||||
3
src/utils/index.ts
Normal file
3
src/utils/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from './event-emitter.js';
|
||||
export * from './exponential-backoff.js';
|
||||
export * from './sse-session.js';
|
||||
371
src/utils/sse-session.ts
Normal file
371
src/utils/sse-session.ts
Normal file
@@ -0,0 +1,371 @@
|
||||
import { ExponentialBackoff } from './exponential-backoff.js';
|
||||
import { EventEmitter } from './event-emitter.js';
|
||||
|
||||
export type SSESessionEventMap = {
|
||||
message: unknown;
|
||||
connected: boolean;
|
||||
disconnected: boolean;
|
||||
error: unknown;
|
||||
};
|
||||
|
||||
/**
 * A Server-Sent Events client implementation using fetch API.
 * Supports custom headers, POST requests, and is non-blocking.
 *
 * Lifecycle: construct (optionally via the static `from` helper), `connect()`,
 * listen for `message` / `connected` / `disconnected` / `error` events, and
 * `close()` when done. Reconnection behavior is controlled by the
 * `attemptReconnect` and `persistent` options.
 */
export class SSESession extends EventEmitter<SSESessionEventMap> {
  /**
   * Creates and connects a new SSESession instance.
   * @param url The URL to connect to
   * @param options Configuration options
   * @returns A new connected SSESession instance
   */
  public static async from(
    url: string,
    options: Partial<SSESessionOptions> = {},
  ): Promise<SSESession> {
    const client = new SSESession(url, options);
    await client.connect();
    return client;
  }

  // State.
  private url: string;
  // Replaced with a fresh controller on every connect(); aborting it tears
  // down the in-flight fetch/stream.
  private controller: AbortController;
  private connected: boolean = false;
  protected options: SSESessionOptions;
  // Holds bytes of any partially-received SSE event between chunks.
  protected messageBuffer: Uint8Array = new Uint8Array();

  // Listener for when the tab is hidden or shown.
  private visibilityChangeHandler: ((event: Event) => void) | null = null;

  // Text decoders and encoders for parsing the message buffer.
  private textDecoder: TextDecoder = new TextDecoder();
  private textEncoder: TextEncoder = new TextEncoder();

  /**
   * Creates a new SSESession instance.
   * @param url The URL to connect to
   * @param options Configuration options (merged over defaults; any field in
   *   {@link SSESessionOptions} may be overridden)
   */
  constructor(url: string, options: Partial<SSESessionOptions> = {}) {
    super();

    this.url = url;
    this.options = {
      // Use default fetch function.
      // Wrapped in a lambda rather than passed directly so the global fetch
      // keeps its expected `this` binding.
      fetch: (...args) => fetch(...args),
      method: 'GET',
      headers: {
        Accept: 'text/event-stream',
        'Cache-Control': 'no-cache',
      },

      // Reconnection options
      attemptReconnect: true,
      retryDelay: 1000,
      persistent: false,
      ...options,
    };
    this.controller = new AbortController();

    // Set up visibility change handling if in mobile browser environment
    // (`document` is undefined under Node, so this is browser-only).
    if (typeof document !== 'undefined') {
      this.visibilityChangeHandler = this.handleVisibilityChange.bind(this);
      document.addEventListener(
        'visibilitychange',
        this.visibilityChangeHandler,
      );
    }
  }

  /**
   * Handles visibility change events in the browser.
   *
   * Mobile browsers commonly freeze or kill background connections; we abort
   * proactively on hide and reconnect on show so the session recovers cleanly.
   */
  private async handleVisibilityChange(): Promise<void> {
    // When going to background, close the current connection cleanly
    // This allows us to reconnect mobile devices when they come back after leaving the tab or browser app.
    if (document.visibilityState === 'hidden') {
      this.controller.abort();
    }

    // When coming back to foreground, attempt to reconnect if not connected
    if (document.visibilityState === 'visible' && !this.connected) {
      await this.connect();
    }
  }

  /**
   * Connects to the SSE endpoint.
   *
   * Retries the initial fetch with exponential backoff (infinite attempts,
   * capped at 10s) and then reads the stream in the background; this method
   * resolves once the stream is established, not when it ends.
   *
   * NOTE(review): `this.connected` is set true *before* the fetch succeeds.
   * This guards against concurrent connect() calls, but isConnected() reports
   * true while the backoff is still retrying — confirm this is intended.
   */
  public async connect(): Promise<void> {
    if (this.connected) return;

    this.connected = true;
    // Fresh controller per connection: the previous one may already be aborted.
    this.controller = new AbortController();

    const { method, headers, body } = this.options;

    const fetchOptions: RequestInit = {
      method,
      headers,
      body,
      signal: this.controller.signal,
      // Bypass the HTTP cache so we always get a live stream.
      cache: 'no-store',
    };

    // maxAttempts: 0 means retry forever (see ExponentialBackoffOptions).
    const exponentialBackoff = ExponentialBackoff.from({
      baseDelay: this.options.retryDelay,
      maxDelay: 10000,
      maxAttempts: 0,
      growthRate: 1.3,
      jitter: 0.3,
    });

    // Establish the connection and get the reader using the exponential backoff
    const reader = await exponentialBackoff.run(async () => {
      const res = await this.options.fetch(this.url, fetchOptions);
      if (!res.ok) {
        throw new Error(`HTTP error! Status: ${res.status}`);
      }

      if (!res.body) {
        throw new Error('Response body is null');
      }

      return res.body.getReader();
    });

    // Call the onConnected callback
    this.emit('connected', undefined);

    const readStream = async () => {
      try {
        while (true) {
          const { done, value } = await reader.read();

          if (done) {
            this.connected = false;

            // Call the onDisconnected callback.
            this.emit('disconnected', undefined);

            // If the connection was closed by the server, we want to attempt a reconnect if the connection should be persistent.
            if (this.options.persistent) {
              await this.connect();
            }

            break;
          }

          const events = this.parseEvents(value);

          for (const event of events) {
            this.emit('message', event);
          }
        }
      } catch (error) {
        this.connected = false;

        // Call the onDisconnected callback.
        this.emit('disconnected', error);

        // If the connection was aborted using the controller, we don't need to call onError.
        // (Intentional aborts — close() or tab-hidden — land here as errors.)
        if (this.controller.signal.aborted) {
          return;
        }

        // Call the onError callback.
        // NOTE: we dont use the handleCallback here because it would result in 2 error callbacks.
        try {
          this.emit('error', error);
        } catch (error) {
          console.log(`SSE Session: onError callback error:`, error);
        }

        // Attempt to reconnect if enabled
        if (this.options.attemptReconnect) {
          await this.connect();
        }
      }
    };

    // Deliberately not awaited: the stream is consumed in the background so
    // connect() resolves as soon as the connection is up.
    readStream();

    return;
  }

  /**
   * Parses raw stream bytes into complete SSE events, buffering any
   * incomplete trailing event for the next chunk.
   *
   * NOTE(review): the whole buffer is decoded with `decode()` (no
   * `{stream: true}`) and the unconsumed remainder is re-encoded. A multi-byte
   * UTF-8 character split across chunk boundaries would decode to U+FFFD and
   * be re-encoded as such, corrupting that event — confirm whether upstream
   * payloads can contain multi-byte characters.
   *
   * @param chunk The newly received bytes
   * @returns All events completed by this chunk
   */
  protected parseEvents(chunk: Uint8Array): SSEvent[] {
    // Append new chunk to existing buffer
    this.messageBuffer = new Uint8Array([...this.messageBuffer, ...chunk]);

    const events: SSEvent[] = [];
    // SSE allows CRLF, CR, or LF line terminators.
    const lines = this.textDecoder
      .decode(this.messageBuffer)
      .split(/\r\n|\r|\n/);

    let currentEvent: Partial<SSEvent> = {};
    // Index of the first line belonging to a not-yet-complete event.
    let completeEventCount = 0;

    // Iterate over the lines to find complete events
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];

      // Empty line signals the end of an event
      if (line === '') {
        // Only events that carry data are dispatched.
        if (currentEvent.data) {
          // Remove trailing newline if present
          currentEvent.data = currentEvent.data.replace(/\n$/, '');
          events.push(currentEvent as SSEvent);
          currentEvent = {};
          completeEventCount = i + 1;
        }
        continue;
      }

      // Parse field: value format (lines without a colon are skipped).
      const colonIndex = line.indexOf(':');
      if (colonIndex === -1) continue;

      const field = line.slice(0, colonIndex);
      // Skip initial space after colon if present
      const valueStartIndex =
        colonIndex + 1 + (line[colonIndex + 1] === ' ' ? 1 : 0);
      const value = line.slice(valueStartIndex);

      if (field === 'data') {
        // Multiple data lines in one event are joined with newlines.
        currentEvent.data = currentEvent.data
          ? currentEvent.data + '\n' + value
          : value;
      } else if (field === 'event') {
        currentEvent.event = value;
      } else if (field === 'id') {
        currentEvent.id = value;
      } else if (field === 'retry') {
        const retryMs = parseInt(value, 10);
        if (!isNaN(retryMs)) {
          currentEvent.retry = retryMs;
        }
      }
    }

    // Store the remainder of the buffer for the next chunk
    const remainder = lines.slice(completeEventCount).join('\n');
    this.messageBuffer = this.textEncoder.encode(remainder);

    return events;
  }

  /**
   * Closes the SSE connection and cleans up event listeners.
   *
   * The abort surfaces in readStream() as a caught error; the aborted-signal
   * check there suppresses the `error` event for this intentional shutdown.
   */
  public close(): void {
    // Clean up everything including the visibility handler
    this.controller.abort();

    // Remove the visibility handler (This is only required on browsers)
    if (this.visibilityChangeHandler && typeof document !== 'undefined') {
      document.removeEventListener(
        'visibilitychange',
        this.visibilityChangeHandler,
      );
      this.visibilityChangeHandler = null;
    }
  }

  /**
   * Checks if the client is currently connected.
   * @returns Whether the client is connected
   */
  public isConnected(): boolean {
    return this.connected;
  }

  /**
   * Will handle thrown errors from the callback and call the onError callback.
   * This is to avoid the sse-session from disconnecting from errors that are not a result of the sse-session itself.
   *
   * NOTE(review): not referenced anywhere in this file's visible code —
   * presumably used by subclasses, or dead; verify before removing.
   *
   * @param callback The callback to handle.
   * @param args The arguments to pass to the callback.
   */
  private handleCallback<T>(callback: (args: T) => void, args: T): void {
    try {
      callback(args);
    } catch (error) {
      // Fall back to the error event; if that listener also throws, log it so
      // the failure is never silently swallowed.
      try {
        this.emit('error', error);
      } catch (error) {
        console.log(`SSE Session: onError callback error:`, error);
      }
    }
  }
}
|
||||
|
||||
/**
 * Configuration options for the SSESession.
 */
export interface SSESessionOptions {
  /**
   * The fetch function to use.
   *
   * NOTE: This is compatible with Browser/Node's native "fetch" function.
   * We use this in place of "typeof fetch" so that we can accept non-standard URLs ("url" is a "string" here).
   * For example, a LibP2P adapter might not use a standardized URL format (and might only include "path").
   * This would cause a type error as native fetch expects type "URL".
   */
  fetch: (url: string, options: RequestInit) => Promise<Response>;

  /**
   * HTTP method to use (GET or POST).
   */
  method: 'GET' | 'POST';

  /**
   * HTTP headers to send with the request.
   * Defaults include `Accept: text/event-stream` and `Cache-Control: no-cache`.
   */
  headers?: Record<string, string>;

  /**
   * Body to send with POST requests.
   */
  body?: string | FormData | URLSearchParams;

  /**
   * Whether to attempt to reconnect after an unexpected stream error.
   */
  attemptReconnect: boolean;

  /**
   * The delay in milliseconds between reconnection attempts
   * (used as the base delay of the exponential backoff).
   */
  retryDelay: number;

  /**
   * Whether to reconnect when the session is terminated by the server
   * (i.e. the stream ends cleanly rather than erroring).
   */
  persistent: boolean;
}
|
||||
|
||||
/**
 * Represents a Server-Sent Event.
 */
export interface SSEvent {
  /**
   * Event data. Multi-line `data:` fields are joined with newlines.
   */
  data: string;

  /**
   * Event type (the `event:` field), if the server sent one.
   */
  event?: string;

  /**
   * Event ID (the `id:` field), if the server sent one.
   */
  id?: string;

  /**
   * Reconnection time in milliseconds (the `retry:` field), if the server
   * sent a parseable integer value.
   */
  retry?: number;
}
|
||||
17
tests/ephemeral-transport.ts
Normal file
17
tests/ephemeral-transport.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
// Manual smoke test: receive one message through an EphemeralTransport that
// wraps a composite of two SSE transports.
import { EphemeralTransport } from '../src/transports/ephemeral-transport.js';
import { TransportComposite } from '../src/transports/transport-composite.js';

import { TransportSSE } from '../src/transports/transport-sse.js';

// Factory producing a composite transport backed by two parallel SSE
// connections to the same URL.
const composite = async (url: string) =>
  TransportComposite.from(
    await Promise.all([TransportSSE.from(url), TransportSSE.from(url)]),
  );

// The ephemeral transport is given the factory so it can construct transports
// on demand — presumably one per operation; confirm in EphemeralTransport.
const ephemeral = EphemeralTransport.from(composite);

// NOTE(review): hits a live network endpoint; not suitable for automated CI
// without stubbing the transports.
const message = await ephemeral.receive(
  'https://oracles.generalprotocols.com/sse/v1/messages',
);

console.log(message);
|
||||
14
tsconfig.json
Normal file
14
tsconfig.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
  "compilerOptions": {
    // Modern emit target; requires a recent TypeScript/Node toolchain.
    "target": "ES2024",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "outDir": "./dist",
    "rootDir": "./src",
    "baseUrl": ".",
    // DOM lib provides document/fetch/TextDecoder types used by sse-session.
    "lib": ["DOM", "ES2024"]
    // NOTE(review): "strict" is not enabled — consider turning it on.
  },

  // "include": ["src/**/*"],
  "exclude": ["node_modules", "dist"]
}
|
||||
Reference in New Issue
Block a user