use @snort/system cache

2023-06-15 12:03:05 +01:00
parent c2a3a706de
commit fc11381ccd
79 changed files with 679 additions and 524 deletions

View File

@@ -0,0 +1,21 @@
{
"name": "@snort/shared",
"version": "1.0.0",
"description": "Shared components for Snort",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"repository": "https://git.v0l.io/Kieran/snort",
"author": "Kieran",
"license": "GPL-3.0-or-later",
"private": false,
"scripts": {
"build": "tsc"
},
"dependencies": {
"@noble/curves": "^1.1.0",
"@noble/hashes": "^1.3.1",
"@scure/base": "^1.1.1",
"debug": "^4.3.4",
"dexie": "^3.2.4"
}
}

View File

@@ -0,0 +1,8 @@
/**
* Regex to match an email address
*/
export const EmailRegex =
// eslint-disable-next-line no-useless-escape
/^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
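
Illustrative check (the sample strings are made up) — the LNURL class later in this commit uses the same regex to detect LUD-16 lightning addresses:

EmailRegex.test("alice@example.com"); // true
EmailRegex.test("not an address");    // false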

View File

@@ -0,0 +1,41 @@
export type HookFn<TSnapshot> = (e?: TSnapshot) => void;
export interface HookFilter<TSnapshot> {
fn: HookFn<TSnapshot>;
}
/**
* Simple React hookable store with manual change notifications
*/
export abstract class ExternalStore<TSnapshot> {
#hooks: Array<HookFilter<TSnapshot>> = [];
#snapshot: Readonly<TSnapshot> = {} as Readonly<TSnapshot>;
#changed = true;
hook(fn: HookFn<TSnapshot>) {
this.#hooks.push({
fn,
});
return () => {
const idx = this.#hooks.findIndex(a => a.fn === fn);
if (idx >= 0) {
this.#hooks.splice(idx, 1);
}
};
}
snapshot() {
if (this.#changed) {
this.#snapshot = this.takeSnapshot();
this.#changed = false;
}
return this.#snapshot;
}
protected notifyChange(sn?: TSnapshot) {
this.#changed = true;
this.#hooks.forEach(h => h.fn(sn));
}
abstract takeSnapshot(): TSnapshot;
}
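
A rough sketch of how a consumer might use this store (CounterStore and useCounter are hypothetical, assuming a React 18 app): hook() registers a callback and returns an unsubscribe function, and snapshot() returns a cached value rebuilt only after notifyChange(), which matches the contract of React's useSyncExternalStore:

import { useSyncExternalStore } from "react";
import { ExternalStore } from "@snort/shared";

// Hypothetical store holding a single counter value
class CounterStore extends ExternalStore<number> {
  #count = 0;

  increment() {
    this.#count++;
    this.notifyChange(); // invalidate cached snapshot + run hooks
  }

  takeSnapshot(): number {
    return this.#count;
  }
}

export const Counter = new CounterStore();

// Subscribe via hook(), read via the cached snapshot()
export function useCounter() {
  return useSyncExternalStore(
    cb => Counter.hook(cb),
    () => Counter.snapshot()
  );
}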

View File

@@ -0,0 +1,207 @@
import debug from "debug";
import { Table } from "dexie";
import { unixNowMs, unwrap } from "./utils";
type HookFn = () => void;
export interface KeyedHookFilter {
key: string;
fn: HookFn;
}
/**
* Dexie-backed generic hookable store
*/
export abstract class FeedCache<TCached> {
#name: string;
#hooks: Array<KeyedHookFilter> = [];
#snapshot: Readonly<Array<TCached>> = [];
#changed = true;
#hits = 0;
#miss = 0;
protected table?: Table<TCached>;
protected onTable: Set<string> = new Set();
protected cache: Map<string, TCached> = new Map();
constructor(name: string, table?: Table<TCached>) {
this.#name = name;
this.table = table;
setInterval(() => {
debug(this.#name)(
"%d loaded, %d on-disk, %d hooks, %d% hit",
this.cache.size,
this.onTable.size,
this.#hooks.length,
((this.#hits / (this.#hits + this.#miss)) * 100).toFixed(1)
);
}, 30_000);
}
async preload() {
const keys = await this.table?.toCollection().primaryKeys() ?? [];
this.onTable = new Set<string>(keys.map(a => a as string));
}
hook(fn: HookFn, key: string | undefined) {
if (!key) {
return () => {
//noop
};
}
this.#hooks.push({
key,
fn,
});
return () => {
const idx = this.#hooks.findIndex(a => a.fn === fn);
if (idx >= 0) {
this.#hooks.splice(idx, 1);
}
};
}
getFromCache(key?: string) {
if (key) {
const ret = this.cache.get(key);
if (ret) {
this.#hits++;
} else {
this.#miss++;
}
return ret;
}
}
async get(key?: string) {
if (key && !this.cache.has(key) && this.table) {
const cached = await this.table.get(key);
if (cached) {
this.cache.set(this.key(cached), cached);
this.notifyChange([key]);
return cached;
}
}
return key ? this.cache.get(key) : undefined;
}
async bulkGet(keys: Array<string>) {
const missing = keys.filter(a => !this.cache.has(a));
if (missing.length > 0 && this.table) {
const cached = await this.table.bulkGet(missing);
cached.forEach(a => {
if (a) {
this.cache.set(this.key(a), a);
}
});
}
return keys
.map(a => this.cache.get(a))
.filter(a => a)
.map(a => unwrap(a));
}
async set(obj: TCached) {
const k = this.key(obj);
this.cache.set(k, obj);
if (this.table) {
await this.table.put(obj);
this.onTable.add(k);
}
this.notifyChange([k]);
}
async bulkSet(obj: Array<TCached>) {
if (this.table) {
await this.table.bulkPut(obj);
obj.forEach(a => this.onTable.add(this.key(a)));
}
obj.forEach(v => this.cache.set(this.key(v), v));
this.notifyChange(obj.map(a => this.key(a)));
}
/**
* Try to update an entry that carries created/loaded timestamps
* @param m The incoming entry
* @returns The kind of update applied: "new" | "updated" | "refresh" | "no_change"
*/
async update<TCachedWithCreated extends TCached & { created: number; loaded: number }>(m: TCachedWithCreated) {
const k = this.key(m);
const existing = this.getFromCache(k) as TCachedWithCreated;
const updateType = (() => {
if (!existing) {
return "new";
}
if (existing.created < m.created) {
return "updated";
}
if (existing && existing.loaded < m.loaded) {
return "refresh";
}
return "no_change";
})();
debug(this.#name)("Updating %s %s %o", k, updateType, m);
if (updateType !== "no_change") {
const updated = {
...existing,
...m,
};
await this.set(updated);
}
return updateType;
}
/**
* Loads a list of rows from disk cache
* @param keys List of ids to load
* @returns Keys that do not exist on disk cache
*/
async buffer(keys: Array<string>): Promise<Array<string>> {
const needsBuffer = keys.filter(a => !this.cache.has(a));
if (this.table && needsBuffer.length > 0) {
const mapped = needsBuffer.map(a => ({
has: this.onTable.has(a),
key: a,
}));
const start = unixNowMs();
const fromCache = await this.table.bulkGet(mapped.filter(a => a.has).map(a => a.key));
const fromCacheFiltered = fromCache.filter(a => a !== undefined).map(a => unwrap(a));
fromCacheFiltered.forEach(a => {
this.cache.set(this.key(a), a);
});
this.notifyChange(fromCacheFiltered.map(a => this.key(a)));
debug(this.#name)(
`Loaded %d/%d in %d ms`,
fromCacheFiltered.length,
keys.length,
(unixNowMs() - start).toLocaleString()
);
return mapped.filter(a => !a.has).map(a => a.key);
}
// no IndexedDB, always return all keys
return needsBuffer;
}
async clear() {
await this.table?.clear();
this.cache.clear();
this.onTable.clear();
}
snapshot() {
if (this.#changed) {
this.#snapshot = this.takeSnapshot();
this.#changed = false;
}
return this.#snapshot;
}
protected notifyChange(keys: Array<string>) {
this.#changed = true;
this.#hooks.filter(a => keys.includes(a.key) || a.key === "*").forEach(h => h.fn());
}
abstract key(of: TCached): string;
abstract takeSnapshot(): Array<TCached>;
}
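
A minimal hypothetical subclass, for illustration (Profile, ProfileDb and ProfileCache are made-up names). A subclass supplies the Dexie table plus key() and takeSnapshot(), and update() then classifies each write as "new", "updated", "refresh" or "no_change" from the created/loaded timestamps:

import Dexie, { Table } from "dexie";
import { FeedCache, unixNow, unixNowMs } from "@snort/shared";

// Hypothetical cached row keyed by pubkey
interface Profile {
  pubkey: string;
  name?: string;
  created: number; // content timestamp (unix seconds)
  loaded: number;  // when this copy was fetched (unix ms)
}

class ProfileDb extends Dexie {
  profiles!: Table<Profile>;
  constructor() {
    super("profile-example");
    this.version(1).stores({ profiles: "pubkey" });
  }
}

class ProfileCache extends FeedCache<Profile> {
  constructor(table: Table<Profile>) {
    super("ProfileCache", table);
  }

  key(of: Profile): string {
    return of.pubkey;
  }

  takeSnapshot(): Array<Profile> {
    return [...this.cache.values()];
  }
}

const Profiles = new ProfileCache(new ProfileDb().profiles);

async function example() {
  await Profiles.preload(); // load primary keys already on disk
  await Profiles.update({
    pubkey: "deadbeef",
    name: "alice",
    created: unixNow(),
    loaded: unixNowMs(),
  }); // "new" on first call, then "no_change" / "refresh" / "updated"
}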

View File

@@ -0,0 +1,5 @@
export * from "./external-store";
export * from "./lnurl";
export * from "./utils";
export * from "./work-queue";
export * from "./feed-cache";

View File

@@ -0,0 +1,230 @@
import { EmailRegex } from "./const";
import { bech32ToText, unwrap } from "./utils";
const PayServiceTag = "payRequest";
export enum LNURLErrorCode {
ServiceUnavailable = 1,
InvalidLNURL = 2,
}
export class LNURLError extends Error {
code: LNURLErrorCode;
constructor(code: LNURLErrorCode, msg: string) {
super(msg);
this.code = code;
}
}
export class LNURL {
#url: URL;
#service?: LNURLService;
/**
* Set up the LNURL service
* @param lnurl bech32 lnurl / lightning address / https url
*/
constructor(lnurl: string) {
lnurl = lnurl.toLowerCase().trim();
if (lnurl.startsWith("lnurl")) {
const decoded = bech32ToText(lnurl);
if (!decoded.startsWith("http")) {
throw new LNURLError(LNURLErrorCode.InvalidLNURL, "Not a url");
}
this.#url = new URL(decoded);
} else if (lnurl.match(EmailRegex)) {
const [handle, domain] = lnurl.split("@");
this.#url = new URL(`https://${domain}/.well-known/lnurlp/${handle}`);
} else if (lnurl.startsWith("https:")) {
this.#url = new URL(lnurl);
} else if (lnurl.startsWith("lnurlp:")) {
const tmp = new URL(lnurl);
tmp.protocol = "https:";
this.#url = tmp;
} else {
throw new LNURLError(LNURLErrorCode.InvalidLNURL, "Could not determine service url");
}
}
/**
* URL of this payService
*/
get url() {
return this.#url;
}
/**
* Return the best formatted LNURL (LN Address when available)
*/
get lnurl() {
if (this.isLNAddress) {
return this.getLNAddress();
}
return this.#url.toString();
}
/**
* Human readable name for this service
*/
get name() {
// LN Address formatted URL
if (this.isLNAddress) {
return this.getLNAddress();
}
// Generic LUD-06 url
return this.#url.hostname;
}
/**
* Is this LNURL a LUD-16 Lightning Address
*/
get isLNAddress() {
return this.#url.pathname.startsWith("/.well-known/lnurlp/");
}
/**
* Get the LN Address for this LNURL
*/
getLNAddress() {
const pathParts = this.#url.pathname.split("/");
const username = pathParts[pathParts.length - 1];
return `${username}@${this.#url.hostname}`;
}
/**
* Create a NIP-57 zap tag from this LNURL
*/
getZapTag() {
if (this.isLNAddress) {
return ["zap", this.getLNAddress(), "lud16"];
} else {
return ["zap", this.#url.toString(), "lud06"];
}
}
async load() {
const rsp = await fetch(this.#url);
if (rsp.ok) {
this.#service = await rsp.json();
this.#validateService();
}
}
/**
* Fetch an invoice from the LNURL service
* @param amount Amount in sats
* @param comment Optional comment, only sent when the service allows comments
* @param zap Optional NIP-57 zap request, sent as the "nostr" query parameter
* @returns The invoice response from the service
*/
async getInvoice(amount: number, comment?: string, zap?: object) {
const callback = new URL(unwrap(this.#service?.callback));
const query = new Map<string, string>();
if (callback.search.length > 0) {
callback.search
.slice(1)
.split("&")
.forEach(a => {
const pSplit = a.split("=");
query.set(pSplit[0], pSplit[1]);
});
}
query.set("amount", Math.floor(amount * 1000).toString());
if (comment && this.#service?.commentAllowed) {
query.set("comment", comment);
}
if (this.#service?.nostrPubkey && zap) {
query.set("nostr", JSON.stringify(zap));
}
const baseUrl = `${callback.protocol}//${callback.host}${callback.pathname}`;
const queryJoined = [...query.entries()].map(v => `${v[0]}=${encodeURIComponent(v[1])}`).join("&");
try {
const rsp = await fetch(`${baseUrl}?${queryJoined}`);
if (rsp.ok) {
const data: LNURLInvoice = await rsp.json();
console.debug("[LNURL]: ", data);
if (data.status === "ERROR") {
throw new Error(data.reason);
} else {
return data;
}
} else {
throw new LNURLError(LNURLErrorCode.ServiceUnavailable, `Failed to fetch invoice (${rsp.statusText})`);
}
} catch (e) {
throw new LNURLError(LNURLErrorCode.ServiceUnavailable, "Failed to load callback");
}
}
/**
* Are zaps (NIP-57) supported
*/
get canZap() {
return this.#service?.nostrPubkey ? true : false;
}
/**
* Return pubkey of zap service
*/
get zapperPubkey() {
return this.#service?.nostrPubkey;
}
/**
* Get the max allowed comment length
*/
get maxCommentLength() {
return this.#service?.commentAllowed ?? 0;
}
/**
* Min sendable in milli-sats
*/
get min() {
return this.#service?.minSendable ?? 1_000; // 1 sat
}
/**
* Max sendable in milli-sats
*/
get max() {
return this.#service?.maxSendable ?? 100e9; // 1 BTC in milli-sats
}
#validateService() {
if (this.#service?.tag !== PayServiceTag) {
throw new LNURLError(LNURLErrorCode.InvalidLNURL, "Only LNURLp is supported");
}
if (!this.#service?.callback) {
throw new LNURLError(LNURLErrorCode.InvalidLNURL, "No callback url");
}
}
}
export interface LNURLService {
tag: string;
nostrPubkey?: string;
minSendable?: number;
maxSendable?: number;
metadata: string;
callback: string;
commentAllowed?: number;
}
export interface LNURLStatus {
status: "SUCCESS" | "ERROR";
reason?: string;
}
export interface LNURLInvoice extends LNURLStatus {
pr?: string;
successAction?: LNURLSuccessAction;
}
export interface LNURLSuccessAction {
description?: string;
url?: string;
}
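
Hypothetical call flow (alice@example.com is a placeholder address): construct from a lightning address or bech32 lnurl, load() to fetch and validate the payRequest metadata, then getInvoice() for a BOLT11 invoice:

import { LNURL } from "@snort/shared";

async function requestInvoice() {
  const svc = new LNURL("alice@example.com"); // LUD-16 address form
  await svc.load();                           // fetch + validate the service

  console.log(svc.name, svc.min, svc.max);    // "alice@example.com", milli-sat bounds
  if (svc.canZap) {
    console.log("zaps supported by", svc.zapperPubkey);
  }

  const invoice = await svc.getInvoice(21, "thanks!"); // 21 sats, optional comment
  return invoice.pr;                          // BOLT11 payment request, if provided
}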

View File

@@ -0,0 +1,184 @@
import * as utils from "@noble/curves/abstract/utils";
import * as secp from "@noble/curves/secp256k1";
import { sha256 as sha2 } from "@noble/hashes/sha256";
import { bech32 } from "@scure/base";
export function unwrap<T>(v: T | undefined | null): T {
if (v === undefined || v === null) {
throw new Error("missing value");
}
return v;
}
/**
* Convert hex to bech32
*/
export function hexToBech32(hrp: string, hex?: string) {
if (typeof hex !== "string" || hex.length === 0 || hex.length % 2 !== 0) {
return "";
}
try {
const buf = utils.hexToBytes(hex);
return bech32.encode(hrp, bech32.toWords(buf));
} catch (e) {
console.warn("Invalid hex", hex, e);
return "";
}
}
export function sanitizeRelayUrl(url: string) {
try {
return new URL(url).toString();
} catch {
// ignore
}
}
export function unixNow() {
return Math.floor(unixNowMs() / 1000);
}
export function unixNowMs() {
return new Date().getTime();
}
export function deepClone<T>(obj: T) {
if ("structuredClone" in window) {
return structuredClone(obj);
} else {
return JSON.parse(JSON.stringify(obj));
}
}
export function deepEqual(x: any, y: any): boolean {
const ok = Object.keys,
tx = typeof x,
ty = typeof y;
return x && y && tx === "object" && tx === ty
? ok(x).length === ok(y).length && ok(x).every(key => deepEqual(x[key], y[key]))
: x === y;
}
export function countMembers(a: any) {
let ret = 0;
for (const v of Object.values(a)) {
if (Array.isArray(v)) {
ret += v.length;
}
}
return ret;
}
export function equalProp(a: string | number | Array<string | number> | undefined, b: string | number | Array<string | number> | undefined) {
if ((a !== undefined && b === undefined) || (a === undefined && b !== undefined)) {
return false;
}
if (Array.isArray(a) && Array.isArray(b)) {
if (a.length !== b.length) {
return false;
}
if (!a.every(v => b.includes(v))) {
return false;
}
// both arrays contain the same values
return true;
}
return a === b;
}
/**
* Compute the "distance" between two objects by counting properties whose values differ
* Missing/added keys add +10 to the distance
* This is not recursive
*/
export function distance(a: any, b: any): number {
const keys1 = Object.keys(a);
const keys2 = Object.keys(b);
const maxKeys = keys1.length > keys2.length ? keys1 : keys2;
let distance = 0;
for (const key of maxKeys) {
if (key in a && key in b) {
if (Array.isArray(a[key]) && Array.isArray(b[key])) {
const aa = a[key] as Array<string | number>;
const bb = b[key] as Array<string | number>;
if (aa.length === bb.length) {
if (aa.some(v => !bb.includes(v))) {
distance++;
}
} else {
distance++;
}
} else if (a[key] !== b[key]) {
distance++;
}
} else {
distance += 10;
}
}
return distance;
}
export function dedupe<T>(v: Array<T>) {
return [...new Set(v)];
}
export function appendDedupe<T>(a?: Array<T>, b?: Array<T>) {
return dedupe([...(a ?? []), ...(b ?? [])]);
}
export const sha256 = (str: string | Uint8Array): string => {
return utils.bytesToHex(sha2(str));
}
export function getPublicKey(privKey: string) {
return utils.bytesToHex(secp.schnorr.getPublicKey(privKey));
}
export function bech32ToHex(str: string) {
try {
const nKey = bech32.decode(str, 1_000);
const buff = bech32.fromWords(nKey.words);
return utils.bytesToHex(Uint8Array.from(buff));
} catch (e) {
return str;
}
}
/**
* Decode bech32 to a UTF-8 string
* @param str bech32 encoded string
* @returns The decoded text, or an empty string on failure
*/
export function bech32ToText(str: string) {
try {
const decoded = bech32.decode(str, 1000);
const buf = bech32.fromWords(decoded.words);
return new TextDecoder().decode(Uint8Array.from(buf));
} catch {
return "";
}
}
export async function fetchNip05Pubkey(name: string, domain: string, timeout = 2_000) {
interface NostrJson {
names: Record<string, string>;
}
if (!name || !domain) {
return undefined;
}
try {
const res = await fetch(`https://${domain}/.well-known/nostr.json?name=${encodeURIComponent(name)}`, {
signal: AbortSignal.timeout(timeout),
});
const data: NostrJson = await res.json();
const match = Object.keys(data.names).find(n => {
return n.toLowerCase() === name.toLowerCase();
});
return match ? data.names[match] : undefined;
} catch {
// ignored
}
return undefined;
}
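
A few of these helpers in use, for illustration (the private key is a throwaway test value):

import { getPublicKey, hexToBech32, bech32ToHex, dedupe, appendDedupe, unixNow } from "@snort/shared";

const privKey = "0000000000000000000000000000000000000000000000000000000000000001"; // test-only
const pubKey = getPublicKey(privKey);     // x-only schnorr pubkey as hex
const npub = hexToBech32("npub", pubKey); // bech32 encode with "npub" prefix
console.log(bech32ToHex(npub) === pubKey);     // true, round-trips
console.log(dedupe(["a", "a", "b"]));          // ["a", "b"]
console.log(appendDedupe(["a"], ["a", "b"]));  // ["a", "b"]
console.log(unixNow());                        // unix timestamp in seconds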

View File

@@ -0,0 +1,30 @@
export interface WorkQueueItem {
next: () => Promise<unknown>;
resolve(v: unknown): void;
reject(e: unknown): void;
}
export async function processWorkQueue(queue?: Array<WorkQueueItem>, queueDelay = 200) {
while (queue && queue.length > 0) {
const v = queue.shift();
if (v) {
try {
const ret = await v.next();
v.resolve(ret);
} catch (e) {
v.reject(e);
}
}
}
setTimeout(() => processWorkQueue(queue, queueDelay), queueDelay);
}
export const barrierQueue = async <T>(queue: Array<WorkQueueItem>, then: () => Promise<T>): Promise<T> => {
return new Promise<T>((resolve, reject) => {
queue.push({
next: then,
resolve,
reject,
});
});
};
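
A hypothetical serialization barrier built on this queue (SignerQueue and signEvent are made-up names): processWorkQueue() drains the array and re-arms itself on a 200ms timer, while barrierQueue() parks each caller until its item has run:

import { WorkQueueItem, processWorkQueue, barrierQueue } from "@snort/shared";

const SignerQueue: Array<WorkQueueItem> = [];
processWorkQueue(SignerQueue); // start the polling loop (re-checks every 200ms)

// Each caller waits for earlier queued work before its own callback runs
function signEvent(id: string) {
  return barrierQueue(SignerQueue, async () => {
    // placeholder for real async work (e.g. talking to a remote signer)
    return `signed:${id}`;
  });
}

// signEvent("a") and signEvent("b") resolve in submission order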

View File

@@ -0,0 +1,18 @@
{
"compilerOptions": {
"baseUrl": "src",
"target": "ES2020",
"moduleResolution": "node",
"esModuleInterop": true,
"noImplicitOverride": true,
"module": "CommonJS",
"strict": true,
"declaration": true,
"declarationMap": true,
"inlineSourceMap": true,
"outDir": "dist",
"skipLibCheck": true
},
"include": ["src/**/*.ts"],
"files": ["src/index.ts"]
}