feat: solve conflicts
@@ -7,61 +7,48 @@ Simple example:

```js
import {
  NostrSystem,
  EventPublisher,
  UserRelaysCache,
  RequestBuilder,
  FlatNoteStore,
  StoreSnapshot
  StoreSnapshot,
  NoteCollection
} from "@snort/system"

// Provided in-memory / indexedDb cache for relays
// You can also implement your own with "RelayCache" interface
const RelaysCache = new UserRelaysCache();

// example auth handler using NIP-07
const AuthHandler = async (challenge: string, relay: string) => {
  const pub = await EventPublisher.nip7();
  if (pub) {
    return await pub.nip42Auth(challenge, relay);
  }
}

// Singleton instance to store all connections and access query fetching system
const System = new NostrSystem({
  relayCache: RelaysCache,
  authHandler: AuthHandler // can be left undefined if you don't care about NIP-42 Auth
});
const System = new NostrSystem({});

(async () => {
  // connect to one "bootstrap" relay to pull profiles/relay lists from
  // also used as a fallback relay when the gossip model doesn't know which relays to pick, or "authors" are not provided in the request
  await System.ConnectToRelay("wss://relay.snort.social", { read: true, write: false });
  // Setup cache system
  await System.Init();

  // ID should be unique to the use case, this is important as all data fetched from this ID will be merged into the same NoteStore
  const rb = new RequestBuilder("get-posts");
  rb.withFilter()
    .authors(["63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed"]) // Kieran pubkey
    .kinds([1])
    .limit(10);
  // connect to one "bootstrap" relay to pull profiles/relay lists from
  // also used as a fallback relay when the gossip model doesn't know which relays to pick, or "authors" are not provided in the request
  await System.ConnectToRelay("wss://relay.snort.social", { read: true, write: false });

  const q = System.Query<FlatNoteStore>(FlatNoteStore, rb);
  // basic usage using "onEvent", fired for every event added to the store
  q.onEvent = (sub, e) => {
    console.debug(sub, e);
  }
  // ID should be unique to the use case, this is important as all data fetched from this ID will be merged into the same NoteStore
  const rb = new RequestBuilder("get-posts");
  rb.withFilter()
    .authors(["63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed"]) // Kieran pubkey
    .kinds([1])
    .limit(10);

  // Hookable type using change notification, limited to every 500ms
  const release = q.feed.hook(() => {
    // since we use the FlatNoteStore we expect NostrEvent[]
    // other stores provide different data, like a single event instead of an array (latest version)
    const state = q.feed.snapshot as StoreSnapshot<ReturnType<FlatNoteStore["getSnapshotData"]>>;
  const q = System.Query(NoteCollection, rb);
  // basic usage using "onEvent", fired every 100ms
  q.feed.onEvent(evs => {
    console.log(evs);
    // something else..
  });

    // do something with snapshot of store
    console.log(`We have ${state.data.length} events now!`)
  });
  // Hookable type using change notification, limited to every 500ms
  const release = q.feed.hook(() => {
    // since we use the NoteCollection we expect NostrEvent[]
    // other stores provide different data, like a single event instead of an array (latest version)
    const state = q.feed.snapshot as StoreSnapshot<ReturnType<NoteCollection["getSnapshotData"]>>;

    // release the hook when it's not needed anymore
    // these patterns will be managed in @snort/system-react to make it easier to use react or other UI frameworks
    // release();
    // do something with snapshot of store
    console.log(`We have ${state.data?.length} events now!`);
  });

  // release the hook when it's not needed anymore
  // these patterns will be managed in @snort/system-react to make it easier to use react or other UI frameworks
  release();
})();
```
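For a one-shot request, this commit also adds a `Fetch` method to `SystemInterface` (see the `index.ts` and `nostr-system.ts` hunks further down). A minimal sketch of how it might be used, assuming the same relay setup as the example above and that `TaggedNostrEvent` is exported from the package root:

```ts
import { NostrSystem, RequestBuilder, TaggedNostrEvent } from "@snort/system";

const System = new NostrSystem({});

(async () => {
  await System.Init();
  await System.ConnectToRelay("wss://relay.snort.social", { read: true, write: false });

  const rb = new RequestBuilder("fetch-posts");
  rb.withFilter().kinds([1]).limit(10);

  // Fetch resolves once the query reports full progress (all traces reached EOSE);
  // the optional callback receives events as they arrive, batched roughly every 100ms.
  const snapshot = await System.Fetch(rb, (evs: Array<TaggedNostrEvent>) => {
    console.log(`received ${evs.length} more events`);
  });
  console.log("final snapshot", snapshot);
})();
```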
@@ -1,24 +1,12 @@
import { NostrSystem, EventPublisher, UserRelaysCache, RequestBuilder, FlatNoteStore, StoreSnapshot } from "../src";

// Provided in-memory / indexedDb cache for relays
// You can also implement your own with "RelayCache" interface
const RelaysCache = new UserRelaysCache();

// example auth handler using NIP-07
const AuthHandler = async (challenge: string, relay: string) => {
const pub = await EventPublisher.nip7();
if (pub) {
return await pub.nip42Auth(challenge, relay);
}
};
import { NostrSystem, RequestBuilder, FlatNoteStore, StoreSnapshot, NoteCollection } from "../src";

// Singleton instance to store all connections and access query fetching system
const System = new NostrSystem({
relayCache: RelaysCache,
authHandler: AuthHandler, // can be left undefined if you don't care about NIP-42 Auth
});
const System = new NostrSystem({});

(async () => {
// Setup cache system
await System.Init();

// connect to one "bootstrap" relay to pull profiles/relay lists from
// also used as a fallback relay when the gossip model doesn't know which relays to pick, or "authors" are not provided in the request
await System.ConnectToRelay("wss://relay.snort.social", { read: true, write: false });
@@ -30,23 +18,24 @@ const System = new NostrSystem({
.kinds([1])
.limit(10);

const q = System.Query<FlatNoteStore>(FlatNoteStore, rb);
// basic usage using "onEvent", fired for every event added to the store
q.onEvent = (sub, e) => {
console.debug(sub, e);
};
const q = System.Query(NoteCollection, rb);
// basic usage using "onEvent", fired every 100ms
q.feed.onEvent(evs => {
console.log(evs);
// something else..
});

// Hookable type using change notification, limited to every 500ms
const release = q.feed.hook(() => {
// since we use the FlatNoteStore we expect NostrEvent[]
// other stores provide different data, like a single event instead of an array (latest version)
const state = q.feed.snapshot as StoreSnapshot<ReturnType<FlatNoteStore["getSnapshotData"]>>;
const state = q.feed.snapshot as StoreSnapshot<ReturnType<NoteCollection["getSnapshotData"]>>;

// do something with snapshot of store
console.log(`We have ${state.data.length} events now!`);
console.log(`We have ${state.data?.length} events now!`);
});

// release the hook when it's not needed anymore
// these patterns will be managed in @snort/system-react to make it easier to use react or other UI frameworks
// release();
release();
})();
@@ -1,12 +1,12 @@
{
"name": "@snort/system",
"version": "1.0.17",
"version": "1.0.21",
"description": "Snort nostr system package",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"repository": "https://git.v0l.io/Kieran/snort",
"author": "v0l",
"license": "GPL-3.0-or-later",
"license": "MIT",
"scripts": {
"build": "rm -rf dist && tsc",
"test": "jest --runInBand"
@@ -20,20 +20,25 @@
"@peculiar/webcrypto": "^1.4.3",
"@types/debug": "^4.1.8",
"@types/jest": "^29.5.1",
"@types/node": "^20.5.9",
"@types/uuid": "^9.0.2",
"@types/ws": "^8.5.5",
"jest": "^29.5.0",
"jest-environment-jsdom": "^29.5.0",
"ts-jest": "^29.1.0",
"ts-node": "^10.9.1",
"typescript": "^5.2.2"
},
"dependencies": {
"@noble/curves": "^1.2.0",
"@noble/hashes": "^1.3.2",
"@scure/base": "^1.1.2",
"@snort/shared": "^1.0.4",
"@snort/shared": "^1.0.6",
"@stablelib/xchacha20": "^1.0.1",
"debug": "^4.3.4",
"dexie": "^3.2.4",
"uuid": "^9.0.0"
"isomorphic-ws": "^5.0.0",
"uuid": "^9.0.0",
"ws": "^8.14.0"
}
}
packages/system/src/cache/events.ts (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
import { NostrEvent } from "nostr";
import { db } from ".";
import { FeedCache } from "@snort/shared";

export class EventsCache extends FeedCache<NostrEvent> {
  constructor() {
    super("EventsCache", db.events);
  }

  key(of: NostrEvent): string {
    return of.id;
  }

  override async preload(): Promise<void> {
    await super.preload();
    // load everything
    await this.buffer([...this.onTable]);
  }

  takeSnapshot(): Array<NostrEvent> {
    return [...this.cache.values()];
  }
}
@@ -1,5 +1,6 @@
import { v4 as uuid } from "uuid";
import debug from "debug";
import WebSocket from "isomorphic-ws";
import { unwrap, ExternalStore, unixNowMs } from "@snort/shared";

import { DefaultConnectTimeout } from "./const";
@@ -138,7 +139,7 @@ export class Connection extends ExternalStore<ConnectionStateSnapshot> {
this.#sendPendingRaw();
}

OnClose(e: CloseEvent) {
OnClose(e: WebSocket.CloseEvent) {
if (this.ReconnectTimer) {
clearTimeout(this.ReconnectTimer);
this.ReconnectTimer = undefined;
@@ -171,10 +172,10 @@ export class Connection extends ExternalStore<ConnectionStateSnapshot> {
this.notifyChange();
}

OnMessage(e: MessageEvent) {
OnMessage(e: WebSocket.MessageEvent) {
this.#activity = unixNowMs();
if (e.data.length > 0) {
const msg = JSON.parse(e.data);
if ((e.data as string).length > 0) {
const msg = JSON.parse(e.data as string);
const tag = msg[0];
switch (tag) {
case "AUTH": {
@@ -221,7 +222,7 @@ export class Connection extends ExternalStore<ConnectionStateSnapshot> {
}
}

OnError(e: Event) {
OnError(e: WebSocket.Event) {
this.#log("Error: %O", e);
this.notifyChange();
}
@@ -383,12 +384,12 @@ export class Connection extends ExternalStore<ConnectionStateSnapshot> {
}
this.AwaitingAuth.set(challenge, true);
const authEvent = await this.Auth(challenge, this.Address);
return new Promise(resolve => {
if (!authEvent) {
authCleanup();
return Promise.reject("no event");
}
if (!authEvent) {
authCleanup();
throw new Error("No auth event");
}

return await new Promise(resolve => {
const t = setTimeout(() => {
authCleanup();
resolve();
packages/system/src/encrypted.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
import { scryptAsync } from "@noble/hashes/scrypt";
import { sha256 } from "@noble/hashes/sha256";
import { hmac } from "@noble/hashes/hmac";
import { bytesToHex, hexToBytes, randomBytes } from "@noble/hashes/utils";
import { base64 } from "@scure/base";
import { streamXOR as xchacha20 } from "@stablelib/xchacha20";

export class InvalidPinError extends Error {
  constructor() {
    super();
  }
}

/**
 * Pin protected data
 */
export class PinEncrypted {
  static readonly #opts = { N: 2 ** 20, r: 8, p: 1, dkLen: 32 };
  #decrypted?: Uint8Array;
  #encrypted: PinEncryptedPayload;

  constructor(enc: PinEncryptedPayload) {
    this.#encrypted = enc;
  }

  get value() {
    if (!this.#decrypted) throw new Error("Content has not been decrypted yet");
    return bytesToHex(this.#decrypted);
  }

  async decrypt(pin: string) {
    const key = await scryptAsync(pin, base64.decode(this.#encrypted.salt), PinEncrypted.#opts);
    const ciphertext = base64.decode(this.#encrypted.ciphertext);
    const nonce = base64.decode(this.#encrypted.iv);
    const plaintext = xchacha20(key, nonce, ciphertext, new Uint8Array(32));
    if (plaintext.length !== 32) throw new InvalidPinError();
    const mac = base64.encode(hmac(sha256, key, plaintext));
    if (mac !== this.#encrypted.mac) throw new InvalidPinError();
    this.#decrypted = plaintext;
  }

  toPayload() {
    return this.#encrypted;
  }

  static async create(content: string, pin: string) {
    const salt = randomBytes(24);
    const nonce = randomBytes(24);
    const plaintext = hexToBytes(content);
    const key = await scryptAsync(pin, salt, PinEncrypted.#opts);
    const mac = base64.encode(hmac(sha256, key, plaintext));
    const ciphertext = xchacha20(key, nonce, plaintext, new Uint8Array(32));
    const ret = new PinEncrypted({
      salt: base64.encode(salt),
      ciphertext: base64.encode(ciphertext),
      iv: base64.encode(nonce),
      mac,
    });
    ret.#decrypted = plaintext;
    return ret;
  }
}

export interface PinEncryptedPayload {
  salt: string; // for KDF
  ciphertext: string;
  iv: string;
  mac: string;
}
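A rough usage sketch for the new `PinEncrypted` class (exported via `export * from "./encrypted"` in the index hunk below). Note that `decrypt` expects the plaintext to be exactly 32 bytes, so `content` should be a 64-character hex string such as a secret key:

```ts
import { PinEncrypted } from "@snort/system";

(async () => {
  // 32 bytes of hex, e.g. a private key
  const secretHex = "ab".repeat(32);

  // derive a key from the PIN with scrypt, encrypt with XChaCha20, MAC with HMAC-SHA256
  const locked = await PinEncrypted.create(secretHex, "1234");

  // the payload (salt / iv / ciphertext / mac, all base64) is what would be persisted
  const payload = locked.toPayload();

  // later: reconstruct and unlock; a wrong PIN fails the MAC check and throws InvalidPinError
  const restored = new PinEncrypted(payload);
  await restored.decrypt("1234");
  console.log(restored.value === secretHex); // true
})();
```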
@@ -1,6 +1,6 @@
import { EventKind, HexKey, NostrPrefix, NostrEvent, EventSigner, PowMiner } from ".";
import { HashtagRegex, MentionNostrEntityRegex } from "./const";
import { getPublicKey, unixNow } from "@snort/shared";
import { getPublicKey, jitter, unixNow } from "@snort/shared";
import { EventExt } from "./event-ext";
import { tryParseNostrLink } from "./nostr-link";

@@ -12,6 +12,12 @@ export class EventBuilder {
#tags: Array<Array<string>> = [];
#pow?: number;
#powMiner?: PowMiner;
#jitter?: number;

jitter(n: number) {
this.#jitter = n;
return this;
}

kind(k: EventKind) {
this.#kind = k;
@@ -73,8 +79,8 @@ export class EventBuilder {
pubkey: this.#pubkey ?? "",
content: this.#content ?? "",
kind: this.#kind,
created_at: this.#createdAt ?? unixNow(),
tags: this.#tags,
created_at: (this.#createdAt ?? unixNow()) + (this.#jitter ? jitter(this.#jitter) : 0),
tags: this.#tags.sort((a, b) => a[0].localeCompare(b[0])),
} as NostrEvent;
ev.id = EventExt.createId(ev);
return ev;
@@ -94,6 +94,7 @@ export abstract class EventExt {
value: tag[1],
} as Tag;
switch (ret.key) {
case "a":
case "e": {
ret.relay = tag.length > 2 ? tag[2] : undefined;
ret.marker = tag.length > 3 ? tag[3] : undefined;
@@ -102,40 +103,53 @@ export abstract class EventExt {
}
return ret;
}
static extractThread(ev: NostrEvent) {
const isThread = ev.tags.some(a => (a[0] === "e" && a[3] !== "mention") || a[0] == "a");
if (!isThread) {
return undefined;
}

static extractThread(ev: NostrEvent) {
const shouldWriteMarkers = ev.kind === EventKind.TextNote;
const ret = {
mentions: [],
pubKeys: [],
} as Thread;
const eTags = ev.tags.filter(a => a[0] === "e" || a[0] === "a").map(a => EventExt.parseTag(a));
const marked = eTags.some(a => a.marker);
if (!marked) {
ret.root = eTags[0];
ret.root.marker = shouldWriteMarkers ? "root" : undefined;
if (eTags.length > 1) {
ret.replyTo = eTags[eTags.length - 1];
ret.replyTo.marker = shouldWriteMarkers ? "reply" : undefined;
}
if (eTags.length > 2) {
ret.mentions = eTags.slice(1, -1);
if (shouldWriteMarkers) {
ret.mentions.forEach(a => (a.marker = "mention"));
const replyTags = ev.tags.filter(a => a[0] === "e" || a[0] === "a").map(a => EventExt.parseTag(a));
if (replyTags.length > 0) {
const marked = replyTags.some(a => a.marker);
if (!marked) {
ret.root = replyTags[0];
ret.root.marker = shouldWriteMarkers ? "root" : undefined;
if (replyTags.length > 1) {
ret.replyTo = replyTags[replyTags.length - 1];
ret.replyTo.marker = shouldWriteMarkers ? "reply" : undefined;
}
if (replyTags.length > 2) {
ret.mentions = replyTags.slice(1, -1);
if (shouldWriteMarkers) {
ret.mentions.forEach(a => (a.marker = "mention"));
}
}
} else {
const root = replyTags.find(a => a.marker === "root");
const reply = replyTags.find(a => a.marker === "reply");
ret.root = root;
ret.replyTo = reply;
ret.mentions = replyTags.filter(a => a.marker === "mention");
}
} else {
const root = eTags.find(a => a.marker === "root");
const reply = eTags.find(a => a.marker === "reply");
ret.root = root;
ret.replyTo = reply;
ret.mentions = eTags.filter(a => a.marker === "mention");
return undefined;
}
ret.pubKeys = Array.from(new Set(ev.tags.filter(a => a[0] === "p").map(a => a[1])));
return ret;
}

/**
 * Assign props if undefined
 */
static fixupEvent(e: NostrEvent) {
e.tags ??= [];
e.created_at ??= 0;
e.content ??= "";
e.id ??= "";
e.kind ??= 0;
e.pubkey ??= "";
e.sig ??= "";
}
}
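To make the reworked `extractThread` behaviour concrete: for a kind-1 reply that already carries NIP-10 markers, the marked branch is taken and the markers are read back directly. A hedged sketch, assuming `EventExt` is exported from the package root and with tag values shortened for readability:

```ts
import { EventExt, NostrEvent } from "@snort/system";

const reply = {
  kind: 1, // EventKind.TextNote
  tags: [
    ["e", "<root-id>", "wss://relay.example.com", "root"],
    ["e", "<parent-id>", "", "reply"],
    ["p", "<author-pubkey>"],
  ],
  // ...other NostrEvent fields omitted for brevity
} as NostrEvent;

const thread = EventExt.extractThread(reply);
// thread.root?.value    === "<root-id>"
// thread.replyTo?.value === "<parent-id>"
// thread.pubKeys        === ["<author-pubkey>"]
// an event with no e/a tags now returns undefined instead of a partial Thread
```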
@@ -12,6 +12,11 @@ enum EventKind {
SimpleChatMessage = 9, // NIP-29
SealedRumor = 13, // NIP-59
ChatRumor = 14, // NIP-24
PublicChatChannel = 40, // NIP-28
PublicChatMetadata = 41, // NIP-28
PublicChatMessage = 42, // NIP-28
PublicChatMuteMessage = 43, // NIP-28
PublicChatMuteUser = 44, // NIP-28
SnortSubscriptions = 1000, // NIP-XX
Polls = 6969, // NIP-69
GiftWrap = 1059, // NIP-59
@@ -24,7 +29,10 @@ enum EventKind {
TagLists = 30002, // NIP-51c
Badge = 30009, // NIP-58
ProfileBadges = 30008, // NIP-58
LongFormTextNote = 30023, // NIP-23
AppData = 30_078, // NIP-78
LiveEvent = 30311, // NIP-102
UserStatus = 30315, // NIP-38
ZapstrTrack = 31337,
SimpleChatMetadata = 39_000, // NIP-29
ZapRequest = 9734, // NIP 57
@@ -3,16 +3,20 @@ import * as utils from "@noble/curves/abstract/utils";
import { unwrap, getPublicKey, unixNow } from "@snort/shared";

import {
decodeEncryptionPayload,
EventKind,
EventSigner,
FullRelaySettings,
HexKey,
Lists,
MessageEncryptorVersion,
NostrEvent,
NostrLink,
NotSignedNostrEvent,
PowMiner,
PrivateKeySigner,
RelaySettings,
SignerSupports,
TaggedNostrEvent,
u256,
UserMetadata,
@@ -22,6 +26,7 @@ import { EventBuilder } from "./event-builder";
import { EventExt } from "./event-ext";
import { findTag } from "./utils";
import { Nip7Signer } from "./impl/nip7";
import { base64 } from "@scure/base";

type EventBuilderHook = (ev: EventBuilder) => EventBuilder;

@@ -57,16 +62,22 @@ export class EventPublisher {
return new EventPublisher(signer, signer.getPubKey());
}

supports(t: SignerSupports) {
return this.#signer.supports.includes(t);
}

get pubKey() {
return this.#pubKey;
}

/**
 * Apply POW to every event
 * Create a copy of this publisher with PoW
 */
pow(target: number, miner?: PowMiner) {
this.#pow = target;
this.#miner = miner;
const ret = new EventPublisher(this.#signer, this.#pubKey);
ret.#pow = target;
ret.#miner = miner;
return ret;
}

#eb(k: EventKind) {
@@ -180,10 +191,11 @@ export class EventPublisher {

const thread = EventExt.extractThread(replyTo);
if (thread) {
if (thread.root || thread.replyTo) {
eb.tag(["e", thread.root?.value ?? thread.replyTo?.value ?? "", "", "root"]);
const rootOrReplyAsRoot = thread.root || thread.replyTo;
if (rootOrReplyAsRoot) {
eb.tag([rootOrReplyAsRoot.key, rootOrReplyAsRoot.value ?? "", rootOrReplyAsRoot.relay ?? "", "root"]);
}
eb.tag(["e", replyTo.id, replyTo.relays?.[0] ?? "", "reply"]);
eb.tag([...(NostrLink.fromEvent(replyTo).toEventTag() ?? []), "reply"]);

eb.tag(["p", replyTo.pubkey]);
for (const pk of thread.pubKeys) {
@@ -193,7 +205,7 @@ export class EventPublisher {
eb.tag(["p", pk]);
}
} else {
eb.tag(["e", replyTo.id, "", "reply"]);
eb.tag([...(NostrLink.fromEvent(replyTo).toEventTag() ?? []), "reply"]);
// dont tag self in replies
if (replyTo.pubkey !== this.#pubKey) {
eb.tag(["p", replyTo.pubkey]);
@@ -262,6 +274,23 @@ export class EventPublisher {
return await this.#sign(eb);
}

/**
 * Generic decryption using NIP-23 payload scheme
 */
async decryptGeneric(content: string, from: string) {
const pl = decodeEncryptionPayload(content);
switch (pl.v) {
case MessageEncryptorVersion.Nip4: {
const nip4Payload = `${base64.encode(pl.ciphertext)}?iv=${base64.encode(pl.nonce)}`;
return await this.#signer.nip4Decrypt(nip4Payload, from);
}
case MessageEncryptorVersion.XChaCha20: {
return await this.#signer.nip44Decrypt(content, from);
}
}
throw new Error("Not supported version");
}

async decryptDm(note: NostrEvent) {
if (note.kind === EventKind.SealedRumor) {
const unseal = await this.unsealRumor(note);
@@ -1,7 +1,7 @@
import { ReqFilter, UsersRelays } from ".";
import { dedupe, unwrap } from "@snort/shared";
import debug from "debug";
import { FlatReqFilter } from "request-expander";
import { FlatReqFilter } from "./query-optimizer";

const PickNRelays = 2;

@@ -84,7 +84,7 @@ export function splitByWriteRelays(cache: RelayCache, filter: ReqFilter): Array<
},
});
}
debug("GOSSIP")("Picked %o", picked);
debug("GOSSIP")("Picked %O => %O", filter, picked);
return picked;
}

@@ -119,7 +119,7 @@ export function splitFlatByWriteRelays(cache: RelayCache, input: Array<FlatReqFi
} as RelayTaggedFlatFilters);
}

debug("GOSSIP")("Picked %o", picked);
debug("GOSSIP")("Picked %d relays from %d filters", picked.length, input.length);
return picked;
}
@@ -1,6 +1,5 @@
import { MessageEncryptor, MessageEncryptorPayload, MessageEncryptorVersion } from "index";

import { base64 } from "@scure/base";
import { randomBytes } from "@noble/hashes/utils";
import { streamXOR as xchacha20 } from "@stablelib/xchacha20";
import { secp256k1 } from "@noble/curves/secp256k1";
@@ -63,6 +63,10 @@ export class Nip46Signer implements EventSigner {
this.#insideSigner = insideSigner ?? new PrivateKeySigner(secp256k1.utils.randomPrivateKey());
}

get supports(): string[] {
return ["nip04"];
}

get relays() {
return [this.#relay];
}
@@ -21,6 +21,10 @@ declare global {
}

export class Nip7Signer implements EventSigner {
get supports(): string[] {
return ["nip04"];
}

init(): Promise<void> {
return Promise.resolve();
}
@@ -1,9 +1,12 @@
import { AuthHandler, RelaySettings, ConnectionStateSnapshot } from "./connection";
import { RequestBuilder } from "./request-builder";
import { NoteStore } from "./note-collection";
import { NoteStore, NoteStoreSnapshotData } from "./note-collection";
import { Query } from "./query";
import { NostrEvent, ReqFilter } from "./nostr";
import { NostrEvent, ReqFilter, TaggedNostrEvent } from "./nostr";
import { ProfileLoaderService } from "./profile-cache";
import { RelayCache } from "./gossip-model";
import { QueryOptimizer } from "./query-optimizer";
import { base64 } from "@scure/base";

export * from "./nostr-system";
export { default as EventKind } from "./event-kind";
@@ -24,6 +27,8 @@ export * from "./signer";
export * from "./text";
export * from "./pow";
export * from "./pow-util";
export * from "./query-optimizer";
export * from "./encrypted";

export * from "./impl/nip4";
export * from "./impl/nip44";
@@ -40,14 +45,72 @@ export interface SystemInterface {
 * Handler function for NIP-42
 */
HandleAuth?: AuthHandler;

/**
 * Get a snapshot of the relay connections
 */
get Sockets(): Array<ConnectionStateSnapshot>;

/**
 * Get an active query by ID
 * @param id Query ID
 */
GetQuery(id: string): Query | undefined;
Query<T extends NoteStore>(type: { new (): T }, req: RequestBuilder | null): Query;

/**
 * Open a new query to relays
 * @param type Store type
 * @param req Request to send to relays
 */
Query<T extends NoteStore>(type: { new (): T }, req: RequestBuilder): Query;

/**
 * Fetch data from nostr relays asynchronously
 * @param req Request to send to relays
 * @param cb A callback which will fire every 100ms when new data is received
 */
Fetch(req: RequestBuilder, cb?: (evs: Array<TaggedNostrEvent>) => void): Promise<NoteStoreSnapshotData>;

/**
 * Create a new permanent connection to a relay
 * @param address Relay URL
 * @param options Read/Write settings
 */
ConnectToRelay(address: string, options: RelaySettings): Promise<void>;

/**
 * Disconnect permanent relay connection
 * @param address Relay URL
 */
DisconnectRelay(address: string): void;

/**
 * Send an event to all permanent connections
 * @param ev Event to broadcast
 */
BroadcastEvent(ev: NostrEvent): void;

/**
 * Connect to a specific relay and send an event and wait for the response
 * @param relay Relay URL
 * @param ev Event to send
 */
WriteOnceToRelay(relay: string, ev: NostrEvent): Promise<void>;

/**
 * Profile cache/loader
 */
get ProfileLoader(): ProfileLoaderService;

/**
 * Relay cache for "Gossip" model
 */
get RelayCache(): RelayCache;

/**
 * Query optimizer
 */
get QueryOptimizer(): QueryOptimizer;
}

export interface SystemSnapshot {
@@ -74,3 +137,25 @@ export interface MessageEncryptor {
encryptData(plaintext: string, sharedSecet: Uint8Array): Promise<MessageEncryptorPayload> | MessageEncryptorPayload;
decryptData(payload: MessageEncryptorPayload, sharedSecet: Uint8Array): Promise<string> | string;
}

export function decodeEncryptionPayload(p: string) {
if (p.startsWith("{") && p.endsWith("}")) {
const pj = JSON.parse(p) as { v: number; nonce: string; ciphertext: string };
return {
v: pj.v,
nonce: base64.decode(pj.nonce),
ciphertext: base64.decode(pj.ciphertext),
} as MessageEncryptorPayload;
} else {
const buf = base64.decode(p);
return {
v: buf[0],
nonce: buf.subarray(1, 25),
ciphertext: buf.subarray(25),
} as MessageEncryptorPayload;
}
}

export function encodeEncryptionPayload(p: MessageEncryptorPayload) {
return base64.encode(new Uint8Array([p.v, ...p.nonce, ...p.ciphertext]));
}
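The new payload helpers imply a compact binary layout: one version byte, a 24-byte nonce, then the ciphertext, all base64-encoded as a whole. A small round-trip sketch with made-up byte values:

```ts
import {
  decodeEncryptionPayload,
  encodeEncryptionPayload,
  MessageEncryptorPayload,
  MessageEncryptorVersion,
} from "@snort/system";

const payload = {
  v: MessageEncryptorVersion.XChaCha20,
  nonce: new Uint8Array(24),            // 24-byte nonce, all zeros here purely for illustration
  ciphertext: new Uint8Array([1, 2, 3]),
} as MessageEncryptorPayload;

const encoded = encodeEncryptionPayload(payload); // base64(version byte || nonce || ciphertext)
const decoded = decodeEncryptionPayload(encoded);
// decoded.v === payload.v, decoded.nonce is bytes 1..24, decoded.ciphertext is the remainder;
// JSON-shaped legacy payloads ({"v":..,"nonce":..,"ciphertext":..}) take the first branch instead
```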
@@ -2,7 +2,7 @@ import * as utils from "@noble/curves/abstract/utils";
import { bech32 } from "@scure/base";
import { HexKey } from "./nostr";

export enum NostrPrefix {
export const enum NostrPrefix {
PublicKey = "npub",
PrivateKey = "nsec",
Note = "note",
@@ -1,32 +1,74 @@
import { bech32ToHex, hexToBech32 } from "@snort/shared";
import { NostrPrefix, decodeTLV, TLVEntryType, encodeTLV } from ".";
import { bech32ToHex, hexToBech32, unwrap } from "@snort/shared";
import { NostrPrefix, decodeTLV, TLVEntryType, encodeTLV, NostrEvent, TaggedNostrEvent } from ".";
import { findTag } from "./utils";

export interface NostrLink {
type: NostrPrefix;
id: string;
kind?: number;
author?: string;
relays?: Array<string>;
encode(): string;
}
export class NostrLink {
constructor(
readonly type: NostrPrefix,
readonly id: string,
readonly kind?: number,
readonly author?: string,
readonly relays?: Array<string>,
) {}

export function createNostrLink(prefix: NostrPrefix, id: string, relays?: string[], kind?: number, author?: string) {
return {
type: prefix,
id,
relays,
kind,
author,
encode: () => {
if (prefix === NostrPrefix.Note || prefix === NostrPrefix.PublicKey) {
return hexToBech32(prefix, id);
encode(): string {
if (this.type === NostrPrefix.Note || this.type === NostrPrefix.PrivateKey || this.type === NostrPrefix.PublicKey) {
return hexToBech32(this.type, this.id);
} else {
return encodeTLV(this.type, this.id, this.relays, this.kind, this.author);
}
}

toEventTag() {
const relayEntry = this.relays ? [this.relays[0]] : [];
if (this.type === NostrPrefix.PublicKey) {
return ["p", this.id];
} else if (this.type === NostrPrefix.Note || this.type === NostrPrefix.Event) {
return ["e", this.id, ...relayEntry];
} else if (this.type === NostrPrefix.Address) {
return ["a", `${this.kind}:${this.author}:${this.id}`, ...relayEntry];
}
}

matchesEvent(ev: NostrEvent) {
if (this.type === NostrPrefix.Address) {
const dTag = findTag(ev, "d");
if (dTag && dTag === this.id && unwrap(this.author) === ev.pubkey && unwrap(this.kind) === ev.kind) {
return true;
}
if (prefix === NostrPrefix.Address || prefix === NostrPrefix.Event || prefix === NostrPrefix.Profile) {
return encodeTLV(prefix, id, relays, kind, author);
} else if (this.type === NostrPrefix.Event || this.type === NostrPrefix.Note) {
return this.id === ev.id;
}

return false;
}

static fromTag(tag: Array<string>) {
const relays = tag.length > 2 ? [tag[2]] : undefined;
switch (tag[0]) {
case "e": {
return new NostrLink(NostrPrefix.Event, tag[1], undefined, undefined, relays);
}
return "";
},
} as NostrLink;
case "p": {
return new NostrLink(NostrPrefix.Profile, tag[1], undefined, undefined, relays);
}
case "a": {
const [kind, author, dTag] = tag[1].split(":");
return new NostrLink(NostrPrefix.Address, dTag, Number(kind), author, relays);
}
}
throw new Error(`Unknown tag kind ${tag[0]}`);
}

static fromEvent(ev: TaggedNostrEvent | NostrEvent) {
const relays = "relays" in ev ? ev.relays : undefined;

if (ev.kind >= 30_000 && ev.kind < 40_000) {
const dTag = unwrap(findTag(ev, "d"));
return new NostrLink(NostrPrefix.Address, dTag, ev.kind, ev.pubkey, relays);
}
return new NostrLink(NostrPrefix.Event, ev.id, ev.kind, ev.pubkey, relays);
}
}

export function validateNostrLink(link: string): boolean {
@@ -63,19 +105,11 @@ export function parseNostrLink(link: string, prefixHint?: NostrPrefix): NostrLin
if (isPrefix(NostrPrefix.PublicKey)) {
const id = bech32ToHex(entity);
if (id.length !== 64) throw new Error("Invalid nostr link, must contain 32 byte id");
return {
type: NostrPrefix.PublicKey,
id: id,
encode: () => hexToBech32(NostrPrefix.PublicKey, id),
};
return new NostrLink(NostrPrefix.PublicKey, id);
} else if (isPrefix(NostrPrefix.Note)) {
const id = bech32ToHex(entity);
if (id.length !== 64) throw new Error("Invalid nostr link, must contain 32 byte id");
return {
type: NostrPrefix.Note,
id: id,
encode: () => hexToBech32(NostrPrefix.Note, id),
};
return new NostrLink(NostrPrefix.Note, id);
} else if (isPrefix(NostrPrefix.Profile) || isPrefix(NostrPrefix.Event) || isPrefix(NostrPrefix.Address)) {
const decoded = decodeTLV(entity);

@@ -84,45 +118,17 @@ export function parseNostrLink(link: string, prefixHint?: NostrPrefix): NostrLin
const author = decoded.find(a => a.type === TLVEntryType.Author)?.value as string;
const kind = decoded.find(a => a.type === TLVEntryType.Kind)?.value as number;

const encode = () => {
return entity; // return original
};
if (isPrefix(NostrPrefix.Profile)) {
if (id.length !== 64) throw new Error("Invalid nostr link, must contain 32 byte id");
return {
type: NostrPrefix.Profile,
id,
relays,
kind,
author,
encode,
};
return new NostrLink(NostrPrefix.Profile, id, kind, author, relays);
} else if (isPrefix(NostrPrefix.Event)) {
if (id.length !== 64) throw new Error("Invalid nostr link, must contain 32 byte id");
return {
type: NostrPrefix.Event,
id,
relays,
kind,
author,
encode,
};
return new NostrLink(NostrPrefix.Event, id, kind, author, relays);
} else if (isPrefix(NostrPrefix.Address)) {
return {
type: NostrPrefix.Address,
id,
relays,
kind,
author,
encode,
};
return new NostrLink(NostrPrefix.Address, id, kind, author, relays);
}
} else if (prefixHint) {
return {
type: prefixHint,
id: link,
encode: () => hexToBech32(prefixHint, link),
};
return new NostrLink(prefixHint, link);
}
throw new Error("Invalid nostr link");
}
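A short sketch of the new class-based `NostrLink` API shown above, assuming the class is re-exported from the package root: building a link from a raw event tag and turning a parameterized-replaceable link back into an `a` tag.

```ts
import { NostrLink, NostrPrefix } from "@snort/system";

// From a raw tag: ["e", <id>, <relay hint>]
const eLink = NostrLink.fromTag(["e", "<event-id>", "wss://relay.example.com"]);
console.log(eLink.type === NostrPrefix.Event); // true
console.log(eLink.toEventTag()); // ["e", "<event-id>", "wss://relay.example.com"]

// Address (naddr-style) links round-trip through "a" tags as kind:author:d-tag
const aLink = new NostrLink(NostrPrefix.Address, "my-article", 30023, "<author-pubkey>");
console.log(aLink.toEventTag()); // ["a", "30023:<author-pubkey>:my-article"]

// parseNostrLink now returns the same class, so .encode() and .matchesEvent() are
// available on links parsed from npub/note/nprofile/nevent/naddr strings as well.
```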
@@ -4,8 +4,8 @@ import { unwrap, sanitizeRelayUrl, ExternalStore, FeedCache } from "@snort/share
import { NostrEvent, TaggedNostrEvent } from "./nostr";
import { AuthHandler, Connection, RelaySettings, ConnectionStateSnapshot } from "./connection";
import { Query } from "./query";
import { NoteStore } from "./note-collection";
import { BuiltRawReqFilter, RequestBuilder } from "./request-builder";
import { NoteCollection, NoteStore, NoteStoreSnapshotData } from "./note-collection";
import { BuiltRawReqFilter, RequestBuilder, RequestStrategy } from "./request-builder";
import { RelayMetricHandler } from "./relay-metric-handler";
import {
MetadataCache,
@@ -19,6 +19,10 @@ import {
db,
UsersRelays,
} from ".";
import { EventsCache } from "./cache/events";
import { RelayCache } from "./gossip-model";
import { QueryOptimizer, DefaultQueryOptimizer } from "./query-optimizer";
import { trimFilters } from "./request-trim";

/**
 * Manages nostr content retrieval system
@@ -66,26 +70,38 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
 */
#relayMetrics: RelayMetricHandler;

/**
 * General events cache
 */
#eventsCache: FeedCache<NostrEvent>;

/**
 * Query optimizer instance
 */
#queryOptimizer: QueryOptimizer;

constructor(props: {
authHandler?: AuthHandler;
relayCache?: FeedCache<UsersRelays>;
profileCache?: FeedCache<MetadataCache>;
relayMetrics?: FeedCache<RelayMetrics>;
eventsCache?: FeedCache<NostrEvent>;
queryOptimizer?: QueryOptimizer;
}) {
super();
this.#handleAuth = props.authHandler;
this.#relayCache = props.relayCache ?? new UserRelaysCache();
this.#profileCache = props.profileCache ?? new UserProfileCache();
this.#relayMetricsCache = props.relayMetrics ?? new RelayMetricCache();
this.#eventsCache = props.eventsCache ?? new EventsCache();
this.#queryOptimizer = props.queryOptimizer ?? DefaultQueryOptimizer;

this.#profileLoader = new ProfileLoaderService(this, this.#profileCache);
this.#relayMetrics = new RelayMetricHandler(this.#relayMetricsCache);
this.#cleanup();
}
HandleAuth?: AuthHandler | undefined;

/**
 * Profile loader service allows you to request profiles
 */
get ProfileLoader() {
return this.#profileLoader;
}
@@ -94,12 +110,25 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
return [...this.#sockets.values()].map(a => a.snapshot());
}

get RelayCache(): RelayCache {
return this.#relayCache;
}

get QueryOptimizer(): QueryOptimizer {
return this.#queryOptimizer;
}

/**
 * Setup caches
 */
async Init() {
db.ready = await db.isAvailable();
const t = [this.#relayCache.preload(), this.#profileCache.preload(), this.#relayMetricsCache.preload()];
const t = [
this.#relayCache.preload(),
this.#profileCache.preload(),
this.#relayMetricsCache.preload(),
this.#eventsCache.preload(),
];
await Promise.all(t);
}

@@ -112,10 +141,10 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
if (!this.#sockets.has(addr)) {
const c = new Connection(addr, options, this.#handleAuth?.bind(this));
this.#sockets.set(addr, c);
c.OnEvent = (s, e) => this.OnEvent(s, e);
c.OnEose = s => this.OnEndOfStoredEvents(c, s);
c.OnDisconnect = code => this.OnRelayDisconnect(c, code);
c.OnConnected = r => this.OnRelayConnected(c, r);
c.OnEvent = (s, e) => this.#onEvent(s, e);
c.OnEose = s => this.#onEndOfStoredEvents(c, s);
c.OnDisconnect = code => this.#onRelayDisconnect(c, code);
c.OnConnected = r => this.#onRelayConnected(c, r);
await c.Connect();
} else {
// update settings if already connected
@@ -126,7 +155,7 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
}
}

OnRelayConnected(c: Connection, wasReconnect: boolean) {
#onRelayConnected(c: Connection, wasReconnect: boolean) {
if (wasReconnect) {
for (const [, q] of this.Queries) {
q.connectionRestored(c);
@@ -134,22 +163,22 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
}
}

OnRelayDisconnect(c: Connection, code: number) {
#onRelayDisconnect(c: Connection, code: number) {
this.#relayMetrics.onDisconnect(c, code);
for (const [, q] of this.Queries) {
q.connectionLost(c.Id);
}
}

OnEndOfStoredEvents(c: Readonly<Connection>, sub: string) {
#onEndOfStoredEvents(c: Readonly<Connection>, sub: string) {
for (const [, v] of this.Queries) {
v.eose(sub, c);
}
}

OnEvent(sub: string, ev: TaggedNostrEvent) {
#onEvent(sub: string, ev: TaggedNostrEvent) {
for (const [, v] of this.Queries) {
v.onEvent(sub, ev);
v.handleEvent(sub, ev);
}
}

@@ -163,10 +192,10 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
if (!this.#sockets.has(addr)) {
const c = new Connection(addr, { read: true, write: true }, this.#handleAuth?.bind(this), true);
this.#sockets.set(addr, c);
c.OnEvent = (s, e) => this.OnEvent(s, e);
c.OnEose = s => this.OnEndOfStoredEvents(c, s);
c.OnDisconnect = code => this.OnRelayDisconnect(c, code);
c.OnConnected = r => this.OnRelayConnected(c, r);
c.OnEvent = (s, e) => this.#onEvent(s, e);
c.OnEose = s => this.#onEndOfStoredEvents(c, s);
c.OnDisconnect = code => this.#onRelayDisconnect(c, code);
c.OnConnected = r => this.#onRelayConnected(c, r);
await c.Connect();
return c;
}
@@ -190,6 +219,35 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
return this.Queries.get(id);
}

Fetch(req: RequestBuilder, cb?: (evs: Array<TaggedNostrEvent>) => void) {
const q = this.Query(NoteCollection, req);
return new Promise<NoteStoreSnapshotData>(resolve => {
let t: ReturnType<typeof setTimeout> | undefined;
let tBuf: Array<TaggedNostrEvent> = [];
const releaseOnEvent = cb
? q.feed.onEvent(evs => {
if (!t) {
tBuf = [...evs];
t = setTimeout(() => {
t = undefined;
cb(tBuf);
}, 100);
} else {
tBuf.push(...evs);
}
})
: undefined;
const releaseFeedHook = q.feed.hook(() => {
if (q.progress === 1) {
releaseOnEvent?.();
releaseFeedHook();
q.cancel();
resolve(unwrap(q.feed.snapshot.data));
}
});
});
}

Query<T extends NoteStore>(type: { new (): T }, req: RequestBuilder): Query {
const existing = this.Queries.get(req.id);
if (existing) {
@@ -197,9 +255,7 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
if (existing.fromInstance === req.instance) {
return existing;
}
const filters = !req.options?.skipDiff
? req.buildDiff(this.#relayCache, existing.flatFilters)
: req.build(this.#relayCache);
const filters = !req.options?.skipDiff ? req.buildDiff(this, existing.filters) : req.build(this);
if (filters.length === 0 && !!req.options?.skipDiff) {
return existing;
} else {
@@ -212,8 +268,17 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
} else {
const store = new type();

const filters = req.build(this.#relayCache);
const filters = req.build(this);
const q = new Query(req.id, req.instance, store, req.options?.leaveOpen);
if (filters.some(a => a.filters.some(b => b.ids))) {
const expectIds = new Set(filters.flatMap(a => a.filters).flatMap(a => a.ids ?? []));
q.feed.onEvent(async evs => {
const toSet = evs.filter(a => expectIds.has(a.id) && this.#eventsCache.getFromCache(a.id) === undefined);
if (toSet.length > 0) {
await this.#eventsCache.bulkSet(toSet);
}
});
}
this.Queries.set(req.id, q);
for (const subQ of filters) {
this.SendQuery(q, subQ);
@@ -224,6 +289,32 @@ export class NostrSystem extends ExternalStore<SystemSnapshot> implements System
}
}

async SendQuery(q: Query, qSend: BuiltRawReqFilter) {
// trim query of cached ids
for (const f of qSend.filters) {
if (f.ids) {
const cacheResults = await this.#eventsCache.bulkGet(f.ids);
if (cacheResults.length > 0) {
const resultIds = new Set(cacheResults.map(a => a.id));
f.ids = f.ids.filter(a => !resultIds.has(a));
q.insertCompletedTrace(
{
filters: [{ ...f, ids: [...resultIds] }],
strategy: RequestStrategy.ExplicitRelays,
relay: qSend.relay,
},
cacheResults as Array<TaggedNostrEvent>,
);
}
}
}

// check for empty filters
const fNew = trimFilters(qSend.filters);
if (fNew.length === 0) {
return;
}
qSend.filters = fNew;

if (qSend.relay) {
this.#log("Sending query to %s %O", qSend.relay, qSend);
const s = this.#sockets.get(qSend.relay);
|
||||
},
|
||||
} as StoreSnapshot<FlatNoteStore>;
|
||||
|
||||
export type NoteStoreSnapshotData = Readonly<Array<TaggedNostrEvent>> | Readonly<TaggedNostrEvent>;
|
||||
export type NoteStoreSnapshotData = Array<TaggedNostrEvent> | TaggedNostrEvent;
|
||||
export type NoteStoreHook = () => void;
|
||||
export type NoteStoreHookRelease = () => void;
|
||||
export type OnEventCallback = (e: Readonly<Array<TaggedNostrEvent>>) => void;
|
||||
@ -134,10 +134,28 @@ export abstract class HookedNoteStore<TSnapshot extends NoteStoreSnapshotData> i
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A store which doesnt store anything, useful for hooks only
|
||||
*/
|
||||
export class NoopStore extends HookedNoteStore<Array<TaggedNostrEvent>> {
|
||||
override add(ev: readonly TaggedNostrEvent[] | Readonly<TaggedNostrEvent>): void {
|
||||
this.onChange(Array.isArray(ev) ? ev : [ev]);
|
||||
}
|
||||
|
||||
override clear(): void {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
protected override takeSnapshot(): TaggedNostrEvent[] | undefined {
|
||||
// nothing to do
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A simple flat container of events with no duplicates
|
||||
*/
|
||||
export class FlatNoteStore extends HookedNoteStore<Readonly<Array<TaggedNostrEvent>>> {
|
||||
export class FlatNoteStore extends HookedNoteStore<Array<TaggedNostrEvent>> {
|
||||
#events: Array<TaggedNostrEvent> = [];
|
||||
#ids: Set<u256> = new Set();
|
||||
|
||||
@ -176,7 +194,7 @@ export class FlatNoteStore extends HookedNoteStore<Readonly<Array<TaggedNostrEve
|
||||
/**
|
||||
* A note store that holds a single replaceable event for a given user defined key generator function
|
||||
*/
|
||||
export class KeyedReplaceableNoteStore extends HookedNoteStore<Readonly<Array<TaggedNostrEvent>>> {
|
||||
export class KeyedReplaceableNoteStore extends HookedNoteStore<Array<TaggedNostrEvent>> {
|
||||
#keyFn: (ev: TaggedNostrEvent) => string;
|
||||
#events: Map<string, TaggedNostrEvent> = new Map();
|
||||
|
||||
|
@@ -20,15 +20,7 @@ export function minePow(e: NostrPowEvent, target: number) {
e.tags.push(["nonce", ctr.toString(), target.toString()]);
}
do {
//roll ctr and compute id
const now = Math.floor(new Date().getTime() / 1000);
// reset ctr if timestamp changed, this is not really needed but makes the ctr value smaller
if (now !== e.created_at) {
ctr = 0;
e.created_at = now;
}
e.tags[nonceTagIdx][1] = (++ctr).toString();

e.id = createId(e);
} while (countLeadingZeros(e.id) < target);
@@ -37,13 +37,11 @@ export class ProfileLoaderService {
 * Request profile metadata for a set of pubkeys
 */
TrackMetadata(pk: HexKey | Array<HexKey>) {
const bufferNow = [];
for (const p of Array.isArray(pk) ? pk : [pk]) {
if (p.length === 64 && this.#wantsMetadata.add(p)) {
bufferNow.push(p);
if (p.length === 64) {
this.#wantsMetadata.add(p);
}
}
this.#cache.buffer(bufferNow);
}

/**
@@ -64,6 +62,25 @@ export class ProfileLoaderService {
}
}

async fetchProfile(key: string) {
const existing = this.Cache.get(key);
if (existing) {
return existing;
} else {
return await new Promise<MetadataCache>((resolve, reject) => {
this.TrackMetadata(key);
const release = this.Cache.hook(() => {
const existing = this.Cache.getFromCache(key);
if (existing) {
resolve(existing);
release();
this.UntrackMetadata(key);
}
}, key);
});
}
}

async #FetchMetadata() {
const missingFromCache = await this.#cache.buffer([...this.#wantsMetadata]);
packages/system/src/query-optimizer/index.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import { ReqFilter } from "../nostr";
import { expandFilter } from "./request-expander";
import { flatMerge, mergeSimilar } from "./request-merger";
import { diffFilters } from "./request-splitter";

export interface FlatReqFilter {
  keys: number;
  ids?: string;
  authors?: string;
  kinds?: number;
  "#e"?: string;
  "#p"?: string;
  "#t"?: string;
  "#d"?: string;
  "#r"?: string;
  search?: string;
  since?: number;
  until?: number;
  limit?: number;
}

export interface QueryOptimizer {
  expandFilter(f: ReqFilter): Array<FlatReqFilter>;
  getDiff(prev: Array<ReqFilter>, next: Array<ReqFilter>): Array<FlatReqFilter>;
  flatMerge(all: Array<FlatReqFilter>): Array<ReqFilter>;
  compress(all: Array<ReqFilter>): Array<ReqFilter>;
}

export const DefaultQueryOptimizer = {
  expandFilter: (f: ReqFilter) => {
    return expandFilter(f);
  },
  getDiff: (prev: Array<ReqFilter>, next: Array<ReqFilter>) => {
    const diff = diffFilters(
      prev.flatMap(a => expandFilter(a)),
      next.flatMap(a => expandFilter(a)),
    );
    return diff.added;
  },
  flatMerge: (all: Array<FlatReqFilter>) => {
    return flatMerge(all);
  },
  compress: (all: Array<ReqFilter>) => {
    return mergeSimilar(all);
  },
} as QueryOptimizer;
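To make the shape of the optimizer concrete, here is a hedged sketch of exercising `DefaultQueryOptimizer` directly (the `ReqFilter` contents are illustrative placeholders, and the exact flattened output depends on `expandFilter`); `getDiff` should only report what was added between the two builds:

```ts
import { DefaultQueryOptimizer, ReqFilter } from "@snort/system";

const prev: Array<ReqFilter> = [{ kinds: [1], authors: ["<pubkey-a>"] }];
const next: Array<ReqFilter> = [{ kinds: [1], authors: ["<pubkey-a>", "<pubkey-b>"] }];

// flat filters that exist in `next` but not in `prev` (i.e. the newly added author)
const added = DefaultQueryOptimizer.getDiff(prev, next);

// merge the flat form back into regular REQ filters before sending to a relay
const filters = DefaultQueryOptimizer.flatMerge(added);
console.log(filters); // expected: something like [{ kinds: [1], authors: ["<pubkey-b>"] }]
```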
@@ -1,20 +1,5 @@
import { ReqFilter } from "./nostr";

export interface FlatReqFilter {
keys: number;
ids?: string;
authors?: string;
kinds?: number;
"#e"?: string;
"#p"?: string;
"#t"?: string;
"#d"?: string;
"#r"?: string;
search?: string;
since?: number;
until?: number;
limit?: number;
}
import { FlatReqFilter } from ".";
import { ReqFilter } from "../nostr";

/**
 * Expand a filter into its most fine grained form
@@ -1,6 +1,6 @@
import { distance } from "@snort/shared";
import { ReqFilter } from ".";
import { FlatReqFilter } from "./request-expander";
import { ReqFilter } from "..";
import { FlatReqFilter } from ".";

/**
 * Keys which can change the entire meaning of the filter outside the array types
@@ -105,7 +105,7 @@ export function flatMerge(all: Array<FlatReqFilter>): Array<ReqFilter> {
function mergeFiltersInSet(filters: Array<FlatReqFilter>) {
return filters.reduce((acc, a) => {
Object.entries(a).forEach(([k, v]) => {
if (k === "keys") return;
if (k === "keys" || v === undefined) return;
if (DiscriminatorKeys.includes(k)) {
acc[k] = v;
} else {
@@ -1,5 +1,5 @@
import { flatFilterEq } from "./utils";
import { FlatReqFilter } from "./request-expander";
import { flatFilterEq } from "../utils";
import { FlatReqFilter } from ".";

export function diffFilters(prev: Array<FlatReqFilter>, next: Array<FlatReqFilter>, calcRemoved?: boolean) {
const added = [];
@@ -4,9 +4,7 @@ import { unixNowMs, unwrap } from "@snort/shared";

import { Connection, ReqFilter, Nips, TaggedNostrEvent } from ".";
import { NoteStore } from "./note-collection";
import { flatMerge } from "./request-merger";
import { BuiltRawReqFilter } from "./request-builder";
import { FlatReqFilter, expandFilter } from "./request-expander";
import { eventMatchesFilter } from "./request-matcher";

/**
@@ -19,7 +17,6 @@ class QueryTrace {
eose?: number;
close?: number;
#wasForceClosed = false;
readonly flatFilters: Array<FlatReqFilter>;
readonly #fnClose: (id: string) => void;
readonly #fnProgress: () => void;

@@ -34,7 +31,6 @@ class QueryTrace {
this.start = unixNowMs();
this.#fnClose = fnClose;
this.#fnProgress = fnProgress;
this.flatFilters = filters.flatMap(expandFilter);
}

sentToRelay() {
@@ -166,18 +162,14 @@ export class Query implements QueryBase {
 * Recompute the complete set of compressed filters from all query traces
 */
get filters() {
return flatMerge(this.flatFilters);
}

get flatFilters() {
return this.#tracing.flatMap(a => a.flatFilters);
return this.#tracing.flatMap(a => a.filters);
}

get feed() {
return this.#feed;
}

onEvent(sub: string, e: TaggedNostrEvent) {
handleEvent(sub: string, e: TaggedNostrEvent) {
for (const t of this.#tracing) {
if (t.id === sub) {
if (t.filters.some(v => eventMatchesFilter(e, v))) {
@@ -205,6 +197,28 @@ export class Query implements QueryBase {
this.#stopCheckTraces();
}

/**
 * Insert a new trace as a placeholder
 */
insertCompletedTrace(subq: BuiltRawReqFilter, data: Readonly<Array<TaggedNostrEvent>>) {
const qt = new QueryTrace(
subq.relay,
subq.filters,
"",
() => {
// nothing to close
},
() => {
// nothing to progress
},
);
qt.sentToRelay();
qt.gotEose();
this.#tracing.push(qt);
this.feed.add(data);
return qt;
}

sendToRelay(c: Connection, subq: BuiltRawReqFilter) {
if (!this.#canSendQuery(c, subq)) {
return;
@ -1,12 +1,11 @@
|
||||
import debug from "debug";
|
||||
import { v4 as uuid } from "uuid";
|
||||
import { appendDedupe, sanitizeRelayUrl, unixNowMs } from "@snort/shared";
|
||||
import { appendDedupe, dedupe, sanitizeRelayUrl, unixNowMs, unwrap } from "@snort/shared";
|
||||
|
||||
import { ReqFilter, u256, HexKey, EventKind } from ".";
|
||||
import { diffFilters } from "./request-splitter";
|
||||
import EventKind from "./event-kind";
|
||||
import { NostrLink, NostrPrefix, SystemInterface } from "index";
|
||||
import { ReqFilter, u256, HexKey } from "./nostr";
|
||||
import { RelayCache, splitByWriteRelays, splitFlatByWriteRelays } from "./gossip-model";
|
||||
import { flatMerge, mergeSimilar } from "./request-merger";
|
||||
import { FlatReqFilter, expandFilter } from "./request-expander";
|
||||
|
||||
/**
|
||||
* Which strategy is used when building REQ filters
|
||||
@ -83,6 +82,12 @@ export class RequestBuilder {
|
||||
return ret;
|
||||
}
|
||||
|
||||
withBareFilter(f: ReqFilter) {
|
||||
const ret = new RequestFilterBuilder(f);
|
||||
this.#builders.push(ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
withOptions(opt: RequestBuilderOptions) {
|
||||
this.#options = {
|
||||
...this.#options,
|
||||
@ -95,26 +100,25 @@ export class RequestBuilder {
    return this.#builders.map(f => f.filter);
  }

  build(relays: RelayCache): Array<BuiltRawReqFilter> {
    const expanded = this.#builders.flatMap(a => a.build(relays, this.id));
    return this.#groupByRelay(expanded);
  build(system: SystemInterface): Array<BuiltRawReqFilter> {
    const expanded = this.#builders.flatMap(a => a.build(system.RelayCache, this.id));
    return this.#groupByRelay(system, expanded);
  }

  /**
   * Detects a change in request from a previous set of filters
   */
  buildDiff(relays: RelayCache, prev: Array<FlatReqFilter>): Array<BuiltRawReqFilter> {
  buildDiff(system: SystemInterface, prev: Array<ReqFilter>): Array<BuiltRawReqFilter> {
    const start = unixNowMs();

    const next = this.#builders.flatMap(f => expandFilter(f.filter));
    const diff = diffFilters(prev, next);
    const diff = system.QueryOptimizer.getDiff(prev, this.buildRaw());
    const ts = unixNowMs() - start;
    this.#log("buildDiff %s %d ms", this.id, ts);
    if (diff.changed) {
      return splitFlatByWriteRelays(relays, diff.added).map(a => {
    this.#log("buildDiff %s %d ms +%d", this.id, ts, diff.length);
    if (diff.length > 0) {
      return splitFlatByWriteRelays(system.RelayCache, diff).map(a => {
        return {
          strategy: RequestStrategy.AuthorsRelays,
          filters: flatMerge(a.filters),
          filters: system.QueryOptimizer.flatMerge(a.filters),
          relay: a.relay,
        };
      });
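`build` and `buildDiff` now take the whole `SystemInterface` and delegate diffing/merging to its `QueryOptimizer`. The intent of `buildDiff` is to send only the filters added since the previous build; a naive sketch of that idea (the real optimizer works on expanded flat filters and is more precise):

```ts
import type { ReqFilter } from "@snort/system";

// Illustrative only: return the filters in `next` that are not already present in `prev`.
function naiveGetDiff(prev: Array<ReqFilter>, next: Array<ReqFilter>): Array<ReqFilter> {
  const seen = new Set(prev.map(f => JSON.stringify(f)));
  return next.filter(f => !seen.has(JSON.stringify(f)));
}
```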
@ -129,7 +133,7 @@ export class RequestBuilder {
   * @param expanded
   * @returns
   */
  #groupByRelay(expanded: Array<BuiltRawReqFilter>) {
  #groupByRelay(system: SystemInterface, expanded: Array<BuiltRawReqFilter>) {
    const relayMerged = expanded.reduce((acc, v) => {
      const existing = acc.get(v.relay);
      if (existing) {
@ -142,7 +146,9 @@ export class RequestBuilder {

    const filtersSquashed = [...relayMerged.values()].map(a => {
      return {
        filters: mergeSimilar(a.flatMap(b => b.filters)),
        filters: system.QueryOptimizer.flatMerge(
          a.flatMap(b => b.filters.flatMap(c => system.QueryOptimizer.expandFilter(c))),
        ),
        relay: a[0].relay,
        strategy: a[0].strategy,
      } as BuiltRawReqFilter;
@ -156,9 +162,13 @@ export class RequestBuilder {
 * Builder class for a single request filter
 */
export class RequestFilterBuilder {
  #filter: ReqFilter = {};
  #filter: ReqFilter;
  #relays = new Set<string>();

  constructor(f?: ReqFilter) {
    this.#filter = f ?? {};
  }

  get filter() {
    return { ...this.#filter };
  }
@ -209,9 +219,9 @@ export class RequestFilterBuilder {
    return this;
  }

  tag(key: "e" | "p" | "d" | "t" | "r" | "a" | "g", value?: Array<string>) {
  tag(key: "e" | "p" | "d" | "t" | "r" | "a" | "g" | string, value?: Array<string>) {
    if (!value) return this;
    this.#filter[`#${key}`] = appendDedupe(this.#filter[`#${key}`], value);
    this.#filter[`#${key}`] = appendDedupe(this.#filter[`#${key}`] as Array<string>, value);
    return this;
  }

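`tag` now accepts any tag name, not just the previous hard-coded union, and still writes a `#<key>` entry into the filter. For example (illustrative values):

```ts
import { RequestBuilder } from "@snort/system";

const rb = new RequestBuilder("tag-example");
rb.withFilter()
  .kinds([1])
  .tag("t", ["nostr"]) // previously allowed key
  .tag("l", ["en"]);   // now any string key works, producing a "#l" filter entry
```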
@ -221,6 +231,36 @@ export class RequestFilterBuilder {
    return this;
  }

  /**
   * Get event from link
   */
  link(link: NostrLink) {
    if (link.type === NostrPrefix.Address) {
      this.tag("d", [link.id])
        .kinds([unwrap(link.kind)])
        .authors([unwrap(link.author)]);
    } else {
      this.ids([link.id]);
    }
    link.relays?.forEach(v => this.relay(v));
    return this;
  }

  /**
   * Get replies to link with e/a tags
   */
  replyToLink(links: Array<NostrLink>) {
    const types = dedupe(links.map(a => a.type));
    if (types.length > 1) throw new Error("Cannot add multiple links of different kinds");

    const tags = links.map(a => unwrap(a.toEventTag()));
    this.tag(
      tags[0][0],
      tags.map(v => v[1]),
    );
    return this;
  }

  /**
   * Build/expand this filter into a set of relay specific queries
   */
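The new `link`/`replyToLink` helpers build filters directly from a `NostrLink`: the linked event itself (by id, or by d/kind/author for addresses) and the replies that reference it via an e/a tag. A rough usage sketch (obtaining the `NostrLink`, e.g. by parsing an nevent/naddr string, is assumed to happen elsewhere):

```ts
import { RequestBuilder, type NostrLink } from "@snort/system";

// Placeholder: a NostrLink produced elsewhere (e.g. from an nevent/naddr string).
declare const link: NostrLink;

const rb = new RequestBuilder("thread-example");
rb.withFilter().link(link);          // fetch the linked event itself
rb.withFilter().replyToLink([link]); // fetch replies tagging the link with e/a
```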
@ -250,7 +290,7 @@ export class RequestFilterBuilder {

    return [
      {
        filters: [this.filter],
        filters: [this.#filter],
        relay: "",
        strategy: RequestStrategy.DefaultRelays,
      },
packages/system/src/request-trim.ts (new file, 15 lines)
@ -0,0 +1,15 @@
import { ReqFilter } from "nostr";

/**
 * Remove empty filters, filters which would result in no results
 */
export function trimFilters(filters: Array<ReqFilter>) {
  const fNew = [];
  for (const f of filters) {
    const ent = Object.entries(f).filter(([, v]) => Array.isArray(v));
    if (ent.every(([, v]) => (v as Array<string | number>).length > 0)) {
      fNew.push(f);
    }
  }
  return fNew;
}
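`trimFilters` drops any filter that contains an empty array field, since such a filter can never match anything. For example:

```ts
import { trimFilters } from "./request-trim";

const filters = [
  { kinds: [1], authors: [] },      // empty authors -> can never match, removed
  { kinds: [1], authors: ["abc"] }, // kept
  { limit: 10 },                    // no array fields -> kept
];
console.log(trimFilters(filters)); // [{ kinds: [1], authors: ["abc"] }, { limit: 10 }]
```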
@ -3,10 +3,12 @@ import { getPublicKey } from "@snort/shared";
import { EventExt } from "./event-ext";
import { Nip4WebCryptoEncryptor } from "./impl/nip4";
import { XChaCha20Encryptor } from "./impl/nip44";
import { MessageEncryptorPayload, MessageEncryptorVersion } from "./index";
import { MessageEncryptorVersion, decodeEncryptionPayload, encodeEncryptionPayload } from "./index";
import { NostrEvent } from "./nostr";
import { base64 } from "@scure/base";

export type SignerSupports = "nip04" | "nip44" | string;

export interface EventSigner {
  init(): Promise<void>;
  getPubKey(): Promise<string> | string;
@ -15,6 +17,7 @@ export interface EventSigner {
  nip44Encrypt(content: string, key: string): Promise<string>;
  nip44Decrypt(content: string, otherKey: string): Promise<string>;
  sign(ev: NostrEvent): Promise<NostrEvent>;
  get supports(): Array<SignerSupports>;
}

export class PrivateKeySigner implements EventSigner {
@ -30,6 +33,10 @@ export class PrivateKeySigner implements EventSigner {
    this.#publicKey = getPublicKey(this.#privateKey);
  }

  get supports(): string[] {
    return ["nip04", "nip44"];
  }

  get privateKey() {
    return this.#privateKey;
  }
@ -67,11 +74,11 @@ export class PrivateKeySigner implements EventSigner {
    const enc = new XChaCha20Encryptor();
    const shared = enc.getSharedSecret(this.#privateKey, key);
    const data = enc.encryptData(content, shared);
    return this.#encodePayload(data);
    return encodeEncryptionPayload(data);
  }

  async nip44Decrypt(content: string, otherKey: string) {
    const payload = this.#decodePayload(content);
    const payload = decodeEncryptionPayload(content);
    if (payload.v !== MessageEncryptorVersion.XChaCha20) throw new Error("Invalid payload version");

    const enc = new XChaCha20Encryptor();
@ -79,28 +86,6 @@ export class PrivateKeySigner implements EventSigner {
    return enc.decryptData(payload, shared);
  }

  #decodePayload(p: string) {
    if (p.startsWith("{") && p.endsWith("}")) {
      const pj = JSON.parse(p) as { v: number; nonce: string; ciphertext: string };
      return {
        v: pj.v,
        nonce: base64.decode(pj.nonce),
        ciphertext: base64.decode(pj.ciphertext),
      } as MessageEncryptorPayload;
    } else {
      const buf = base64.decode(p);
      return {
        v: buf[0],
        nonce: buf.subarray(1, 25),
        ciphertext: buf.subarray(25),
      } as MessageEncryptorPayload;
    }
  }

  #encodePayload(p: MessageEncryptorPayload) {
    return base64.encode(new Uint8Array([p.v, ...p.nonce, ...p.ciphertext]));
  }

  sign(ev: NostrEvent): Promise<NostrEvent> {
    EventExt.sign(ev, this.#privateKey);
    return Promise.resolve(ev);
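With payload encoding moved to the shared `encodeEncryptionPayload`/`decodeEncryptionPayload` helpers, a NIP-44 style round trip through `PrivateKeySigner` looks roughly like this (keys are placeholders; the constructor taking a hex private key is assumed from the surrounding code):

```ts
import { PrivateKeySigner } from "@snort/system";

// Placeholder keys; use real 32-byte hex keys in practice.
const alice = new PrivateKeySigner("<alice-private-key-hex>");
const bobPubKey = "<bob-public-key-hex>";

async function roundTrip() {
  // Encrypts with the shared secret and encodes version byte + nonce + ciphertext.
  const cipherText = await alice.nip44Encrypt("hello", bobPubKey);
  // The same key pair derives the same shared secret, so the sender can decrypt too.
  const plainText = await alice.nip44Decrypt(cipherText, bobPubKey);
  console.log(plainText); // "hello"
}
```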
@ -2,10 +2,12 @@ import { ExternalStore } from "@snort/shared";

import { SystemSnapshot, SystemInterface, ProfileLoaderService } from ".";
import { AuthHandler, ConnectionStateSnapshot, RelaySettings } from "./connection";
import { NostrEvent } from "./nostr";
import { NoteStore } from "./note-collection";
import { NostrEvent, TaggedNostrEvent } from "./nostr";
import { NoteStore, NoteStoreSnapshotData } from "./note-collection";
import { Query } from "./query";
import { RequestBuilder } from "./request-builder";
import { RelayCache } from "./gossip-model";
import { QueryOptimizer } from "./query-optimizer";

export class SystemWorker extends ExternalStore<SystemSnapshot> implements SystemInterface {
  #port: MessagePort;
@ -21,10 +23,21 @@ export class SystemWorker extends ExternalStore<SystemSnapshot> implements Syste
    }
  }

  Fetch(req: RequestBuilder, cb?: (evs: Array<TaggedNostrEvent>) => void): Promise<NoteStoreSnapshotData> {
    throw new Error("Method not implemented.");
  }

  get ProfileLoader(): ProfileLoaderService {
    throw new Error("Method not implemented.");
  }

  get RelayCache(): RelayCache {
    throw new Error("Method not implemented.");
  }

  get QueryOptimizer(): QueryOptimizer {
    throw new Error("Method not implemented.");
  }

  HandleAuth?: AuthHandler;

  get Sockets(): ConnectionStateSnapshot[] {
@ -1,5 +1,5 @@
import { equalProp } from "@snort/shared";
import { FlatReqFilter } from "./request-expander";
import { FlatReqFilter } from "./query-optimizer";
import { NostrEvent, ReqFilter } from "./nostr";

export function findTag(e: NostrEvent, tag: string) {
@ -4,6 +4,8 @@ import { HexKey, NostrEvent } from "./nostr";
import { findTag } from "./utils";
import { MetadataCache } from "./cache";

const ParsedZapCache = new Map<string, ParsedZap>();

function getInvoice(zap: NostrEvent): InvoiceDetails | undefined {
  const bolt11 = findTag(zap, "bolt11");
  if (!bolt11) {
@ -13,6 +15,11 @@ function getInvoice(zap: NostrEvent): InvoiceDetails | undefined {
}

export function parseZap(zapReceipt: NostrEvent, userCache: FeedCache<MetadataCache>, refNote?: NostrEvent): ParsedZap {
  const existing = ParsedZapCache.get(zapReceipt.id);
  if (existing) {
    return existing;
  }

  let innerZapJson = findTag(zapReceipt, "description");
  if (innerZapJson) {
    try {
@ -67,7 +74,7 @@ export function parseZap(zapReceipt: NostrEvent, userCache: FeedCache<MetadataCa
      // ignored: console.debug("Invalid zap", zapReceipt, e);
    }
  }
  return {
  const ret = {
    id: zapReceipt.id,
    zapService: zapReceipt.pubkey,
    amount: 0,
@ -75,6 +82,8 @@ export function parseZap(zapReceipt: NostrEvent, userCache: FeedCache<MetadataCa
    anonZap: false,
    errors: ["invalid zap, parsing failed"],
  };
  ParsedZapCache.set(ret.id, ret);
  return ret;
}

export interface ParsedZap {
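With the module-level `ParsedZapCache`, `parseZap` first checks whether this receipt id was parsed before and returns the memoized result. Roughly (the receipt and user cache here are placeholders):

```ts
import { parseZap, type NostrEvent } from "@snort/system";

// Placeholders: a kind 9735 zap receipt and the profile cache used for lookups.
declare const zapReceipt: NostrEvent;
declare const userCache: any; // FeedCache<MetadataCache> in the real code

const first = parseZap(zapReceipt, userCache);
const second = parseZap(zapReceipt, userCache);
console.log(first === second); // true once this receipt id has been cached
```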
packages/system/tests/node.ts (new file, 14 lines)
@ -0,0 +1,14 @@
import { NostrSystem, SystemInterface } from "..";

const Relay = "wss://relay.snort.social/";

const system = new NostrSystem({}) as SystemInterface;

async function test() {
  await system.ConnectToRelay(Relay, { read: true, write: true });
  setTimeout(() => {
    system.DisconnectRelay(Relay);
  }, 1000);
}

test().catch(console.error);
@ -29,7 +29,7 @@ describe("splitByUrl", () => {
      "https://i.imgur.com/rkqhjeq.png",
      " Every form of money that could be inflated by way of force or technological advancement has been. ",
      "https://www.dw.com/de/amtsinhaber-mnangagwa-gewinnt-präsidentenwahl-in-simbabwe/a-66640006?maca=de-rss-de-all-1119-xml-atom",
      " and some shit."
      " and some shit.",
    ];

    expect(splitByUrl(inputStr)).toEqual(expectedOutput);