forked from Kieran/snort
Kieran 2023-07-22 19:37:46 +01:00
parent 74a3cd7754
commit b1f93c9fd8
Signed by: Kieran
GPG Key ID: DE71CEB3925BE941
65 changed files with 1115 additions and 1029 deletions

View File

@ -6,7 +6,7 @@ concurrency:
limit: 1
trigger:
branch:
- main
- main
metadata:
namespace: git
steps:
@ -34,9 +34,9 @@ steps:
- img build -t voidic/snort:latest --platform linux/amd64,linux/arm64 -f Dockerfile.prebuilt .
- img push voidic/snort:latest
volumes:
- name: cache
claim:
name: docker-cache
- name: cache
claim:
name: docker-cache
---
kind: pipeline
type: kubernetes
@ -60,9 +60,9 @@ steps:
- yarn workspace @snort/app eslint
- yarn workspace @snort/app prettier --check .
volumes:
- name: cache
claim:
name: docker-cache
- name: cache
claim:
name: docker-cache
---
kind: pipeline
type: kubernetes
@ -71,7 +71,7 @@ concurrency:
limit: 1
trigger:
branch:
- main
- main
metadata:
namespace: git
steps:
@ -98,9 +98,9 @@ steps:
- 'git commit -a -m "chore: Update translations"'
- git push -u origin main
volumes:
- name: cache
claim:
name: docker-cache
- name: cache
claim:
name: docker-cache
---
kind: pipeline
type: kubernetes
@ -109,7 +109,7 @@ concurrency:
limit: 1
trigger:
event:
- tag
- tag
metadata:
namespace: git
steps:
@ -137,6 +137,6 @@ steps:
- img build -t voidic/snort:$DRONE_TAG --platform linux/amd64,linux/arm64 -f Dockerfile.prebuilt .
- img push voidic/snort:$DRONE_TAG
volumes:
- name: cache
claim:
name: docker-cache
- name: cache
claim:
name: docker-cache

View File

@ -29,13 +29,13 @@ jobs:
- name: Rust cache
uses: swatinem/rust-cache@v2
with:
workspaces: './src-tauri -> target'
workspaces: "./src-tauri -> target"
- name: Sync node version and setup cache
uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'yarn'
node-version: "16"
cache: "yarn"
- name: Install frontend dependencies
run: yarn install
- name: Build the app
@ -44,7 +44,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tagName: ${{ github.ref_name }}
releaseName: 'Snort v__VERSION__'
releaseBody: 'See the assets to download and install this version.'
releaseName: "Snort v__VERSION__"
releaseBody: "See the assets to download and install this version."
releaseDraft: true
prerelease: false
prerelease: false

.vscode/settings.json
View File

@ -1,11 +1,11 @@
{
"files.exclude": {
"**/.git": true,
"**/.svn": true,
"**/.hg": true,
"**/CVS": true,
"**/.DS_Store": true,
"**/Thumbs.db": true,
"**/node_modules": true
}
}
"files.exclude": {
"**/.git": true,
"**/.svn": true,
"**/.hg": true,
"**/CVS": true,
"**/.DS_Store": true,
"**/Thumbs.db": true,
"**/node_modules": true
}
}

View File

@ -63,6 +63,7 @@ $ yarn build
Translations are managed on [Crowdin](https://crowdin.com/project/snort).
To extract translations run:
```bash
yarn workspace @snort/app intl-extract
yarn workspace @snort/app intl-compile
```

View File

@ -1,7 +1,6 @@
interface Env {
}
interface Env {}
export const onRequest: PagesFunction<Env> = async (context) => {
export const onRequest: PagesFunction<Env> = async context => {
const id = context.params.id as string;
const next = await context.next();
@ -11,16 +10,16 @@ export const onRequest: PagesFunction<Env> = async (context) => {
body: await next.arrayBuffer(),
headers: {
"user-agent": "Snort-Functions/1.0 (https://snort.social)",
"content-type": "text/plain"
}
"content-type": "text/plain",
},
});
if (rsp.ok) {
const body = await rsp.text();
if (body.length > 0) {
return new Response(body, {
headers: {
"content-type": "text/html"
}
"content-type": "text/html",
},
});
}
}
@ -28,4 +27,4 @@ export const onRequest: PagesFunction<Env> = async (context) => {
// ignore
}
return next;
}
};

View File

@ -1,7 +1,6 @@
interface Env {
}
interface Env {}
export const onRequest: PagesFunction<Env> = async (context) => {
export const onRequest: PagesFunction<Env> = async context => {
const id = context.params.id as string;
const next = await context.next();
@ -11,16 +10,16 @@ export const onRequest: PagesFunction<Env> = async (context) => {
body: await next.arrayBuffer(),
headers: {
"user-agent": "Snort-Functions/1.0 (https://snort.social)",
"content-type": "text/plain"
}
"content-type": "text/plain",
},
});
if (rsp.ok) {
const body = await rsp.text();
if (body.length > 0) {
return new Response(body, {
headers: {
"content-type": "text/html"
}
"content-type": "text/html",
},
});
}
}
@ -28,4 +27,4 @@ export const onRequest: PagesFunction<Env> = async (context) => {
// ignore
}
return next;
}
};

View File

@ -1,8 +1,8 @@
{
"compilerOptions": {
"target": "esnext",
"module": "esnext",
"lib": ["esnext"],
"types": ["@cloudflare/workers-types"]
}
}
"compilerOptions": {
"target": "esnext",
"module": "esnext",
"lib": ["esnext"],
"types": ["@cloudflare/workers-types"]
}
}

View File

@ -11,5 +11,10 @@
"devDependencies": {
"@tauri-apps/cli": "^1.2.3",
"@cloudflare/workers-types": "^4.20230307.0"
},
"prettier": {
"printWidth": 120,
"bracketSameLine": true,
"arrowParens": "avoid"
}
}
}

View File

@ -1,5 +0,0 @@
{
"printWidth": 120,
"bracketSameLine": true,
"arrowParens": "avoid"
}

View File

@ -65,4 +65,4 @@
"author": "",
"license": "ISC",
"description": ""
}
}

View File

@ -1,7 +1,7 @@
import { NostrError } from "../common"
import { RawEvent, parseEvent } from "../event"
import { Conn } from "./conn"
import * as utils from "@noble/curves/abstract/utils";
import * as utils from "@noble/curves/abstract/utils"
import { EventEmitter } from "./emitter"
import { fetchRelayInfo, ReadyState, Relay } from "./relay"
import { Filters } from "../filters"
@ -71,9 +71,9 @@ export class Nostr extends EventEmitter {
opts?.fetchInfo === false
? Promise.resolve({})
: fetchRelayInfo(relayUrl).catch((e) => {
this.#error(e)
return {}
})
this.#error(e)
return {}
})
// If there is no existing connection, open a new one.
const conn = new Conn({
@ -128,7 +128,8 @@ export class Nostr extends EventEmitter {
if (conn.relay.readyState !== ReadyState.CONNECTING) {
this.#error(
new NostrError(
`bug: expected connection to ${relayUrl.toString()} to have readyState CONNECTING, got ${conn.relay.readyState
`bug: expected connection to ${relayUrl.toString()} to have readyState CONNECTING, got ${
conn.relay.readyState
}`
)
)
@ -293,7 +294,7 @@ export class Nostr extends EventEmitter {
relay.info === undefined
? undefined
: // Deep copy of the info.
JSON.parse(JSON.stringify(relay.info))
JSON.parse(JSON.stringify(relay.info))
return { ...relay, info }
}
})

View File

@ -1,6 +1,6 @@
import * as secp from "@noble/curves/secp256k1"
import * as utils from "@noble/curves/abstract/utils";
import {sha256 as sha} from "@noble/hashes/sha256";
import * as utils from "@noble/curves/abstract/utils"
import { sha256 as sha } from "@noble/hashes/sha256"
import base64 from "base64-js"
import { bech32 } from "bech32"
@ -92,11 +92,7 @@ export function schnorrSign(data: Hex, priv: PrivateKey): Hex {
/**
* Verify that the elliptic curve signature is correct.
*/
export function schnorrVerify(
sig: Hex,
data: Hex,
key: PublicKey
): boolean {
export function schnorrVerify(sig: Hex, data: Hex, key: PublicKey): boolean {
return secp.schnorr.verify(sig.toString(), data.toString(), key.toString())
}
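The hunk above also collapses the `schnorrVerify` signature onto a single line. As a minimal round-trip sketch using the two helpers exactly as declared there (the hex values are placeholders and the module path is an assumption):

```ts
// Illustrative only: key/id values are placeholders, the import path is assumed.
import { schnorrSign, schnorrVerify } from "./crypto";

const privateKey = "<64-char hex private key>";
const publicKey = "<64-char hex public key>";
const eventId = "<64-char hex event id>";

// Sign an event id with the private key, then verify against the public key.
const sig = schnorrSign(eventId, privateKey);
if (!schnorrVerify(sig, eventId, publicKey)) {
  throw new Error("signature did not verify");
}
```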

View File

@ -159,14 +159,14 @@ export async function signEvent<T extends RawEvent>(
* Parse an event from its raw format.
*/
export function parseEvent(event: RawEvent): Event {
if (event.id !== (serializeEventId(event))) {
if (event.id !== serializeEventId(event)) {
throw new NostrError(
`invalid id ${event.id} for event ${JSON.stringify(
event
)}, expected ${serializeEventId(event)}`
)
}
if (!(schnorrVerify(event.sig, event.id, event.pubkey))) {
if (!schnorrVerify(event.sig, event.id, event.pubkey)) {
throw new NostrError(`invalid signature for event ${JSON.stringify(event)}`)
}
@ -221,9 +221,7 @@ export function parseEvent(event: RawEvent): Event {
}
}
function serializeEventId(
event: UnsignedWithPubkey<RawEvent>
): EventId {
function serializeEventId(event: UnsignedWithPubkey<RawEvent>): EventId {
const serialized = JSON.stringify([
0,
event.pubkey,

View File

@ -1,6 +1,6 @@
const fs = require("fs")
const isProduction = process.env.NODE_ENV == "production";
const isProduction = process.env.NODE_ENV == "production"
const entry = {
lib: "./src/index.ts",

View File

@ -1,8 +1,6 @@
/**
* Regex to match email address
*/
export const EmailRegex =
// eslint-disable-next-line no-useless-escape
/^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
export const EmailRegex =
// eslint-disable-next-line no-useless-escape
/^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;

View File

@ -1,14 +1,13 @@
declare module "light-bolt11-decoder" {
export function decode(pr?: string): ParsedInvoice;
export function decode(pr?: string): ParsedInvoice;
export interface ParsedInvoice {
paymentRequest: string;
sections: Section[];
}
export interface ParsedInvoice {
paymentRequest: string;
sections: Section[];
}
export interface Section {
name: string;
value: string | Uint8Array | number | undefined;
}
export interface Section {
name: string;
value: string | Uint8Array | number | undefined;
}
}

View File

@ -38,7 +38,7 @@ export abstract class FeedCache<TCached> {
}
async preload() {
const keys = await this.table?.toCollection().primaryKeys() ?? [];
const keys = (await this.table?.toCollection().primaryKeys()) ?? [];
this.onTable = new Set<string>(keys.map(a => a as string));
}

View File

@ -3,4 +3,4 @@ export * from "./lnurl";
export * from "./utils";
export * from "./work-queue";
export * from "./feed-cache";
export * from "./invoices";
export * from "./invoices";

View File

@ -1,48 +1,55 @@
import { bytesToHex } from "@noble/hashes/utils";
import { decode as invoiceDecode } from "light-bolt11-decoder";
export interface InvoiceDetails {
amount?: number;
expire?: number;
timestamp?: number;
description?: string;
descriptionHash?: string;
paymentHash?: string;
expired: boolean;
pr: string;
amount?: number;
expire?: number;
timestamp?: number;
description?: string;
descriptionHash?: string;
paymentHash?: string;
expired: boolean;
pr: string;
}
export function decodeInvoice(pr: string): InvoiceDetails | undefined {
try {
const parsed = invoiceDecode(pr);
try {
const parsed = invoiceDecode(pr);
const amountSection = parsed.sections.find(a => a.name === "amount");
const amount = amountSection ? Number(amountSection.value as number | string) : undefined;
const amountSection = parsed.sections.find(a => a.name === "amount");
const amount = amountSection ? Number(amountSection.value as number | string) : undefined;
const timestampSection = parsed.sections.find(a => a.name === "timestamp");
const timestamp = timestampSection ? Number(timestampSection.value as number | string) : undefined;
const timestampSection = parsed.sections.find(a => a.name === "timestamp");
const timestamp = timestampSection ? Number(timestampSection.value as number | string) : undefined;
const expirySection = parsed.sections.find(a => a.name === "expiry");
const expire = expirySection ? Number(expirySection.value as number | string) : undefined;
const descriptionSection = parsed.sections.find(a => a.name === "description")?.value;
const descriptionHashSection = parsed.sections.find(a => a.name === "description_hash")?.value;
const paymentHashSection = parsed.sections.find(a => a.name === "payment_hash")?.value;
const ret = {
pr,
amount: amount,
expire: timestamp && expire ? timestamp + expire : undefined,
timestamp: timestamp,
description: descriptionSection as string | undefined,
descriptionHash: descriptionHashSection ? (typeof descriptionHashSection === "string" ? descriptionHashSection as string : bytesToHex(descriptionHashSection as Uint8Array)) : undefined,
paymentHash: paymentHashSection ? (typeof paymentHashSection === "string" ? paymentHashSection as string : bytesToHex(paymentHashSection as Uint8Array)) : undefined,
expired: false,
};
if (ret.expire) {
ret.expired = ret.expire < new Date().getTime() / 1000;
}
return ret;
} catch (e) {
console.error(e);
const expirySection = parsed.sections.find(a => a.name === "expiry");
const expire = expirySection ? Number(expirySection.value as number | string) : undefined;
const descriptionSection = parsed.sections.find(a => a.name === "description")?.value;
const descriptionHashSection = parsed.sections.find(a => a.name === "description_hash")?.value;
const paymentHashSection = parsed.sections.find(a => a.name === "payment_hash")?.value;
const ret = {
pr,
amount: amount,
expire: timestamp && expire ? timestamp + expire : undefined,
timestamp: timestamp,
description: descriptionSection as string | undefined,
descriptionHash: descriptionHashSection
? typeof descriptionHashSection === "string"
? (descriptionHashSection as string)
: bytesToHex(descriptionHashSection as Uint8Array)
: undefined,
paymentHash: paymentHashSection
? typeof paymentHashSection === "string"
? (paymentHashSection as string)
: bytesToHex(paymentHashSection as Uint8Array)
: undefined,
expired: false,
};
if (ret.expire) {
ret.expired = ret.expire < new Date().getTime() / 1000;
}
return ret;
} catch (e) {
console.error(e);
}
}
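For orientation, a hedged usage sketch of the reformatted `decodeInvoice` above — it returns `undefined` when parsing throws, as the catch branch shows. The invoice string is a truncated placeholder and the package import path is an assumption (the index file above re-exports `./invoices`):

```ts
import { decodeInvoice } from "@snort/shared"; // import path assumed

const details = decodeInvoice("lnbc10u1p3..."); // truncated placeholder invoice
if (details && !details.expired) {
  console.log("amount:", details.amount, "expires at:", details.expire);
}
```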

View File

@ -205,26 +205,26 @@ export class LNURL {
}
export interface LNURLService {
tag: string
nostrPubkey?: string
minSendable?: number
maxSendable?: number
metadata: string
callback: string
commentAllowed?: number
tag: string;
nostrPubkey?: string;
minSendable?: number;
maxSendable?: number;
metadata: string;
callback: string;
commentAllowed?: number;
}
export interface LNURLStatus {
status: "SUCCESS" | "ERROR"
reason?: string
status: "SUCCESS" | "ERROR";
reason?: string;
}
export interface LNURLInvoice extends LNURLStatus {
pr?: string
successAction?: LNURLSuccessAction
pr?: string;
successAction?: LNURLSuccessAction;
}
export interface LNURLSuccessAction {
description?: string
url?: string
description?: string;
url?: string;
}

View File

@ -71,7 +71,10 @@ export function countMembers(a: any) {
return ret;
}
export function equalProp(a: string | number | Array<string | number> | undefined, b: string | number | Array<string | number> | undefined) {
export function equalProp(
a: string | number | Array<string | number> | undefined,
b: string | number | Array<string | number> | undefined
) {
if ((a !== undefined && b === undefined) || (a === undefined && b !== undefined)) {
return false;
}
@ -130,7 +133,7 @@ export function appendDedupe<T>(a?: Array<T>, b?: Array<T>) {
export const sha256 = (str: string | Uint8Array): string => {
return utils.bytesToHex(sha2(str));
}
};
export function getPublicKey(privKey: string) {
return utils.bytesToHex(secp.schnorr.getPublicKey(privKey));

View File

@ -3,54 +3,49 @@
React hooks for @snort/system
Sample:
```js
import { useMemo } from "react"
import { useMemo } from "react";
import { useRequestBuilder, useUserProfile } from "@snort/system-react";
import { FlatNoteStore, NostrSystem, RequestBuilder, TaggedRawEvent } from "@snort/system"
import { FlatNoteStore, NostrSystem, RequestBuilder, TaggedRawEvent } from "@snort/system";
// singleton nostr system class
const System = new NostrSystem({});
// some bootstrap relays
[
"wss://relay.snort.social",
"wss://nos.lol"
].forEach(r => System.ConnectToRelay(r, { read: true, write: false }));
["wss://relay.snort.social", "wss://nos.lol"].forEach(r => System.ConnectToRelay(r, { read: true, write: false }));
export function Note({ ev }: { ev: TaggedRawEvent }) {
// get profile from cache or request a profile from relays
const profile = useUserProfile(System, ev.pubkey);
// get profile from cache or request a profile from relays
const profile = useUserProfile(System, ev.pubkey);
return <div>
Post by: {profile.name ?? profile.display_name}
<p>
{ev.content}
</p>
return (
<div>
Post by: {profile.name ?? profile.display_name}
<p>{ev.content}</p>
</div>
);
}
export function UserPosts(props: { pubkey: string }) {
const sub = useMemo(() => {
const rb = new RequestBuilder("get-posts");
rb.withFilter()
.authors([props.pubkey])
.kinds([1])
.limit(10);
const sub = useMemo(() => {
const rb = new RequestBuilder("get-posts");
rb.withFilter().authors([props.pubkey]).kinds([1]).limit(10);
return rb;
}, [props.pubkey]);
return rb;
}, [props.pubkey]);
const data = useRequestBuilder<FlatNoteStore>(System, FlatNoteStore, sub);
return (
<>
{data.data.map(a => <Note ev={a} />)}
</>
)
const data = useRequestBuilder < FlatNoteStore > (System, FlatNoteStore, sub);
return (
<>
{data.data.map(a => (
<Note ev={a} />
))}
</>
);
}
export function MyApp() {
return (
<UserPosts pubkey="63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed" />
)
return <UserPosts pubkey="63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed" />;
}
```
```

View File

@ -1,48 +1,42 @@
import { useMemo } from "react"
import { useMemo } from "react";
import { useRequestBuilder, useUserProfile } from "../src";
import { FlatNoteStore, NostrSystem, RequestBuilder, TaggedRawEvent } from "@snort/system"
import { FlatNoteStore, NostrSystem, RequestBuilder, TaggedRawEvent } from "@snort/system";
const System = new NostrSystem({});
// some bootstrap relays
[
"wss://relay.snort.social",
"wss://nos.lol"
].forEach(r => System.ConnectToRelay(r, { read: true, write: false }));
["wss://relay.snort.social", "wss://nos.lol"].forEach(r => System.ConnectToRelay(r, { read: true, write: false }));
export function Note({ ev }: { ev: TaggedRawEvent }) {
const profile = useUserProfile(System, ev.pubkey);
const profile = useUserProfile(System, ev.pubkey);
return <div>
Post by: {profile.name ?? profile.display_name}
<p>
{ev.content}
</p>
return (
<div>
Post by: {profile.name ?? profile.display_name}
<p>{ev.content}</p>
</div>
);
}
export function UserPosts(props: { pubkey: string }) {
const sub = useMemo(() => {
const rb = new RequestBuilder("get-posts");
rb.withFilter()
.authors([props.pubkey])
.kinds([1])
.limit(10);
const sub = useMemo(() => {
const rb = new RequestBuilder("get-posts");
rb.withFilter().authors([props.pubkey]).kinds([1]).limit(10);
return rb;
}, [props.pubkey]);
return rb;
}, [props.pubkey]);
const data = useRequestBuilder<FlatNoteStore>(System, FlatNoteStore, sub);
return (
<>
{data.data.map(a => <Note ev={a} />)}
</>
)
const data = useRequestBuilder<FlatNoteStore>(System, FlatNoteStore, sub);
return (
<>
{data.data.map(a => (
<Note ev={a} />
))}
</>
);
}
export function MyApp() {
return (
<UserPosts pubkey="63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed" />
)
}
return <UserPosts pubkey="63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed" />;
}

View File

@ -20,4 +20,4 @@
"@snort/system": "^1.0.16",
"@snort/shared": "^1.0.4"
}
}
}

View File

@ -1,3 +1,3 @@
export * from "./useRequestBuilder";
export * from "./useSystemState";
export * from "./useUserProfile";
export * from "./useUserProfile";

View File

@ -7,7 +7,7 @@ import { unwrap } from "@snort/shared";
*/
const useRequestBuilder = <TStore extends NoteStore, TSnapshot = ReturnType<TStore["getSnapshotData"]>>(
system: SystemInterface,
type: { new(): TStore },
type: { new (): TStore },
rb: RequestBuilder | null
) => {
const subscribe = (onChanged: () => void) => {
@ -37,4 +37,4 @@ const useRequestBuilder = <TStore extends NoteStore, TSnapshot = ReturnType<TSto
);
};
export { useRequestBuilder };
export { useRequestBuilder };

View File

@ -16,7 +16,7 @@ export function useUserProfile(system: NostrSystem, pubKey?: HexKey): MetadataCa
if (pubKey) {
system.ProfileLoader.UntrackMetadata(pubKey);
}
}
};
},
() => system.ProfileLoader.Cache.getFromCache(pubKey)
);

View File

@ -3,14 +3,15 @@
A collection of caching and querying techniques used by https://snort.social to serve all content from the nostr protocol.
Simple example:
```js
import {
NostrSystem,
EventPublisher,
UserRelaysCache,
RequestBuilder,
FlatNoteStore,
StoreSnapshot
import {
NostrSystem,
EventPublisher,
UserRelaysCache,
RequestBuilder,
FlatNoteStore,
StoreSnapshot
} from "@snort/system"
// Provided in-memory / indexedDb cache for relays
@ -63,4 +64,4 @@ const System = new NostrSystem({
// these patterns will be managed in @snort/system-react to make it easier to use react or other UI frameworks
// release();
})();
```
```

View File

@ -1,4 +1,4 @@
import { NostrSystem, EventPublisher, UserRelaysCache, RequestBuilder, FlatNoteStore, StoreSnapshot } from "../src"
import { NostrSystem, EventPublisher, UserRelaysCache, RequestBuilder, FlatNoteStore, StoreSnapshot } from "../src";
// Provided in-memory / indexedDb cache for relays
// You can also implement your own with "RelayCache" interface
@ -6,47 +6,47 @@ const RelaysCache = new UserRelaysCache();
// example auth handler using NIP-07
const AuthHandler = async (challenge: string, relay: string) => {
const pub = await EventPublisher.nip7();
if (pub) {
return await pub.nip42Auth(challenge, relay);
}
}
const pub = await EventPublisher.nip7();
if (pub) {
return await pub.nip42Auth(challenge, relay);
}
};
// Singleton instance to store all connections and access query fetching system
const System = new NostrSystem({
relayCache: RelaysCache,
authHandler: AuthHandler // can be left undefined if you dont care about NIP-42 Auth
relayCache: RelaysCache,
authHandler: AuthHandler, // can be left undefined if you dont care about NIP-42 Auth
});
(async () => {
// connect to one "bootstrap" relay to pull profiles/relay lists from
// also used as a fallback relay when gossip model doesnt know which relays to pick, or "authors" are not provided in the request
await System.ConnectToRelay("wss://relay.snort.social", { read: true, write: false });
// connect to one "bootstrap" relay to pull profiles/relay lists from
// also used as a fallback relay when gossip model doesnt know which relays to pick, or "authors" are not provided in the request
await System.ConnectToRelay("wss://relay.snort.social", { read: true, write: false });
// ID should be unique to the use case, this is important as all data fetched from this ID will be merged into the same NoteStore
const rb = new RequestBuilder("get-posts");
rb.withFilter()
.authors(["63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed"]) // Kieran pubkey
.kinds([1])
.limit(10);
// ID should be unique to the use case, this is important as all data fetched from this ID will be merged into the same NoteStore
const rb = new RequestBuilder("get-posts");
rb.withFilter()
.authors(["63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed"]) // Kieran pubkey
.kinds([1])
.limit(10);
const q = System.Query<FlatNoteStore>(FlatNoteStore, rb);
// basic usage using "onEvent", fired for every event added to the store
q.onEvent = (sub, e) => {
console.debug(sub, e);
}
const q = System.Query<FlatNoteStore>(FlatNoteStore, rb);
// basic usage using "onEvent", fired for every event added to the store
q.onEvent = (sub, e) => {
console.debug(sub, e);
};
// Hookable type using change notification, limited to every 500ms
const release = q.feed.hook(() => {
// since we use the FlatNoteStore we expect NostrEvent[]
// other stores provide different data, like a single event instead of an array (latest version)
const state = q.feed.snapshot as StoreSnapshot<ReturnType<FlatNoteStore["getSnapshotData"]>>;
// Hookable type using change notification, limited to every 500ms
const release = q.feed.hook(() => {
// since we use the FlatNoteStore we expect NostrEvent[]
// other stores provide different data, like a single event instead of an array (latest version)
const state = q.feed.snapshot as StoreSnapshot<ReturnType<FlatNoteStore["getSnapshotData"]>>;
// do something with snapshot of store
console.log(`We have ${state.data.length} events now!`)
});
// do something with snapshot of store
console.log(`We have ${state.data.length} events now!`);
});
// release the hook when its not needed anymore
// these patterns will be managed in @snort/system-react to make it easier to use react or other UI frameworks
// release();
})();
// release the hook when its not needed anymore
// these patterns will be managed in @snort/system-react to make it easier to use react or other UI frameworks
// release();
})();

View File

@ -33,4 +33,4 @@
"dexie": "^3.2.4",
"uuid": "^9.0.0"
}
}
}

View File

@ -6,37 +6,37 @@ const NAME = "snort-system";
const VERSION = 2;
const STORES = {
users: "++pubkey, name, display_name, picture, nip05, npub",
relayMetrics: "++addr",
userRelays: "++pubkey",
events: "++id, pubkey, created_at"
users: "++pubkey, name, display_name, picture, nip05, npub",
relayMetrics: "++addr",
userRelays: "++pubkey",
events: "++id, pubkey, created_at",
};
export class SnortSystemDb extends Dexie {
ready = false;
users!: Table<MetadataCache>;
relayMetrics!: Table<RelayMetrics>;
userRelays!: Table<UsersRelays>;
events!: Table<NostrEvent>;
dms!: Table<NostrEvent>;
ready = false;
users!: Table<MetadataCache>;
relayMetrics!: Table<RelayMetrics>;
userRelays!: Table<UsersRelays>;
events!: Table<NostrEvent>;
dms!: Table<NostrEvent>;
constructor() {
super(NAME);
this.version(VERSION).stores(STORES);
}
constructor() {
super(NAME);
this.version(VERSION).stores(STORES);
}
isAvailable() {
if ("indexedDB" in window) {
return new Promise<boolean>(resolve => {
const req = window.indexedDB.open("dummy", 1);
req.onsuccess = () => {
resolve(true);
};
req.onerror = () => {
resolve(false);
};
});
}
return Promise.resolve(false);
isAvailable() {
if ("indexedDB" in window) {
return new Promise<boolean>(resolve => {
const req = window.indexedDB.open("dummy", 1);
req.onsuccess = () => {
resolve(true);
};
req.onerror = () => {
resolve(false);
};
});
}
}
return Promise.resolve(false);
}
}
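As a rough usage sketch of the schema above — `"++pubkey"` in `STORES` makes `pubkey` the primary key of the `users` table. It assumes the package's index exports the shared `SnortSystemDb` instance as `db`, which is how the `RelayMetricCache` file below imports it:

```ts
// Sketch only; the `db` export and call pattern are assumptions.
import { db } from ".";

async function loadProfile(pubkey: string) {
  if (await db.isAvailable()) {
    // pubkey is the primary key, so Dexie's Table.get() looks it up directly
    return await db.users.get(pubkey);
  }
}
```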

View File

@ -70,4 +70,4 @@ export function mapEventToProfile(ev: NostrEvent) {
} catch (e) {
console.error("Failed to parse JSON", ev, e);
}
}
}

View File

@ -2,21 +2,21 @@ import { db, RelayMetrics } from ".";
import { FeedCache } from "@snort/shared";
export class RelayMetricCache extends FeedCache<RelayMetrics> {
constructor() {
super("RelayMetrics", db.relayMetrics);
}
constructor() {
super("RelayMetrics", db.relayMetrics);
}
key(of: RelayMetrics): string {
return of.addr;
}
key(of: RelayMetrics): string {
return of.addr;
}
override async preload(): Promise<void> {
await super.preload();
// load everything
await this.buffer([...this.onTable]);
}
override async preload(): Promise<void> {
await super.preload();
// load everything
await this.buffer([...this.onTable]);
}
takeSnapshot(): Array<RelayMetrics> {
return [...this.cache.values()];
}
}
takeSnapshot(): Array<RelayMetrics> {
return [...this.cache.values()];
}
}

View File

@ -145,4 +145,4 @@ export class UserProfileCache extends FeedCache<MetadataCache> {
}
}
}
}
}

View File

@ -26,4 +26,4 @@ export class UserRelaysCache extends FeedCache<UsersRelays> {
takeSnapshot(): Array<UsersRelays> {
return [...this.cache.values()];
}
}
}

View File

@ -421,7 +421,12 @@ export class Connection extends ExternalStore<ConnectionStateSnapshot> {
const lastActivity = unixNowMs() - this.#activity;
if (lastActivity > 30_000 && !this.IsClosed) {
if (this.ActiveRequests.size > 0) {
this.#log("%s Inactive connection has %d active requests! %O", this.Address, this.ActiveRequests.size, this.ActiveRequests);
this.#log(
"%s Inactive connection has %d active requests! %O",
this.Address,
this.ActiveRequests.size,
this.ActiveRequests
);
} else {
this.Close();
}

View File

@ -9,7 +9,6 @@ export const DefaultConnectTimeout = 2000;
// eslint-disable-next-line no-useless-escape
export const HashtagRegex = /(#[^\s!@#$%^&*()=+.\/,\[{\]};:'"?><]+)/g;
/**
* How long profile cache should be considered valid for
*/

View File

@ -6,17 +6,17 @@ import { EventKind, HexKey, NostrEvent } from ".";
import { Nip4WebCryptoEncryptor } from "./impl/nip4";
export interface Tag {
key: string
value?: string
relay?: string
marker?: string // NIP-10
key: string;
value?: string;
relay?: string;
marker?: string; // NIP-10
}
export interface Thread {
root?: Tag
replyTo?: Tag
mentions: Array<Tag>
pubKeys: Array<HexKey>
root?: Tag;
replyTo?: Tag;
mentions: Array<Tag>;
pubKeys: Array<HexKey>;
}
export abstract class EventExt {
@ -41,7 +41,7 @@ export abstract class EventExt {
const sig = secp.schnorr.sign(e.id, key);
e.sig = utils.bytesToHex(sig);
if (!(secp.schnorr.verify(e.sig, e.id, e.pubkey))) {
if (!secp.schnorr.verify(e.sig, e.id, e.pubkey)) {
throw new Error("Signing failed");
}
}
@ -84,12 +84,12 @@ export abstract class EventExt {
static parseTag(tag: Array<string>) {
if (tag.length < 1) {
throw new Error("Invalid tag, must have more than 2 items")
throw new Error("Invalid tag, must have more than 2 items");
}
const ret = {
key: tag[0],
value: tag[1]
value: tag[1],
} as Tag;
switch (ret.key) {
case "e": {
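The hunk cuts off inside `parseTag`'s switch, so the exact handling of `"e"` tags is not shown here. A hypothetical usage sketch, with the `relay`/`marker` fields assumed from the `Tag` interface above and the import path assumed:

```ts
// Hypothetical usage of the static parseTag helper declared above.
import { EventExt } from "@snort/system";

// NIP-10 style "e" tag: [tag name, event id, relay hint, marker]
const tag = EventExt.parseTag(["e", "<hex event id>", "wss://relay.snort.social", "reply"]);
console.log(tag.key, tag.value, tag.relay, tag.marker); // relay/marker population is an assumption
```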

View File

@ -95,7 +95,7 @@ export class EventPublisher {
* Create an EventPublisher for a private key
*/
static privateKey(privateKey: string) {
const signer = new PrivateKeySigner(privateKey)
const signer = new PrivateKeySigner(privateKey);
return new EventPublisher(signer, signer.getPubKey());
}

View File

@ -1,6 +1,7 @@
import { ReqFilter, UsersRelays } from ".";
import { unwrap } from "@snort/shared";
import { dedupe, unwrap } from "@snort/shared";
import debug from "debug";
import { FlatReqFilter } from "request-expander";
const PickNRelays = 2;
@ -9,6 +10,11 @@ export interface RelayTaggedFilter {
filter: ReqFilter;
}
export interface RelayTaggedFlatFilters {
relay: string;
filters: Array<FlatReqFilter>;
}
export interface RelayTaggedFilters {
relay: string;
filters: Array<ReqFilter>;
@ -43,11 +49,10 @@ export function splitAllByWriteRelays(cache: RelayCache, filters: Array<ReqFilte
/**
* Split filters by authors
* @param filter
* @returns
*/
export function splitByWriteRelays(cache: RelayCache, filter: ReqFilter): Array<RelayTaggedFilter> {
if ((filter.authors?.length ?? 0) === 0) {
const authors = filter.authors;
if ((authors?.length ?? 0) === 0) {
return [
{
relay: "",
@ -56,10 +61,13 @@ export function splitByWriteRelays(cache: RelayCache, filter: ReqFilter): Array<
];
}
const allRelays = unwrap(filter.authors).map(a => {
const allRelays = unwrap(authors).map(a => {
return {
key: a,
relays: cache.getFromCache(a)?.relays?.filter(a => a.settings.write).sort(() => Math.random() < 0.5 ? 1 : -1),
relays: cache
.getFromCache(a)
?.relays?.filter(a => a.settings.write)
.sort(() => (Math.random() < 0.5 ? 1 : -1)),
};
});
@ -83,7 +91,7 @@ export function splitByWriteRelays(cache: RelayCache, filter: ReqFilter): Array<
// <key, relay[]> - pick n top relays
// <relay, key[]> - map keys per relay (for subscription filter)
const userPickedRelays = unwrap(filter.authors).map(k => {
const userPickedRelays = unwrap(authors).map(k => {
// pick top 3 relays for this key
const relaysForKey = topRelays
.filter(([, v]) => v.has(k))
@ -116,3 +124,98 @@ export function splitByWriteRelays(cache: RelayCache, filter: ReqFilter): Array<
debug("GOSSIP")("Picked %o", picked);
return picked;
}
/**
* Split filters by author
*/
export function splitFlatByWriteRelays(cache: RelayCache, input: Array<FlatReqFilter>): Array<RelayTaggedFlatFilters> {
const authors = input.filter(a => a.authors).map(a => unwrap(a.authors));
if (authors.length === 0) {
return [
{
relay: "",
filters: input,
},
];
}
const topRelays = pickTopRelays(cache, authors, PickNRelays);
const pickedRelays = dedupe(topRelays.flatMap(a => a.relays));
const picked = pickedRelays.map(a => {
const keysOnPickedRelay = new Set(userPickedRelays.filter(b => b.relaysForKey.includes(a)).map(b => b.k));
return {
relay: a,
filter: {
...filter,
authors: [...keysOnPickedRelay],
},
} as RelayTaggedFilter;
});
if (missing.length > 0) {
picked.push({
relay: "",
filter: {
...filter,
authors: missing.map(a => a.key),
},
});
}
debug("GOSSIP")("Picked %o", picked);
return picked;
}
/**
* Pick most popular relays for each authors
*/
function pickTopRelays(cache: RelayCache, authors: Array<string>, n: number) {
// map of pubkey -> [write relays]
const allRelays = authors.map(a => {
return {
key: a,
relays: cache
.getFromCache(a)
?.relays?.filter(a => a.settings.write)
.sort(() => (Math.random() < 0.5 ? 1 : -1)),
};
});
const missing = allRelays.filter(a => a.relays === undefined || a.relays.length === 0);
const hasRelays = allRelays.filter(a => a.relays !== undefined && a.relays.length > 0);
// map of relay -> [pubkeys]
const relayUserMap = hasRelays.reduce((acc, v) => {
for (const r of unwrap(v.relays)) {
if (!acc.has(r.url)) {
acc.set(r.url, new Set([v.key]));
} else {
unwrap(acc.get(r.url)).add(v.key);
}
}
return acc;
}, new Map<string, Set<string>>());
// selection algo will just pick relays with the most users
const topRelays = [...relayUserMap.entries()].sort(([, v], [, v1]) => v1.size - v.size);
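The excerpt (and the hunk) cuts off inside `pickTopRelays`. The comments describe the idea — build a relay-to-authors map, then favour the relays shared by the most of the requested authors. A rough standalone sketch of that selection step, using illustrative names rather than the library's own types:

```ts
// Sketch: given each author's write relays, rank relays by how many of the
// requested authors use them, then keep the top n per author.
function pickTopRelaysSketch(relaysByAuthor: Map<string, Array<string>>, n: number): Map<string, Array<string>> {
  const usersPerRelay = new Map<string, Set<string>>();
  for (const [author, relays] of relaysByAuthor) {
    for (const r of relays) {
      let users = usersPerRelay.get(r);
      if (!users) {
        users = new Set<string>();
        usersPerRelay.set(r, users);
      }
      users.add(author);
    }
  }
  // relays ordered by popularity among the requested authors
  const ranked = [...usersPerRelay.entries()].sort(([, a], [, b]) => b.size - a.size).map(([url]) => url);
  const picked = new Map<string, Array<string>>();
  for (const [author, relays] of relaysByAuthor) {
    picked.set(author, ranked.filter(r => relays.includes(r)).slice(0, n));
  }
  return picked; // author -> up to n relays, biased toward widely shared relays
}
```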