Start converting components again

Jonathan Staab 2023-07-21 06:32:58 -07:00
parent 479f9d6681
commit a7cae60ca9
39 changed files with 1860 additions and 1933 deletions


@@ -1,7 +1,8 @@
 # Current
 
 - [ ] Refactor
-- [ ] Fix load new notes button
+- [ ] Move pubkey loader to utils
+- [ ] Have meta intercept subscribe/publish rather than listen to specific events
 - [ ] Speed up note detail
 - [ ] Fix feed control state
 - [ ] Remove external dependencies from engine, open source it?


@@ -1,5 +1,5 @@
 import {identity} from "ramda"
-import {createDefaultEngine} from "src/engine"
+import {Engine} from "src/engine"
 
 export const DUFFLEPUD_URL = import.meta.env.VITE_DUFFLEPUD_URL
@@ -28,7 +28,7 @@ export const DEFAULT_FOLLOWS = (import.meta.env.VITE_DEFAULT_FOLLOWS || "")
 export const ENABLE_ZAPS = JSON.parse(import.meta.env.VITE_ENABLE_ZAPS)
 
-const engine = createDefaultEngine({
+const engine = new Engine({
   DUFFLEPUD_URL,
   MULTIPLEXTR_URL,
   FORCE_RELAYS,
@@ -39,40 +39,40 @@ const engine = createDefaultEngine({
 })
 
 export default engine
 
-export const Alerts = engine.Alerts
-export const Builder = engine.Builder
-export const Content = engine.Content
-export const Crypt = engine.Crypt
-export const Directory = engine.Directory
-export const Events = engine.Events
-export const Keys = engine.Keys
-export const Meta = engine.Meta
-export const Network = engine.Network
-export const Nip02 = engine.Nip02
-export const Nip04 = engine.Nip04
-export const Nip05 = engine.Nip05
-export const Nip28 = engine.Nip28
-export const Nip57 = engine.Nip57
-export const Nip65 = engine.Nip65
-export const Outbox = engine.Outbox
-export const PubkeyLoader = engine.PubkeyLoader
-export const Storage = engine.Storage
-export const User = engine.User
-export const alerts = engine.Alerts
-export const builder = engine.Builder
-export const content = engine.Content
-export const directory = engine.Directory
-export const events = engine.Events
-export const keys = engine.Keys
-export const meta = engine.Meta
-export const network = engine.Network
-export const nip02 = engine.Nip02
-export const nip04 = engine.Nip04
-export const nip05 = engine.Nip05
-export const nip28 = engine.Nip28
-export const nip57 = engine.Nip57
-export const nip65 = engine.Nip65
-export const outbox = engine.Outbox
-export const pubkeyLoader = engine.PubkeyLoader
-export const storage = engine.Storage
-export const user = engine.User
+export const Alerts = engine.components.Alerts
+export const Builder = engine.components.Builder
+export const Content = engine.components.Content
+export const Crypt = engine.components.Crypt
+export const Directory = engine.components.Directory
+export const Events = engine.components.Events
+export const Keys = engine.components.Keys
+export const Meta = engine.components.Meta
+export const Network = engine.components.Network
+export const Nip02 = engine.components.Nip02
+export const Nip04 = engine.components.Nip04
+export const Nip05 = engine.components.Nip05
+export const Nip28 = engine.components.Nip28
+export const Nip57 = engine.components.Nip57
+export const Nip65 = engine.components.Nip65
+export const Outbox = engine.components.Outbox
+export const PubkeyLoader = engine.components.PubkeyLoader
+export const Storage = engine.components.Storage
+export const User = engine.components.User
+export const alerts = engine.components.Alerts
+export const builder = engine.components.Builder
+export const content = engine.components.Content
+export const directory = engine.components.Directory
+export const events = engine.components.Events
+export const keys = engine.components.Keys
+export const meta = engine.components.Meta
+export const network = engine.components.Network
+export const nip02 = engine.components.Nip02
+export const nip04 = engine.components.Nip04
+export const nip05 = engine.components.Nip05
+export const nip28 = engine.components.Nip28
+export const nip57 = engine.components.Nip57
+export const nip65 = engine.components.Nip65
+export const outbox = engine.components.Outbox
+export const pubkeyLoader = engine.components.PubkeyLoader
+export const storage = engine.components.Storage
+export const user = engine.components.User


@@ -4,6 +4,8 @@
   import {stringToHue, hsl} from "src/util/misc"
   import ImageCircle from "src/partials/ImageCircle.svelte"
   import LogoSvg from "src/partials/LogoSvg.svelte"
+  import type {Readable} from 'src/engine/util/store'
+  import type {Profile} from 'src/engine/types'
   import {directory} from "src/app/engine"
 
   export let pubkey
@@ -12,7 +14,7 @@
   const hue = stringToHue(pubkey)
   const primary = hsl(hue, {lightness: 80})
   const secondary = hsl(hue, {saturation: 30, lightness: 30})
-  const profile = directory.profiles.key(pubkey).derived(defaultTo({pubkey}))
+  const profile = directory.profiles.key(pubkey).derived(defaultTo({pubkey})) as Readable<Profile>
 </script>
 
 {#if $profile.picture}


@@ -10,6 +10,7 @@ import {warn} from "src/util/logger"
 import {now} from "src/util/misc"
 import {userKinds, noteKinds} from "src/util/nostr"
 import {modal, toast} from "src/partials/state"
+import type {Event} from 'src/engine/types'
 import {
   FORCE_RELAYS,
   DEFAULT_FOLLOWS,
@@ -26,10 +27,11 @@ import {
 // Routing
 
 export const routes = {
-  person: (pubkey, tab = "notes") => `/people/${nip19.npubEncode(pubkey)}/${tab}`,
+  person: (pubkey: string, tab = "notes") => `/people/${nip19.npubEncode(pubkey)}/${tab}`,
 }
 
-export const addToList = (type, value) => modal.push({type: "list/select", item: {type, value}})
+export const addToList = (type: string, value: string) =>
+  modal.push({type: "list/select", item: {type, value}})
 
 // Menu
@@ -38,7 +40,7 @@ export const menuIsOpen = writable(false)
 // Redact long strings, especially hex and bech32 keys which are 64 and 63
 // characters long, respectively. Put the threshold a little lower in case
 // someone accidentally enters a key with the last few digits missing
-const redactErrorInfo = info =>
+const redactErrorInfo = (info: any) =>
   JSON.parse(JSON.stringify(info || null).replace(/\w{60}\w+/g, "[REDACTED]"))
 
 // Wait for bugsnag to be started in main
@@ -67,7 +69,7 @@ setTimeout(() => {
 const session = Math.random().toString().slice(2)
 
-export const logUsage = async name => {
+export const logUsage = async (name: string) => {
   // Hash the user's pubkey so we can identify unique users without knowing
   // anything about them
   const pubkey = Keys.pubkey.get()
@@ -101,7 +103,7 @@ setInterval(() => {
     if (stats.last_activity < now() - 60) {
       Network.pool.remove(url)
-    } else if (userRelays.has(url) && first(Meta.getRelayQuality(url)) < 0.3) {
+    } else if (userRelays.has(url) && Meta.getRelayQuality(url)[0] < 0.3) {
       $slowConnections.push(url)
     }
   }
@@ -117,9 +119,9 @@ export const listenForNotifications = async () => {
   const channelIds = pluck("id", Nip28.channels.get().filter(whereEq({joined: true})))
 
-  const eventIds = doPipe(Events.cache.get(), [
-    filter(e => noteKinds.includes(e.kind)),
-    sortBy(e => -e.created_at),
+  const eventIds: string[] = doPipe(Events.cache.get(), [
+    filter((e: Event) => noteKinds.includes(e.kind)),
+    sortBy((e: Event) => -e.created_at),
     slice(0, 256),
     pluck("id"),
   ])
@@ -166,7 +168,7 @@ export const loadAppData = async () => {
   listenForNotifications()
 }
 
-export const login = async (method, key) => {
+export const login = async (method: string, key: string) => {
   Keys.login(method, key)
 
   if (FORCE_RELAYS.length > 0) {
@@ -188,7 +190,7 @@ export const login = async (method, key) => {
   }
 }
 
-export const publishWithToast = (event, relays) =>
+export const publishWithToast = (event: Event, relays: string[]) =>
   Outbox.publish(event, relays, ({completed, succeeded, failed, timeouts, pending}) => {
     let message = `Published to ${succeeded.size}/${relays.length} relays`
@@ -215,8 +217,8 @@ export const publishWithToast = (event, relays) =>
 // Feeds
 
 export const compileFilter = (filter: DynamicFilter): Filter => {
-  const getAuthors = pubkeys =>
-    shuffle(pubkeys.length > 0 ? pubkeys : DEFAULT_FOLLOWS).slice(0, 256)
+  const getAuthors = (pubkeys: string[]) =>
+    shuffle(pubkeys.length > 0 ? pubkeys : DEFAULT_FOLLOWS as string[]).slice(0, 256)
 
   if (filter.authors === "global") {
     filter = omit(["authors"], filter)

src/engine/Engine.ts (new file, 53 lines)
import type {Env} from "./types"
import {Alerts} from "./components/Alerts"
import {Builder} from "./components/Builder"
import {Content} from "./components/Content"
import {Crypt} from "./components/Crypt"
import {Directory} from "./components/Directory"
import {Events} from "./components/Events"
import {Keys} from "./components/Keys"
import {Meta} from "./components/Meta"
import {Network} from "./components/Network"
import {Nip02} from "./components/Nip02"
import {Nip04} from "./components/Nip04"
import {Nip05} from "./components/Nip05"
import {Nip28} from "./components/Nip28"
import {Nip57} from "./components/Nip57"
import {Nip65} from "./components/Nip65"
import {Outbox} from "./components/Outbox"
import {PubkeyLoader} from "./components/PubkeyLoader"
import {Storage} from "./components/Storage"
import {User} from "./components/User"

export class Engine {
  Env: Env
  components = {
    Alerts: new Alerts(),
    Builder: new Builder(),
    Content: new Content(),
    Crypt: new Crypt(),
    Directory: new Directory(),
    Events: new Events(),
    Keys: new Keys(),
    Meta: new Meta(),
    Network: new Network(),
    Nip02: new Nip02(),
    Nip04: new Nip04(),
    Nip05: new Nip05(),
    Nip28: new Nip28(),
    Nip57: new Nip57(),
    Nip65: new Nip65(),
    Outbox: new Outbox(),
    PubkeyLoader: new PubkeyLoader(),
    Storage: new Storage(),
    User: new User(),
  }

  constructor(Env: Env) {
    this.Env = Env

    for (const component of Object.values(this.components)) {
      component.initialize?.(this)
    }
  }
}
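
For orientation, the contract a component has to satisfy here is small: a plain class whose state lives in instance fields, plus an optional initialize(engine) hook that receives the Engine instance so the component can reach its siblings through engine.components. A minimal sketch of a hypothetical component following that shape (the ExampleCounter name and its kind-1 handler are illustrative, not part of this commit):

import {collection} from "src/engine/util/store"
import type {Event} from "src/engine/types"
import type {Engine} from "src/engine/Engine"

// Hypothetical component, shown only to illustrate the initialize(engine) contract
export class ExampleCounter {
  // State lives in instance fields, mirroring Alerts and Content
  counts = collection<{pubkey: string; count: number}>("pubkey")

  initialize(engine: Engine) {
    // Sibling components are reached through engine.components
    engine.components.Events.addHandler(1, (e: Event) => {
      const current = this.counts.key(e.pubkey).get()

      this.counts.key(e.pubkey).merge({count: (current?.count || 0) + 1})
    })
  }
}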


@@ -1,45 +1,31 @@
 import {reduce} from "ramda"
 import {Tags, noteKinds, isLike, findReplyId, findRootId} from "src/util/nostr"
-import {collection, writable, derived} from "../util/store"
+import {collection, writable, derived} from "src/engine/util/store"
+import type {Readable} from "src/engine/util/store"
+import type {Engine} from "src/engine/Engine"
 import type {Event} from "src/engine/types"
 
 export class Alerts {
-  static contributeState() {
-    const events = collection<Event>("id")
-
-    const lastChecked = writable(0)
-
-    const latestNotification = events.derived(reduce((n, e) => Math.max(n, e.created_at), 0))
-
-    const hasNewNotfications = derived(
-      [lastChecked, latestNotification],
-      ([$lastChecked, $latestNotification]) => $latestNotification > $lastChecked
-    )
-
-    return {events, lastChecked, latestNotification, hasNewNotfications}
-  }
-
-  static initialize({Alerts, Events, Keys, User}) {
-    const isMention = e => Tags.from(e).pubkeys().includes(Keys.pubkey.get())
-    const isUserEvent = id => Events.cache.key(id).get()?.pubkey === Keys.pubkey.get()
-    const isDescendant = e => isUserEvent(findRootId(e))
-    const isReply = e => isUserEvent(findReplyId(e))
-
-    const handleNotification = condition => e => {
+  events = collection<Event>("id")
+  lastChecked = writable(0)
+  latestNotification = this.events.derived(reduce((n, e) => Math.max(n, e.created_at), 0))
+  hasNewNotfications = derived([this.lastChecked, this.latestNotification], ([c, n]) => n > c)
+
+  initialize(engine: Engine) {
+    const {Alerts, Events, Keys, User} = engine.components
+
+    const isMention = (e: Event) => Tags.from(e).pubkeys().includes(Keys.pubkey.get())
+    const isUserEvent = (id: string) => Events.cache.key(id).get()?.pubkey === Keys.pubkey.get()
+    const isDescendant = (e: Event) => isUserEvent(findRootId(e))
+    const isReply = (e: Event) => isUserEvent(findReplyId(e))
+
+    const handleNotification = (e: Event) => {
       const pubkey = Keys.pubkey.get()
 
-      if (!pubkey || e.pubkey === pubkey) {
-        return
-      }
-
-      if (!condition(e)) {
-        return
-      }
-
-      if (User.isMuted(e)) {
+      if (!pubkey || e.pubkey === pubkey || User.isMuted(e)) {
         return
       }
@@ -47,17 +33,23 @@ export class Alerts {
     }
 
     noteKinds.forEach(kind => {
-      Events.addHandler(
-        kind,
-        handleNotification(e => isMention(e) || isReply(e) || isDescendant(e))
-      )
+      Events.addHandler(kind, (e: Event) => {
+        if (isMention(e) || isReply(e) || isDescendant(e)) {
+          handleNotification(e)
+        }
+      })
     })
 
-    Events.addHandler(
-      7,
-      handleNotification(e => isLike(e.content) && isReply(e))
-    )
-
-    Events.addHandler(9735, handleNotification(isReply))
+    Events.addHandler(7, (e: Event) => {
+      if (isLike(e.content) && isReply(e)) {
+        handleNotification(e)
+      }
+    })
+
+    Events.addHandler(9735, (e: Event) => {
+      if (isReply(e)) {
+        handleNotification(e)
+      }
+    })
   }
 }


@ -2,180 +2,178 @@ import {last, pick, uniqBy} from "ramda"
import {doPipe, first} from "hurdak" import {doPipe, first} from "hurdak"
import {Tags, channelAttrs, findRoot, findReply} from "src/util/nostr" import {Tags, channelAttrs, findRoot, findReply} from "src/util/nostr"
import {parseContent} from "src/util/notes" import {parseContent} from "src/util/notes"
import type {Event, RelayPolicy, RelayPolicyEntry} from "src/engine/types"
import type {Engine} from "src/engine/Engine"
const uniqTags = uniqBy(t => t.slice(0, 2).join(":")) type EventOpts = {
content?: string
tags?: string[][]
tagClient?: boolean
}
const buildEvent = (kind, {content = "", tags = [], tagClient = true}) => { const uniqTags = uniqBy((t: string[]) => t.slice(0, 2).join(":"))
const buildEvent = (kind: number, {content = "", tags = [], tagClient = true}: EventOpts) => {
if (tagClient) { if (tagClient) {
tags = tags.filter(t => t[0] !== "client").concat([["client", "coracle"]]) tags = tags.filter((t: string[]) => t[0] !== "client").concat([["client", "coracle"]])
} }
return {kind, content, tags} return {kind, content, tags}
} }
export class Builder { export class Builder {
static contributeActions({Nip65, Directory}) { engine: Engine
const getEventHint = event => first(Nip65.getEventHints(1, event)) || ""
const getPubkeyHint = pubkey => first(Nip65.getPubkeyHints(1, pubkey)) || "" getEventHint = (event: Event) => first(this.engine.components.Nip65.getEventHints(1, event)) || ""
const getPubkeyPetname = pubkey => { getPubkeyHint = (pubkey: string): string =>
const profile = Directory.getProfile(pubkey) first(this.engine.components.Nip65.getPubkeyHints(1, pubkey)) || ""
return profile ? Directory.displayProfile(profile) : "" getPubkeyPetname = (pubkey: string) => {
} const profile = this.engine.components.Directory.getProfile(pubkey)
const mention = pubkey => { return profile ? this.engine.components.Directory.displayProfile(profile) : ""
const hint = getPubkeyHint(pubkey) }
const petname = getPubkeyPetname(pubkey)
return ["p", pubkey, hint, petname] mention = (pubkey: string): string[] => {
} const hint = this.getPubkeyHint(pubkey)
const petname = this.getPubkeyPetname(pubkey)
const tagsFromContent = (content, tags) => { return ["p", pubkey, hint, petname]
const seen = new Set(Tags.wrap(tags).values().all()) }
for (const {type, value} of parseContent({content})) { tagsFromContent = (content: string, tags: string[][]) => {
if (type === "topic") { const seen = new Set(Tags.wrap(tags).values().all())
tags = tags.concat([["t", value]])
seen.add(value)
}
if (type.match(/nostr:(note|nevent)/) && !seen.has(value.id)) { for (const {type, value} of parseContent({content, tags: []})) {
tags = tags.concat([["e", value.id, value.relays?.[0] || "", "mention"]]) if (type === "topic") {
seen.add(value.id) tags = tags.concat([["t", value]])
} seen.add(value)
if (type.match(/nostr:(nprofile|npub)/) && !seen.has(value.pubkey)) {
tags = tags.concat([mention(value.pubkey)])
seen.add(value.pubkey)
}
} }
return tags if (type.match(/nostr:(note|nevent)/) && !seen.has(value.id)) {
} tags = tags.concat([["e", value.id, value.relays?.[0] || "", "mention"]])
seen.add(value.id)
const getReplyTags = (n, inherit = false) => {
const extra = inherit
? Tags.from(n)
.type("e")
.reject(t => last(t) === "mention")
.all()
.map(t => t.slice(0, 3))
: []
const eHint = getEventHint(n)
const reply = ["e", n.id, eHint, "reply"]
const root = doPipe(findRoot(n) || findReply(n) || reply, [
t => (t.length < 3 ? t.concat(eHint) : t),
t => t.slice(0, 3).concat("root"),
])
return [mention(n.pubkey), root, ...extra, reply]
}
const authenticate = (url, challenge) =>
buildEvent(22242, {
tags: [
["challenge", challenge],
["relay", url],
],
})
const setProfile = profile => buildEvent(0, {content: JSON.stringify(profile)})
const setRelays = relays =>
buildEvent(10002, {
tags: relays.map(r => {
const t = ["r", r.url]
if (!r.write) {
t.push("read")
}
return t
}),
})
const setAppData = (d, content = "") => buildEvent(30078, {content, tags: [["d", d]]})
const setPetnames = petnames => buildEvent(3, {tags: petnames})
const setMutes = mutes => buildEvent(10000, {tags: mutes})
const createList = list => buildEvent(30001, {tags: list})
const createChannel = channel =>
buildEvent(40, {content: JSON.stringify(pick(channelAttrs, channel))})
const updateChannel = ({id, ...channel}) =>
buildEvent(41, {
content: JSON.stringify(pick(channelAttrs, channel)),
tags: [["e", id]],
})
const createChatMessage = (channelId, content, url) =>
buildEvent(42, {content, tags: [["e", channelId, url, "root"]]})
const createDirectMessage = (pubkey, content) => buildEvent(4, {content, tags: [["p", pubkey]]})
const createNote = (content, tags = []) =>
buildEvent(1, {content, tags: uniqTags(tagsFromContent(content, tags))})
const createReaction = (note, content) => buildEvent(7, {content, tags: getReplyTags(note)})
const createReply = (note, content, tags = []) =>
buildEvent(1, {
content,
tags: doPipe(tags, [
tags => tags.concat(getReplyTags(note, true)),
tags => tagsFromContent(content, tags),
uniqTags,
]),
})
const requestZap = (relays, content, pubkey, eventId, amount, lnurl) => {
const tags = [
["relays", ...relays],
["amount", amount.toString()],
["lnurl", lnurl],
["p", pubkey],
]
if (eventId) {
tags.push(["e", eventId])
} }
return buildEvent(9734, {content, tags, tagClient: false}) if (type.match(/nostr:(nprofile|npub)/) && !seen.has(value.pubkey)) {
tags = tags.concat([this.mention(value.pubkey)])
seen.add(value.pubkey)
}
} }
const deleteEvents = ids => buildEvent(5, {tags: ids.map(id => ["e", id])}) return tags
}
const deleteNaddrs = naddrs => buildEvent(5, {tags: naddrs.map(naddr => ["a", naddr])}) getReplyTags = (n: Event, inherit = false) => {
const extra = inherit
? Tags.from(n)
.type("e")
.reject(t => last(t) === "mention")
.all()
.map(t => t.slice(0, 3))
: []
const eHint = this.getEventHint(n)
const reply = ["e", n.id, eHint, "reply"]
const root = doPipe(findRoot(n) || findReply(n) || reply, [
t => (t.length < 3 ? t.concat(eHint) : t),
t => t.slice(0, 3).concat("root"),
])
const createLabel = payload => buildEvent(1985, payload) return [this.mention(n.pubkey), root, ...extra, reply]
}
return { authenticate = (url: string, challenge: string) =>
mention, buildEvent(22242, {
tagsFromContent, tags: [
getReplyTags, ["challenge", challenge],
authenticate, ["relay", url],
setProfile, ],
setRelays, })
setAppData,
setPetnames, setProfile = (profile: Record<string, any>) => buildEvent(0, {content: JSON.stringify(profile)})
setMutes,
createList, setRelays = (relays: RelayPolicyEntry[]) =>
createChannel, buildEvent(10002, {
updateChannel, tags: relays.map(r => {
createChatMessage, const t = ["r", r.url]
createDirectMessage,
createNote, if (!r.write) {
createReaction, t.push("read")
createReply, }
requestZap,
deleteEvents, return t
deleteNaddrs, }),
createLabel, })
setAppData = (d: string, content = "") => buildEvent(30078, {content, tags: [["d", d]]})
setPetnames = (petnames: string[][]) => buildEvent(3, {tags: petnames})
setMutes = (mutes: string[][]) => buildEvent(10000, {tags: mutes})
createList = (list: string[][]) => buildEvent(30001, {tags: list})
createChannel = (channel: Record<string, any>) =>
buildEvent(40, {content: JSON.stringify(pick(channelAttrs, channel))})
updateChannel = ({id, ...channel}: Record<string, any>) =>
buildEvent(41, {
content: JSON.stringify(pick(channelAttrs, channel)),
tags: [["e", id]],
})
createChatMessage = (channelId: string, content: string, url: string) =>
buildEvent(42, {content, tags: [["e", channelId, url, "root"]]})
createDirectMessage = (pubkey: string, content: string) =>
buildEvent(4, {content, tags: [["p", pubkey]]})
createNote = (content: string, tags: string[][] = []) =>
buildEvent(1, {content, tags: uniqTags(this.tagsFromContent(content, tags))})
createReaction = (note: Event, content: string) =>
buildEvent(7, {content, tags: this.getReplyTags(note)})
createReply = (note: Event, content: string, tags: string[][] = []) =>
buildEvent(1, {
content,
tags: doPipe(tags, [
tags => tags.concat(this.getReplyTags(note, true)),
tags => this.tagsFromContent(content, tags),
uniqTags,
]),
})
requestZap = (
relays: string[],
content: string,
pubkey: string,
eventId: string,
amount: number,
lnurl: string
) => {
const tags = [
["relays", ...relays],
["amount", amount.toString()],
["lnurl", lnurl],
["p", pubkey],
]
if (eventId) {
tags.push(["e", eventId])
} }
return buildEvent(9734, {content, tags, tagClient: false})
}
deleteEvents = (ids: string[]) => buildEvent(5, {tags: ids.map(id => ["e", id])})
deleteNaddrs = (naddrs: string[]) => buildEvent(5, {tags: naddrs.map(naddr => ["a", naddr])})
createLabel = (payload: {content: string; tags: string[][]}) => buildEvent(1985, payload)
initialize(engine: Engine) {
this.engine = engine
} }
} }


@ -3,29 +3,22 @@ import {nth, inc} from "ramda"
import {fuzzy} from "src/util/misc" import {fuzzy} from "src/util/misc"
import {Tags} from "src/util/nostr" import {Tags} from "src/util/nostr"
import type {Topic, List} from "src/engine/types" import type {Topic, List} from "src/engine/types"
import {derived, collection} from "../util/store" import {derived, collection} from "src/engine/util/store"
import type {Engine} from "src/engine/Engine"
import type {Event} from "src/engine/types"
export class Content { export class Content {
static contributeState() { topics = collection<Topic>("name")
const topics = collection<Topic>("name") lists = collection<List>("naddr")
searchTopics = derived(this.topics, $topics =>
fuzzy($topics.values(), {keys: ["name"], threshold: 0.3})
)
const lists = collection<List>("naddr") getLists = (f: (l: List) => boolean) =>
this.lists.get().filter(l => !l.deleted_at && (f ? f(l) : true))
return {topics, lists} initialize(engine: Engine) {
} const processTopics = (e: Event) => {
static contributeSelectors({Content}) {
const getLists = f => Content.lists.get().filter(l => !l.deleted_at && (f ? f(l) : true))
const searchTopics = derived(Content.topics, $topics =>
fuzzy($topics.values(), {keys: ["name"], threshold: 0.3})
)
return {getLists, searchTopics}
}
static initialize({Events, Content}) {
const processTopics = e => {
const tagTopics = Tags.from(e).topics() const tagTopics = Tags.from(e).topics()
const contentTopics = Array.from(e.content.toLowerCase().matchAll(/#(\w{2,100})/g)).map( const contentTopics = Array.from(e.content.toLowerCase().matchAll(/#(\w{2,100})/g)).map(
nth(1) nth(1)
@ -33,45 +26,45 @@ export class Content {
for (const name of tagTopics.concat(contentTopics)) { for (const name of tagTopics.concat(contentTopics)) {
if (name) { if (name) {
const topic = Content.topics.key(name).get() const topic = this.topics.key(name).get()
Content.topics.key(name).merge({count: inc(topic?.count || 0)}) this.topics.key(name).merge({count: inc(topic?.count || 0)})
} }
} }
} }
Events.addHandler(1, processTopics) engine.components.Events.addHandler(1, processTopics)
Events.addHandler(42, processTopics) engine.components.Events.addHandler(42, processTopics)
Events.addHandler(30001, e => { engine.components.Events.addHandler(30001, (e: Event) => {
const {pubkey, kind, created_at} = e const {pubkey, kind, created_at} = e
const name = Tags.from(e).getMeta("d") const name = Tags.from(e).getMeta("d")
const naddr = nip19.naddrEncode({identifier: name, pubkey, kind}) const naddr = nip19.naddrEncode({identifier: name, pubkey, kind})
const list = Content.lists.key(naddr).get() const list = this.lists.key(naddr).get()
if (created_at < list?.updated_at) { if (created_at < list?.updated_at) {
return return
} }
Content.lists.key(naddr).merge({ this.lists.key(naddr).merge({
...list, ...list,
name, name,
pubkey, pubkey,
tags: e.tags, tags: e.tags,
updated_at: created_at, updated_at: created_at,
created_at: list?.created_at || created_at, created_at: list?.created_at || created_at,
deleted_at: null, deleted_at: undefined,
}) })
}) })
Events.addHandler(5, e => { engine.components.Events.addHandler(5, (e: Event) => {
Tags.from(e) Tags.from(e)
.type("a") .type("a")
.values() .values()
.all() .all()
.forEach(naddr => { .forEach(naddr => {
const list = Content.lists.key(naddr) const list = this.lists.key(naddr)
if (list.exists()) { if (list.exists()) {
list.merge({deleted_at: e.created_at}) list.merge({deleted_at: e.created_at})


@ -1,73 +1,79 @@
import {nip04} from "nostr-tools" import {nip04} from "nostr-tools"
import {switcherFn, sleep, tryFunc} from "hurdak" import {switcherFn, sleep, tryFunc} from "hurdak"
import {tryJson} from "src/util/misc" import {tryJson} from "src/util/misc"
import type {Engine} from "src/engine/Engine"
import type {KeyState} from "src/engine/types"
export class Crypt { export class Crypt {
static contributeActions({Keys}, emit) { engine: Engine
async function encrypt(pubkey, message) {
const {method, privkey} = Keys.current.get()
return switcherFn(method, { async encrypt(pubkey: string, message: string) {
extension: () => Keys.withExtension(ext => ext.nip04.encrypt(pubkey, message)), const {method, privkey} = this.engine.components.Keys.current.get() as KeyState
privkey: () => nip04.encrypt(privkey, pubkey, message),
bunker: async () => {
const ndk = await Keys.getNDK()
const user = ndk.getUser({hexpubkey: pubkey})
return ndk.signer.encrypt(user, message) return switcherFn(method, {
}, extension: () =>
}) this.engine.components.Keys.withExtension((ext: any) => ext.nip04.encrypt(pubkey, message)),
} privkey: () => nip04.encrypt(privkey as string, pubkey, message),
bunker: async () => {
const ndk = await this.engine.components.Keys.getNDK()
const user = ndk.getUser({hexpubkey: pubkey})
async function decrypt(pubkey, message) { return ndk.signer.encrypt(user, message)
const {method, privkey} = Keys.current.get() },
})
}
return switcherFn(method, { async decrypt(pubkey: string, message: string) {
extension: () => const {method, privkey} = this.engine.components.Keys.current.get() as KeyState
Keys.withExtension(ext => {
return new Promise(async resolve => {
let result
// Alby gives us a bunch of bogus errors, try multiple times return switcherFn(method, {
for (let i = 0; i < 3; i++) { extension: () =>
result = await tryFunc(() => ext.nip04.decrypt(pubkey, message)) this.engine.components.Keys.withExtension((ext: any) => {
return new Promise(async resolve => {
let result
if (result) { // Alby gives us a bunch of bogus errors, try multiple times
break for (let i = 0; i < 3; i++) {
} result = await tryFunc(() => ext.nip04.decrypt(pubkey, message))
await sleep(30) if (result) {
break
} }
resolve(result || `<Failed to decrypt message>`) await sleep(30)
}) }
}),
privkey: () => {
return (
tryFunc(() => nip04.decrypt(privkey, pubkey, message)) || `<Failed to decrypt message>`
)
},
bunker: async () => {
const ndk = await Keys.getNDK()
const user = ndk.getUser({hexpubkey: pubkey})
return ndk.signer.decrypt(user, message) resolve(result || `<Failed to decrypt message>`)
}, })
}) }),
} privkey: () => {
return (
tryFunc(() => nip04.decrypt(privkey as string, pubkey, message)) ||
`<Failed to decrypt message>`
)
},
bunker: async () => {
const ndk = await this.engine.components.Keys.getNDK()
const user = ndk.getUser({hexpubkey: pubkey})
async function encryptJson(data) { return ndk.signer.decrypt(user, message)
const {pubkey} = Keys.current.get() },
})
}
return encrypt(pubkey, JSON.stringify(data)) async encryptJson(data: any) {
} const {pubkey} = this.engine.components.Keys.current.get() as KeyState
async function decryptJson(data) { return this.encrypt(pubkey, JSON.stringify(data))
const {pubkey} = Keys.current.get() }
return tryJson(async () => JSON.parse(await decrypt(pubkey, data))) async decryptJson(data: string) {
} const {pubkey} = this.engine.components.Keys.current.get() as KeyState
return {encrypt, decrypt, encryptJson, decryptJson} return tryJson(async () => JSON.parse(await this.decrypt(pubkey, data)))
}
initialize(engine: Engine) {
this.engine = engine
} }
} }


@ -1,59 +1,51 @@
import {nip19} from "nostr-tools" import {nip19} from "nostr-tools"
import {ellipsize} from "hurdak" import {ellipsize} from "hurdak"
import {tryJson, now, fuzzy} from "src/util/misc" import {tryJson, now, fuzzy} from "src/util/misc"
import type {Profile} from "src/engine/types" import {collection, derived} from "src/engine/util/store"
import {collection, derived} from "../util/store" import type {Engine} from "src/engine/Engine"
import type {Event, Profile} from "src/engine/types"
export class Directory { export class Directory {
static contributeState() { profiles = collection<Profile>("pubkey")
const profiles = collection<Profile>("pubkey")
return {profiles} getProfile = (pubkey: string): Profile => this.profiles.key(pubkey).get() || {pubkey}
}
static contributeSelectors({Directory}) { getNamedProfiles = () => this.profiles.get().filter(p => p.name || p.nip05 || p.display_name)
const getProfile = (pubkey: string): Profile => Directory.profiles.key(pubkey).get() || {pubkey}
const getNamedProfiles = () => displayProfile = ({display_name, name, pubkey}: Profile) => {
Directory.profiles.get().filter(p => p.name || p.nip05 || p.display_name) if (display_name) {
return ellipsize(display_name, 60)
const displayProfile = ({display_name, name, pubkey}: Profile) => {
if (display_name) {
return ellipsize(display_name, 60)
}
if (name) {
return ellipsize(name, 60)
}
try {
return nip19.npubEncode(pubkey).slice(-8)
} catch (e) {
console.error(e)
return ""
}
} }
const displayPubkey = pubkey => displayProfile(getProfile(pubkey)) if (name) {
return ellipsize(name, 60)
}
const searchProfiles = derived(Directory.profiles, $profiles => { try {
return fuzzy(getNamedProfiles(), { return nip19.npubEncode(pubkey).slice(-8)
keys: ["name", "display_name", {name: "nip05", weight: 0.5}, {name: "about", weight: 0.1}], } catch (e) {
threshold: 0.3, console.error(e)
})
})
return {getProfile, getNamedProfiles, displayProfile, displayPubkey, searchProfiles} return ""
}
} }
static initialize({Events, Directory}) { displayPubkey = (pubkey: string) => this.displayProfile(this.getProfile(pubkey))
Events.addHandler(0, e => {
searchProfiles = derived(this.profiles, $profiles => {
return fuzzy(this.getNamedProfiles(), {
keys: ["name", "display_name", {name: "nip05", weight: 0.5}, {name: "about", weight: 0.1}],
threshold: 0.3,
})
})
initialize(engine: Engine) {
engine.components.Events.addHandler(0, (e: Event) => {
tryJson(() => { tryJson(() => {
const kind0 = JSON.parse(e.content) const kind0 = JSON.parse(e.content)
const profile = Directory.profiles.key(e.pubkey) const profile = this.profiles.key(e.pubkey)
if (e.created_at < profile.get()?.created_at) { if (e.created_at < (profile.get()?.created_at || Infinity)) {
return return
} }


@@ -1,36 +1,28 @@
 import type {Event} from "src/engine/types"
 import {pushToKey} from "src/util/misc"
-import {Worker} from "../util/Worker"
-import {collection} from "../util/store"
+import {Worker} from "src/engine/util/Worker"
+import {collection} from "src/engine/util/store"
+import type {Engine} from "src/engine/Engine"
 
 export const ANY_KIND = "Events/ANY_KIND"
 
 export class Events {
-  static contributeState() {
-    return {
-      queue: new Worker<Event>(),
-      cache: collection<Event>("id"),
-      handlers: {},
-    }
-  }
-
-  static contributeActions({Events}) {
-    const addHandler = (kind, f) => pushToKey(Events.handlers, kind, f)
-
-    return {addHandler}
-  }
-
-  static initialize({Events, Keys}) {
-    Events.queue.listen(async event => {
-      if (event.pubkey === Keys.pubkey.get()) {
-        Events.cache.key(event.id).set(event)
+  handlers = {} as Record<string, Array<(e: Event) => void>>
+  queue = new Worker<Event>()
+  cache = collection<Event>("id")
+
+  addHandler = (kind: number, f: (e: Event) => void) => pushToKey(this.handlers, kind.toString(), f)
+
+  initialize(engine: Engine) {
+    this.queue.listen(async event => {
+      if (event.pubkey === engine.components.Keys.pubkey.get()) {
+        this.cache.key(event.id).set(event)
       }
 
-      for (const handler of Events.handlers[ANY_KIND] || []) {
+      for (const handler of this.handlers[ANY_KIND] || []) {
         await handler(event)
       }
 
-      for (const handler of Events.handlers[event.kind] || []) {
+      for (const handler of this.handlers[event.kind.toString()] || []) {
         await handler(event)
       }
     })
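
In practice the flow through this component is: Network.subscribe pushes verified events onto Events.queue, and the listener above caches the user's own events and fans everything out to the handlers registered per kind. A hedged usage sketch from the application side (the kind-1 handler body is illustrative only):

import {Events} from "src/app/engine"
import type {Event} from "src/engine/types"

// Register interest in kind-1 text notes; Alerts, Content and Directory
// register their handlers the same way from their initialize() hooks
Events.addHandler(1, (e: Event) => {
  console.log("note from", e.pubkey, e.content.slice(0, 80))
})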


@ -2,67 +2,55 @@ import {propEq, find, reject} from "ramda"
import {nip19, getPublicKey, getSignature, generatePrivateKey} from "nostr-tools" import {nip19, getPublicKey, getSignature, generatePrivateKey} from "nostr-tools"
import NDK, {NDKEvent, NDKNip46Signer, NDKPrivateKeySigner} from "@nostr-dev-kit/ndk" import NDK, {NDKEvent, NDKNip46Signer, NDKPrivateKeySigner} from "@nostr-dev-kit/ndk"
import {switcherFn} from "hurdak" import {switcherFn} from "hurdak"
import {writable, derived} from "../util/store" import {writable, derived} from "src/engine/util/store"
import type {KeyState, Event} from "src/engine/types"
export type LoginMethod = "bunker" | "pubkey" | "privkey" | "extension" import type {Engine} from "src/engine/Engine"
export type KeyState = {
method: LoginMethod
pubkey: string
privkey: string | null
bunkerKey: string | null
}
export class Keys { export class Keys {
static contributeState() { pubkey = writable<string | null>(null)
const pubkey = writable<string | null>() keyState = writable<KeyState[]>([])
current = derived(this.pubkey, k => this.getKeyState(k))
canSign = derived(this.current, keyState =>
["bunker", "privkey", "extension"].includes(keyState?.method)
)
const keyState = writable<KeyState[]>([]) getKeyState = (k: string) => find(propEq("pubkey", k), this.keyState.get())
const getKeyState = k => find(propEq("pubkey", k), keyState.get()) setKeyState = (v: KeyState) =>
this.keyState.update((s: KeyState[]) => reject(propEq("pubkey", v.pubkey), s).concat(v))
const setKeyState = v => keyState.update(s => reject(propEq("pubkey", v.pubkey), s).concat(v)) removeKeyState = (k: string) =>
this.keyState.update((s: KeyState[]) => reject(propEq("pubkey", k), s))
const removeKeyState = k => keyState.update(s => reject(propEq("pubkey", k), s))
const current = derived<KeyState | null>(pubkey, k => getKeyState(k))
const canSign = derived(current, keyState =>
["bunker", "privkey", "extension"].includes(keyState?.method)
)
return {pubkey, keyState, getKeyState, setKeyState, removeKeyState, current, canSign}
}
static contributeSelectors({Keys}) {
const {current} = Keys
withExtension = (() => {
let extensionLock = Promise.resolve() let extensionLock = Promise.resolve()
const getExtension = () => (window as {nostr?: any}).nostr const getExtension = () => (window as {nostr?: any}).nostr
const withExtension = f => { return (f: (ext: any) => void) => {
extensionLock = extensionLock.catch(e => console.error(e)).then(() => f(getExtension())) extensionLock = extensionLock.catch(e => console.error(e)).then(() => f(getExtension()))
return extensionLock return extensionLock
} }
})()
const isKeyValid = key => { isKeyValid = (key: string) => {
// Validate the key before setting it to state by encoding it using bech32. // Validate the key before setting it to state by encoding it using bech32.
// This will error if invalid (this works whether it's a public or a private key) // This will error if invalid (this works whether it's a public or a private key)
try { try {
nip19.npubEncode(key) nip19.npubEncode(key)
} catch (e) { } catch (e) {
return false return false
}
return true
} }
return true
}
getNDK = (() => {
const ndkInstances = new Map() const ndkInstances = new Map()
const prepareNDK = async (token?: string) => { const prepareNDK = async (token?: string) => {
const {pubkey, bunkerKey} = current.get() const {pubkey, bunkerKey} = this.current.get() as KeyState
const localSigner = new NDKPrivateKeySigner(bunkerKey) const localSigner = new NDKPrivateKeySigner(bunkerKey)
const ndk = new NDK({ const ndk = new NDK({
@ -81,73 +69,74 @@ export class Keys {
ndkInstances.set(pubkey, ndk) ndkInstances.set(pubkey, ndk)
} }
const getNDK = async () => { return async (token?: string) => {
const {pubkey} = current.get() const {pubkey} = this.current.get() as KeyState
if (!ndkInstances.has(pubkey)) { if (!ndkInstances.has(pubkey)) {
await prepareNDK() await prepareNDK(token)
} }
return ndkInstances.get(pubkey) return ndkInstances.get(pubkey)
} }
})()
return {withExtension, isKeyValid, getNDK} login = (method: string, key: string | {pubkey: string; token: string}) => {
let pubkey = null
let privkey = null
let bunkerKey = null
if (method === "privkey") {
privkey = key as string
pubkey = getPublicKey(privkey)
} else if (["pubkey", "extension"].includes(method)) {
pubkey = key as string
} else if (method === "bunker") {
pubkey = (key as {pubkey: string}).pubkey
bunkerKey = generatePrivateKey()
this.getNDK((key as {token: string}).token)
}
this.setKeyState({method, pubkey, privkey, bunkerKey})
this.pubkey.set(pubkey)
} }
static contributeActions({Keys}) { sign = async (event: Event) => {
const login = (method, key) => { const {method, privkey} = this.current.get()
let pubkey = null
let privkey = null
let bunkerKey = null
if (method === "privkey") { console.assert(event.id)
privkey = key console.assert(event.pubkey)
pubkey = getPublicKey(key) console.assert(event.created_at)
} else if (["pubkey", "extension"].includes(method)) {
pubkey = key
} else if (method === "bunker") {
pubkey = key.pubkey
bunkerKey = generatePrivateKey()
Keys.getNDK(key.token) return switcherFn(method, {
} bunker: async () => {
const ndk = await this.getNDK()
const ndkEvent = new NDKEvent(ndk, event)
Keys.setKeyState({method, pubkey, privkey, bunkerKey}) await ndkEvent.sign(ndk.signer)
Keys.pubkey.set(pubkey)
return ndkEvent.rawEvent()
},
privkey: () => {
return Object.assign(event, {
sig: getSignature(event, privkey),
})
},
extension: () => this.withExtension(ext => ext.signEvent(event)),
})
}
clear = () => {
const $pubkey = this.pubkey.get()
this.pubkey.set(null)
if ($pubkey) {
this.removeKeyState($pubkey)
} }
}
const sign = async event => { initialize(engine: Engine) {
const {method, privkey} = Keys.current.get()
console.assert(event.id)
console.assert(event.pubkey)
console.assert(event.created_at)
return switcherFn(method, {
bunker: async () => {
const ndk = await Keys.getNDK()
const ndkEvent = new NDKEvent(ndk, event)
await ndkEvent.sign(ndk.signer)
return ndkEvent.rawEvent()
},
privkey: () => {
return Object.assign(event, {
sig: getSignature(event, privkey),
})
},
extension: () => Keys.withExtension(ext => ext.signEvent(event)),
})
}
const clear = () => {
const $pubkey = Keys.pubkey.get()
Keys.pubkey.set(null)
Keys.removeKeyState($pubkey)
}
return {login, sign, clear}
} }
} }


@ -1,94 +1,92 @@
import {Socket} from "paravel" import {Socket} from "paravel"
import {now} from "src/util/misc" import {now} from "src/util/misc"
import {switcher} from "hurdak" import {switcher} from "hurdak"
import {collection} from "src/engine/util/store"
import type {RelayStat} from "src/engine/types" import type {RelayStat} from "src/engine/types"
import {collection} from "../util/store" import type {Engine} from "src/engine/Engine"
import type {Network} from "src/engine/components/Network"
export class Meta { export class Meta {
static contributeState() { Network: Network
const relayStats = collection<RelayStat>("url")
return {relayStats} relayStats = collection<RelayStat>("url")
}
static contributeSelectors({Meta, Network}) { getRelayStats = (url: string) => this.relayStats.key(url).get()
const getRelayStats = url => Meta.relayStats.key(url).get()
const getRelayQuality = url => { getRelayQuality = (url: string): [number, string] => {
const stats = getRelayStats(url) const stats = this.getRelayStats(url)
if (!stats) {
return [0.5, "Not Connected"]
}
if (stats.error) {
return [
0,
switcher(stats.error, {
disconnected: "Disconnected",
unauthorized: "Logging in",
forbidden: "Failed to log in",
}),
]
}
const {timeouts, total_subs: totalSubs, eose_timer: eoseTimer, eose_count: eoseCount} = stats
const timeoutRate = timeouts > 0 ? timeouts / totalSubs : null
const eoseQuality = eoseCount > 0 ? Math.max(1, 500 / (eoseTimer / eoseCount)) : null
if (timeoutRate && timeoutRate > 0.5) {
return [1 - timeoutRate, "Slow connection"]
}
if (eoseQuality && eoseQuality < 0.7) {
return [eoseQuality, "Slow connection"]
}
if (eoseQuality) {
return [eoseQuality, "Connected"]
}
if (Network.pool.get(url).status === Socket.STATUS.READY) {
return [1, "Connected"]
}
if (!stats) {
return [0.5, "Not Connected"] return [0.5, "Not Connected"]
} }
return {getRelayStats, getRelayQuality} if (stats.error) {
return [
0,
switcher(stats.error, {
disconnected: "Disconnected",
unauthorized: "Logging in",
forbidden: "Failed to log in",
}),
]
}
const {timeouts, total_subs: totalSubs, eose_timer: eoseTimer, eose_count: eoseCount} = stats
const timeoutRate = timeouts > 0 ? timeouts / totalSubs : null
const eoseQuality = eoseCount > 0 ? Math.max(1, 500 / (eoseTimer / eoseCount)) : null
if (timeoutRate && timeoutRate > 0.5) {
return [1 - timeoutRate, "Slow connection"]
}
if (eoseQuality && eoseQuality < 0.7) {
return [eoseQuality, "Slow connection"]
}
if (eoseQuality) {
return [eoseQuality, "Connected"]
}
if (this.Network.pool.get(url).status === Socket.STATUS.READY) {
return [1, "Connected"]
}
return [0.5, "Not Connected"]
} }
static initialize({Network, Meta}) { initialize(engine: Engine) {
Network.pool.on("open", ({url}) => { this.Network = engine.components.Network
Meta.relayStats.key(url).merge({last_opened: now(), last_activity: now()})
this.Network.pool.on("open", ({url}: {url: string}) => {
this.relayStats.key(url).merge({last_opened: now(), last_activity: now()})
}) })
Network.pool.on("close", ({url}) => { this.Network.pool.on("close", ({url}: {url: string}) => {
Meta.relayStats.key(url).merge({last_closed: now(), last_activity: now()}) this.relayStats.key(url).merge({last_closed: now(), last_activity: now()})
}) })
Network.pool.on("error:set", (url, error) => { this.Network.pool.on("error:set", (url: string, error: string) => {
Meta.relayStats.key(url).merge({error}) this.relayStats.key(url).merge({error})
}) })
Network.pool.on("error:clear", url => { this.Network.pool.on("error:clear", (url: string) => {
Meta.relayStats.key(url).merge({error: null}) this.relayStats.key(url).merge({error: null})
}) })
Network.emitter.on("publish", urls => { this.Network.emitter.on("publish", (urls: string[]) => {
for (const url of urls) { for (const url of urls) {
Meta.relayStats.key(url).merge({ this.relayStats.key(url).merge({
last_publish: now(), last_publish: now(),
last_activity: now(), last_activity: now(),
}) })
} }
}) })
Network.emitter.on("sub:open", urls => { this.Network.emitter.on("sub:open", (urls: string[]) => {
for (const url of urls) { for (const url of urls) {
const stats = Meta.getRelayStats(url) const stats = this.getRelayStats(url)
Meta.relayStats.key(url).merge({ this.relayStats.key(url).merge({
last_sub: now(), last_sub: now(),
last_activity: now(), last_activity: now(),
total_subs: (stats?.total_subs || 0) + 1, total_subs: (stats?.total_subs || 0) + 1,
@ -97,40 +95,40 @@ export class Meta {
} }
}) })
Network.emitter.on("sub:close", urls => { this.Network.emitter.on("sub:close", (urls: string[]) => {
for (const url of urls) { for (const url of urls) {
const stats = Meta.getRelayStats(url) const stats = this.getRelayStats(url)
Meta.relayStats.key(url).merge({ this.relayStats.key(url).merge({
last_activity: now(), last_activity: now(),
active_subs: stats ? stats.active_subs - 1 : 0, active_subs: stats ? stats.active_subs - 1 : 0,
}) })
} }
}) })
Network.emitter.on("event", ({url}) => { this.Network.emitter.on("event", ({url}: {url: string}) => {
const stats = Meta.getRelayStats(url) const stats = this.getRelayStats(url)
Meta.relayStats.key(url).merge({ this.relayStats.key(url).merge({
last_activity: now(), last_activity: now(),
events_count: (stats.events_count || 0) + 1, events_count: (stats.events_count || 0) + 1,
}) })
}) })
Network.emitter.on("eose", (url, ms) => { this.Network.emitter.on("eose", (url: string, ms: number) => {
const stats = Meta.getRelayStats(url) const stats = this.getRelayStats(url)
Meta.relayStats.key(url).merge({ this.relayStats.key(url).merge({
last_activity: now(), last_activity: now(),
eose_count: (stats.eose_count || 0) + 1, eose_count: (stats.eose_count || 0) + 1,
eose_timer: (stats.eose_timer || 0) + ms, eose_timer: (stats.eose_timer || 0) + ms,
}) })
}) })
Network.emitter.on("timeout", (url, ms) => { this.Network.emitter.on("timeout", (url: string, ms: number) => {
const stats = Meta.getRelayStats(url) const stats = this.getRelayStats(url)
Meta.relayStats.key(url).merge({ this.relayStats.key(url).merge({
last_activity: now(), last_activity: now(),
timeouts: (stats.timeouts || 0) + 1, timeouts: (stats.timeouts || 0) + 1,
}) })


@ -5,10 +5,29 @@ import {ensurePlural, union, difference} from "hurdak"
import {warn, error, log} from "src/util/logger" import {warn, error, log} from "src/util/logger"
import {normalizeRelayUrl} from "src/util/nostr" import {normalizeRelayUrl} from "src/util/nostr"
import type {Event, Filter} from "src/engine/types" import type {Event, Filter} from "src/engine/types"
import type {Engine} from "src/engine/Engine"
import type {CursorOpts} from "src/engine/util/Cursor"
import type {FeedOpts} from "src/engine/util/Feed"
import {Cursor, MultiCursor} from "src/engine/util/Cursor" import {Cursor, MultiCursor} from "src/engine/util/Cursor"
import {Subscription} from "src/engine/util/Subscription" import {Subscription} from "src/engine/util/Subscription"
import {Feed} from "src/engine/util/Feed" import {Feed} from "src/engine/util/Feed"
export type Progress = {
succeeded: Set<string>
failed: Set<string>
timeouts: Set<string>
completed: Set<string>
pending: Set<string>
}
export type PublishOpts = {
relays: string[]
event: Event
onProgress: (p: Progress) => void
timeout?: number
verb?: string
}
export type SubscribeOpts = { export type SubscribeOpts = {
relays: string[] relays: string[]
filter: Filter[] | Filter filter: Filter[] | Filter
@ -19,7 +38,7 @@ export type SubscribeOpts = {
shouldProcess?: boolean shouldProcess?: boolean
} }
const getUrls = relays => { const getUrls = (relays: string[]) => {
if (relays.length === 0) { if (relays.length === 0) {
error(`Attempted to connect to zero urls`) error(`Attempted to connect to zero urls`)
} }
@ -34,269 +53,260 @@ const getUrls = relays => {
} }
export class Network { export class Network {
static contributeState() { engine: Engine
const authHandler = null pool = new Pool()
const emitter = new EventEmitter() authHandler: (url: string, challenge: string) => void
const pool = new Pool() emitter = new EventEmitter()
return {authHandler, emitter, pool} relayHasError = (url: string) => Boolean(this.pool.get(url, {autoConnect: false})?.error)
getExecutor = (urls: string[], {bypassBoot = false} = {}) => {
if (this.engine.Env.FORCE_RELAYS?.length > 0) {
urls = this.engine.Env.FORCE_RELAYS
}
let target
const muxUrl = this.engine.components.User.getSetting("multiplextr_url")
// Try to use our multiplexer, but if it fails to connect fall back to relays. If
// we're only connecting to a single relay, just do it directly, unless we already
// have a connection to the multiplexer open, in which case we're probably doing
// AUTH with a single relay.
if (muxUrl && (urls.length > 1 || this.pool.has(muxUrl))) {
const socket = this.pool.get(muxUrl)
if (!socket.error) {
target = new Plex(urls, socket)
}
}
if (!target) {
target = new Relays(urls.map(url => this.pool.get(url)))
}
const executor = new Executor(target)
executor.handleAuth({
onAuth(url: string, challenge: string) {
this.emitter.emit("error:set", url, "unauthorized")
return this.authHandler?.(url, challenge)
},
onOk(url: string, id: string, ok: boolean, message: string) {
this.emitter.emit("error:clear", url, ok ? null : "forbidden")
// Once we get a good auth response don't wait to send stuff to the relay
if (ok) {
this.pool.get(url)
this.pool.booted = true
}
},
})
// Eagerly connect and handle AUTH
executor.target.sockets.forEach((socket: any) => {
const {limitation} = this.engine.components.Nip65.getRelayInfo(socket.url)
const waitForBoot = limitation?.payment_required || limitation?.auth_required
// This happens automatically, but kick it off anyway
socket.connect()
// Delay REQ/EVENT until AUTH flow happens. Highly hacky, as this relies on
// overriding the `shouldDeferWork` property of the socket. We do it this way
// so that we're not blocking sending to all the other public relays
if (!bypassBoot && waitForBoot && socket.status === Socket.STATUS.PENDING) {
socket.shouldDeferWork = () => {
return socket.booted && socket.status !== Socket.STATUS.READY
}
setTimeout(() => Object.assign(socket, {booted: true}), 2000)
}
})
return executor
} }
static contributeSelectors({Network}) { publish = ({relays, event, onProgress, timeout = 3000, verb = "EVENT"}: PublishOpts) => {
const relayHasError = url => Boolean(Network.pool.get(url, {autoConnect: false})?.error) const urls = getUrls(relays)
const executor = this.getExecutor(urls, {bypassBoot: verb === "AUTH"})
return {relayHasError} this.emitter.emit("publish", urls)
}
static contributeActions(engine) { log(`Publishing to ${urls.length} relays`, event, urls)
const {Network, User, Events, Nip65, Env} = engine
const getExecutor = (urls, {bypassBoot = false} = {}) => { return new Promise(resolve => {
if (Env.FORCE_RELAYS?.length > 0) { const timeouts = new Set<string>()
urls = Env.FORCE_RELAYS const succeeded = new Set<string>()
const failed = new Set<string>()
const getProgress = () => {
const completed = union(timeouts, succeeded, failed)
const pending = difference(new Set(urls), completed)
return {succeeded, failed, timeouts, completed, pending}
} }
let target const attemptToResolve = () => {
const progress = getProgress()
const muxUrl = User.getSetting("multiplextr_url") if (progress.pending.size === 0) {
log(`Finished publishing to ${urls.length} relays`, event, progress)
// Try to use our multiplexer, but if it fails to connect fall back to relays. If resolve(progress)
// we're only connecting to a single relay, just do it directly, unless we already
// have a connection to the multiplexer open, in which case we're probably doing
// AUTH with a single relay.
if (muxUrl && (urls.length > 1 || Network.pool.has(muxUrl))) {
const socket = Network.pool.get(muxUrl)
if (!socket.error) {
target = new Plex(urls, socket)
}
}
if (!target) {
target = new Relays(urls.map(url => Network.pool.get(url)))
}
const executor = new Executor(target)
executor.handleAuth({
onAuth(url, challenge) {
Network.emitter.emit("error:set", url, "unauthorized")
return Network.authHandler?.(url, challenge)
},
onOk(url, id, ok, message) {
Network.emitter.emit("error:clear", url, ok ? null : "forbidden")
// Once we get a good auth response don't wait to send stuff to the relay
if (ok) {
Network.pool.get(url)
Network.pool.booted = true
}
},
})
// Eagerly connect and handle AUTH
executor.target.sockets.forEach(socket => {
const {limitation} = Nip65.getRelayInfo(socket.url)
const waitForBoot = limitation?.payment_required || limitation?.auth_required
// This happens automatically, but kick it off anyway
socket.connect()
// Delay REQ/EVENT until AUTH flow happens. Highly hacky, as this relies on
// overriding the `shouldDeferWork` property of the socket. We do it this way
// so that we're not blocking sending to all the other public relays
if (!bypassBoot && waitForBoot && socket.status === Socket.STATUS.PENDING) {
socket.shouldDeferWork = () => {
return socket.booted && socket.status !== Socket.STATUS.READY
}
setTimeout(() => Object.assign(socket, {booted: true}), 2000)
}
})
return executor
}
const publish = ({relays, event, onProgress, timeout = 3000, verb = "EVENT"}) => {
const urls = getUrls(relays)
const executor = getExecutor(urls, {bypassBoot: verb === "AUTH"})
Network.emitter.emit("publish", urls)
log(`Publishing to ${urls.length} relays`, event, urls)
return new Promise(resolve => {
const timeouts = new Set()
const succeeded = new Set()
const failed = new Set()
const getProgress = () => {
const completed = union(timeouts, succeeded, failed)
const pending = difference(urls, completed)
return {succeeded, failed, timeouts, completed, pending}
}
const attemptToResolve = () => {
const progress = getProgress()
if (progress.pending.size === 0) {
log(`Finished publishing to ${urls.length} relays`, event, progress)
resolve(progress)
sub.unsubscribe()
executor.target.cleanup()
} else if (onProgress) {
onProgress(progress)
}
}
setTimeout(() => {
for (const url of urls) {
if (!succeeded.has(url) && !failed.has(url)) {
timeouts.add(url)
}
}
attemptToResolve()
}, timeout)
const sub = executor.publish(event, {
verb,
onOk: url => {
succeeded.add(url)
timeouts.delete(url)
failed.delete(url)
attemptToResolve()
},
onError: url => {
failed.add(url)
timeouts.delete(url)
attemptToResolve()
},
})
// Report progress to start
attemptToResolve()
})
}
const subscribe = ({
relays,
filter,
onEose,
onEvent,
onClose,
timeout,
shouldProcess = true,
}: SubscribeOpts) => {
const urls = getUrls(relays)
const executor = getExecutor(urls)
const filters = ensurePlural(filter)
const subscription = new Subscription()
const now = Date.now()
const seen = new Map()
const eose = new Set()
log(`Starting subscription with ${relays.length} relays`, {filters, relays})
Network.emitter.emit("sub:open", urls)
subscription.on("close", () => {
sub.unsubscribe()
executor.target.cleanup()
Network.emitter.emit("sub:close", urls)
onClose?.()
})
if (timeout) {
setTimeout(subscription.close, timeout)
}
const sub = executor.subscribe(filters, {
onEvent: (url, event) => {
const seen_on = seen.get(event.id)
if (seen_on) {
if (!seen_on.includes(url)) {
seen_on.push(url)
}
return
}
Object.assign(event, {
seen_on: [url],
content: event.content || "",
})
seen.set(event.id, event.seen_on)
try {
if (!verifySignature(event)) {
return
}
} catch (e) {
console.error(e)
return
}
if (!matchFilters(filters, event)) {
return
}
Network.emitter.emit("event", {url, event})
if (shouldProcess) {
Events.queue.push(event)
}
onEvent?.(event)
},
onEose: url => {
onEose?.(url)
// Keep track of relay timing stats, but only for the first eose we get
if (!eose.has(url)) {
Network.emitter.emit("eose", url, Date.now() - now)
}
eose.add(url)
if (timeout && eose.size === relays.length) {
subscription.close()
}
},
})
return subscription
}
const count = async filter => {
const filters = ensurePlural(filter)
const executor = getExecutor(Env.COUNT_RELAYS)
return new Promise(resolve => {
const sub = executor.count(filters, {
onCount: (url, {count}) => resolve(count),
})
setTimeout(() => {
resolve(0)
sub.unsubscribe()
executor.target.cleanup()
}, 3000)
})
}
const cursor = opts => new Cursor({...opts, subscribe})
const multiCursor = ({relays, ...opts}) =>
new MultiCursor(relays.map(relay => cursor({relay, ...opts})))
const feed = opts => new Feed({engine, ...opts})
return {subscribe, publish, count, cursor, multiCursor, feed}
}
}
sub.unsubscribe()
executor.target.cleanup()
} else if (onProgress) {
onProgress(progress)
}
}
setTimeout(() => {
for (const url of urls) {
if (!succeeded.has(url) && !failed.has(url)) {
timeouts.add(url)
}
}
attemptToResolve()
}, timeout)
const sub = executor.publish(event, {
verb,
onOk: (url: string) => {
succeeded.add(url)
timeouts.delete(url)
failed.delete(url)
attemptToResolve()
},
onError: (url: string) => {
failed.add(url)
timeouts.delete(url)
attemptToResolve()
},
})
// Report progress to start
attemptToResolve()
})
}
subscribe = ({
relays,
filter,
onEose,
onEvent,
onClose,
timeout,
shouldProcess = true,
}: SubscribeOpts) => {
const urls = getUrls(relays)
const executor = this.getExecutor(urls)
const filters = ensurePlural(filter)
const subscription = new Subscription()
const now = Date.now()
const seen = new Map()
const eose = new Set()
log(`Starting subscription with ${relays.length} relays`, {filters, relays})
this.emitter.emit("sub:open", urls)
subscription.on("close", () => {
sub.unsubscribe()
executor.target.cleanup()
this.emitter.emit("sub:close", urls)
onClose?.()
})
if (timeout) {
setTimeout(subscription.close, timeout)
}
const sub = executor.subscribe(filters, {
onEvent: (url: string, event: Event) => {
const seen_on = seen.get(event.id)
if (seen_on) {
if (!seen_on.includes(url)) {
seen_on.push(url)
}
return
}
Object.assign(event, {
seen_on: [url],
content: event.content || "",
})
seen.set(event.id, event.seen_on)
try {
if (!verifySignature(event)) {
return
}
} catch (e) {
console.error(e)
return
}
if (!matchFilters(filters, event)) {
return
}
this.emitter.emit("event", {url, event})
if (shouldProcess) {
this.engine.components.Events.queue.push(event)
}
onEvent?.(event)
},
onEose: (url: string) => {
onEose?.(url)
// Keep track of relay timing stats, but only for the first eose we get
if (!eose.has(url)) {
this.emitter.emit("eose", url, Date.now() - now)
}
eose.add(url)
if (timeout && eose.size === relays.length) {
subscription.close()
}
},
})
return subscription
}
count = async (filter: Filter | Filter[]) => {
const filters = ensurePlural(filter)
const executor = this.getExecutor(this.engine.Env.COUNT_RELAYS)
return new Promise(resolve => {
const sub = executor.count(filters, {
onCount: (url: string, {count}: {count: number}) => resolve(count),
})
setTimeout(() => {
resolve(0)
sub.unsubscribe()
executor.target.cleanup()
}, 3000)
})
}
cursor = (opts: Partial<CursorOpts>) => new Cursor({...opts, Network: this} as CursorOpts)
multiCursor = ({relays, ...opts}: Partial<CursorOpts> & {relays: string[]}) =>
new MultiCursor(relays.map((relay: string) => this.cursor({relay, ...opts} as CursorOpts)))
feed = (opts: Partial<FeedOpts>) => new Feed({engine: this.engine, ...opts} as FeedOpts)
initialize(engine: Engine) {
this.engine = engine
}
}
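For reference, a minimal sketch of driving the new instance-based Network component from application code. The relay URL and filter are placeholders, `engine` is assumed to be an already-constructed and initialized Engine instance, and only options that appear in SubscribeOpts above are used.

import type {Engine} from "src/engine/Engine"

declare const engine: Engine // assumed to be constructed and initialized elsewhere

const sub = engine.components.Network.subscribe({
  relays: ["wss://relay.example.com"], // placeholder relay
  filter: {kinds: [1], limit: 10},
  timeout: 10_000,
  onEvent: event => console.log("event", event.id, "seen on", event.seen_on),
  onEose: url => console.log("eose from", url),
})

// Close explicitly when done; the close handler above unsubscribes
// and cleans up the executor target.
sub.close()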

View File

@ -2,104 +2,86 @@ import {ensurePlural} from "hurdak"
import {now} from "src/util/misc" import {now} from "src/util/misc"
import {Tags} from "src/util/nostr" import {Tags} from "src/util/nostr"
import type {GraphEntry} from "src/engine/types" import type {GraphEntry} from "src/engine/types"
import {collection} from "../util/store" import type {Engine} from "src/engine/Engine"
import {collection} from "src/engine/util/store"
export class Nip02 { export class Nip02 {
static contributeState() { graph = collection<GraphEntry>("pubkey")
const graph = collection<GraphEntry>("pubkey")
return {graph} getPetnames = (pubkey: string) => this.graph.key(pubkey).get()?.petnames || []
getMutedTags = (pubkey: string) => this.graph.key(pubkey).get()?.mutes || []
getFollowsSet = (pubkeys: string | string[]) => {
const follows = new Set<string>()
for (const pubkey of ensurePlural(pubkeys)) {
for (const tag of this.getPetnames(pubkey)) {
follows.add(tag[1])
}
}
return follows
} }
static contributeActions({Nip02}) { getMutesSet = (pubkeys: string | string[]) => {
const getPetnames = pubkey => Nip02.graph.key(pubkey).get()?.petnames || [] const mutes = new Set<string>()
const getMutedTags = pubkey => Nip02.graph.key(pubkey).get()?.mutes || [] for (const pubkey of ensurePlural(pubkeys)) {
for (const tag of this.getMutedTags(pubkey)) {
const getFollowsSet = pubkeys => { mutes.add(tag[1])
const follows = new Set()
for (const pubkey of ensurePlural(pubkeys)) {
for (const tag of getPetnames(pubkey)) {
follows.add(tag[1])
}
} }
return follows
} }
const getMutesSet = pubkeys => { return mutes
const mutes = new Set()
for (const pubkey of ensurePlural(pubkeys)) {
for (const tag of getMutedTags(pubkey)) {
mutes.add(tag[1])
}
}
return mutes
}
const getFollows = pubkeys => Array.from(getFollowsSet(pubkeys))
const getMutes = pubkeys => Array.from(getMutesSet(pubkeys))
const getNetworkSet = (pubkeys, includeFollows = false) => {
const follows = getFollowsSet(pubkeys)
const network = includeFollows ? follows : new Set()
for (const pubkey of getFollows(follows)) {
if (!follows.has(pubkey)) {
network.add(pubkey)
}
}
return network
}
const getNetwork = pubkeys => Array.from(getNetworkSet(pubkeys))
const isFollowing = (a, b) => getFollowsSet(a).has(b)
const isIgnoring = (a, b) => getMutesSet(a).has(b)
return {
getPetnames,
getMutedTags,
getFollowsSet,
getMutesSet,
getFollows,
getMutes,
getNetworkSet,
getNetwork,
isFollowing,
isIgnoring,
}
} }
static initialize({Events, Nip02}) { getFollows = (pubkeys: string | string[]) => Array.from(this.getFollowsSet(pubkeys))
Events.addHandler(3, e => {
const entry = Nip02.graph.key(e.pubkey).get() getMutes = (pubkeys: string | string[]) => Array.from(this.getMutesSet(pubkeys))
getNetworkSet = (pubkeys: string | string[], includeFollows = false) => {
const follows = this.getFollowsSet(pubkeys)
const network = includeFollows ? follows : new Set<string>()
for (const pubkey of this.getFollows(Array.from(follows))) {
if (!follows.has(pubkey)) {
network.add(pubkey)
}
}
return network
}
getNetwork = (pubkeys: string | string[]) => Array.from(this.getNetworkSet(pubkeys))
isFollowing = (a: string, b: string) => this.getFollowsSet(a).has(b)
isIgnoring = (a: string, b: string) => this.getMutesSet(a).has(b)
initialize(engine: Engine) {
engine.components.Events.addHandler(3, e => {
const entry = this.graph.key(e.pubkey).get()
if (e.created_at < entry?.petnames_updated_at) { if (e.created_at < entry?.petnames_updated_at) {
return return
} }
Nip02.graph.key(e.pubkey).merge({ this.graph.key(e.pubkey).merge({
updated_at: now(), updated_at: now(),
petnames_updated_at: e.created_at, petnames_updated_at: e.created_at,
petnames: Tags.from(e).type("p").all(), petnames: Tags.from(e).type("p").all(),
}) })
}) })
Events.addHandler(10000, e => { engine.components.Events.addHandler(10000, e => {
const entry = Nip02.graph.key(e.pubkey).get() const entry = this.graph.key(e.pubkey).get()
if (e.created_at < entry?.mutes_updated_at) { if (e.created_at < entry?.mutes_updated_at) {
return return
} }
Nip02.graph.key(e.pubkey).merge({ this.graph.key(e.pubkey).merge({
updated_at: now(), updated_at: now(),
mutes_updated_at: e.created_at, mutes_updated_at: e.created_at,
mutes: Tags.from(e).type(["e", "p"]).all(), mutes: Tags.from(e).type(["e", "p"]).all(),
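As a usage sketch, the follow and mute graph built above can be queried directly through the new instance methods. The pubkeys below are placeholders and `engine` is assumed to be an initialized Engine instance.

import type {Engine} from "src/engine/Engine"

declare const engine: Engine // assumed initialized elsewhere

const {Nip02} = engine.components
const me = "<hex-pubkey>" // placeholder

// Everyone listed in the kind 3 contact lists we've seen for `me`
const follows = Nip02.getFollowsSet(me)

// One hop further out: follows-of-follows, excluding direct follows
const network = Nip02.getNetworkSet(me)

console.log(follows.size, network.size, Nip02.isFollowing(me, "<other-pubkey>"))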

View File

@ -2,51 +2,47 @@ import {tryFunc} from "hurdak"
import {find, last, uniq, pluck} from "ramda" import {find, last, uniq, pluck} from "ramda"
import {tryJson} from "src/util/misc" import {tryJson} from "src/util/misc"
import {Tags, appDataKeys} from "src/util/nostr" import {Tags, appDataKeys} from "src/util/nostr"
import type {Contact, Message} from "src/engine/types" import type {Contact, Profile, Message, Event} from "src/engine/types"
import {collection, derived} from "../util/store" import type {Engine} from "src/engine/Engine"
import {collection, derived} from "src/engine/util/store"
const getHints = e => pluck("url", Tags.from(e).relays()) const getHints = (e: Event) => pluck("url", Tags.from(e).relays())
const messageIsNew = ({last_checked, last_received, last_sent}: Contact) => const messageIsNew = ({last_checked, last_received, last_sent}: Contact) =>
last_received > Math.max(last_sent || 0, last_checked || 0) last_received > Math.max(last_sent || 0, last_checked || 0)
export class Nip04 { export class Nip04 {
static contributeState() { engine: Engine
const contacts = collection<Contact>("pubkey") contacts = collection<Contact>("pubkey")
const messages = collection<Message>("id") messages = collection<Message>("id")
const hasNewMessages = derived( hasNewMessages = derived(
contacts, this.contacts,
find(e => e.last_sent > 0 && messageIsNew(e)) find((e: Contact) => e.last_sent > 0 && messageIsNew(e))
) )
return {contacts, messages, hasNewMessages} searchContacts = this.messages.derived($messages => {
} const pubkeySet = new Set(pluck("pubkey", $messages))
const searchProfiles = this.engine.components.Directory.searchProfiles.get()
static contributeSelectors({Nip04, Directory}) { return (q: string) =>
const searchContacts = Nip04.messages.derived($messages => { searchProfiles(q)
const pubkeySet = new Set(pluck("pubkey", $messages)) .filter((p: Profile) => pubkeySet.has(p.pubkey))
const searchProfiles = Directory.searchProfiles.get() .map((p: Profile) => this.contacts.key(p.pubkey).get())
})
return q => initialize(engine: Engine) {
searchProfiles(q) this.engine = engine
.filter(p => pubkeySet.has(p.pubkey))
.map(p => Nip04.contacts.key(p.pubkey).get())
})
return {messageIsNew, searchContacts} engine.components.Events.addHandler(30078, async e => {
}
static initialize({Events, Nip04, Keys, Crypt}) {
Events.addHandler(30078, async e => {
if (Tags.from(e).getMeta("d") === appDataKeys.NIP04_LAST_CHECKED) { if (Tags.from(e).getMeta("d") === appDataKeys.NIP04_LAST_CHECKED) {
await tryJson(async () => { await tryJson(async () => {
const payload = await Crypt.decryptJson(e.content) const payload = await engine.components.Crypt.decryptJson(e.content)
for (const key of Object.keys(payload)) { for (const key of Object.keys(payload)) {
// Backwards compat from when we used to prefix id/pubkey // Backwards compat from when we used to prefix id/pubkey
const pubkey = last(key.split("/")) const pubkey = last(key.split("/"))
const contact = Nip04.contacts.key(pubkey).get() const contact = this.contacts.key(pubkey).get()
const last_checked = Math.max(payload[pubkey], contact?.last_checked || 0) const last_checked = Math.max(payload[pubkey], contact?.last_checked || 0)
// A bunch of junk got added to this setting. Integer keys, settings, etc // A bunch of junk got added to this setting. Integer keys, settings, etc
@ -54,50 +50,51 @@ export class Nip04 {
continue continue
} }
Nip04.contacts.key(pubkey).merge({last_checked}) this.contacts.key(pubkey).merge({last_checked})
} }
}) })
} }
}) })
Events.addHandler(4, async e => { engine.components.Events.addHandler(4, async e => {
if (!Keys.canSign.get()) { if (!engine.components.Keys.canSign.get()) {
return return
} }
const author = e.pubkey const author = e.pubkey
const recipient = Tags.from(e).type("p").values().first() const recipient = Tags.from(e).type("p").values().first()
if (![author, recipient].includes(Keys.pubkey.get())) { if (![author, recipient].includes(engine.components.Keys.pubkey.get())) {
return return
} }
if (Nip04.messages.key(e.id).get()) { if (this.messages.key(e.id).get()) {
return return
} }
await tryFunc(async () => { await tryFunc(async () => {
const other = Keys.pubkey.get() === author ? recipient : author const other = engine.components.Keys.pubkey.get() === author ? recipient : author
Nip04.messages.key(e.id).set({ this.messages.key(e.id).set({
id: e.id,
contact: other, contact: other,
pubkey: e.pubkey, pubkey: e.pubkey,
created_at: e.created_at, created_at: e.created_at,
content: await Crypt.decrypt(other, e.content), content: await engine.components.Crypt.decrypt(other, e.content),
tags: e.tags, tags: e.tags,
}) })
if (Keys.pubkey.get() === author) { if (engine.components.Keys.pubkey.get() === author) {
const contact = Nip04.contacts.key(recipient).get() const contact = this.contacts.key(recipient).get()
Nip04.contacts.key(recipient).merge({ this.contacts.key(recipient).merge({
last_sent: e.created_at, last_sent: e.created_at,
hints: uniq(getHints(e).concat(contact?.hints || [])), hints: uniq(getHints(e).concat(contact?.hints || [])),
}) })
} else { } else {
const contact = Nip04.contacts.key(author).get() const contact = this.contacts.key(author).get()
Nip04.contacts.key(author).merge({ this.contacts.key(author).merge({
last_received: e.created_at, last_received: e.created_at,
hints: uniq(getHints(e).concat(contact?.hints || [])), hints: uniq(getHints(e).concat(contact?.hints || [])),
}) })
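A small sketch of reading the derived DM state exposed above. The query string is a placeholder, `engine` is assumed to be initialized, and only the `get`-style accessors shown in this component are used.

import type {Engine} from "src/engine/Engine"

declare const engine: Engine // assumed initialized elsewhere

const {Nip04} = engine.components

// hasNewMessages is derived from contacts; truthy when any conversation
// has a message newer than last_checked/last_sent
if (Nip04.hasNewMessages.get()) {
  console.log("you have unread DMs")
}

// searchContacts holds a search function restricted to known message authors
const results = Nip04.searchContacts.get()("alice")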

View File

@ -3,35 +3,30 @@ import {nip05} from "nostr-tools"
import {tryFunc} from "hurdak" import {tryFunc} from "hurdak"
import {now, tryJson} from "src/util/misc" import {now, tryJson} from "src/util/misc"
import type {Handle} from "src/engine/types" import type {Handle} from "src/engine/types"
import {collection} from "../util/store" import type {Engine} from "src/engine/Engine"
import {collection} from "src/engine/util/store"
export class Nip05 { export class Nip05 {
static contributeState() { handles = collection<Handle>("pubkey")
return {
handles: collection<Handle>("pubkey"),
}
}
static contributeSelectors({Nip05}) { getHandle = (pubkey: string) => this.handles.key(pubkey).get()
const getHandle = pubkey => Nip05.handles.key(pubkey).get()
const displayHandle = handle => displayHandle = (handle: Handle) =>
handle.address.startsWith("_@") ? last(handle.address.split("@")) : handle.address handle.address.startsWith("_@") ? last(handle.address.split("@")) : handle.address
return {getHandle, displayHandle} initialize(engine: Engine) {
} engine.components.Events.addHandler(0, e => {
static initialize({Events, Nip05}) {
Events.addHandler(0, e => {
tryJson(async () => { tryJson(async () => {
const kind0 = JSON.parse(e.content) const kind0 = JSON.parse(e.content)
const handle = Nip05.handles.key(e.pubkey) const handle = this.handles.key(e.pubkey)
if (!kind0.nip05 || e.created_at < handle.get()?.created_at) { if (!kind0.nip05 || e.created_at < (handle.get()?.created_at || Infinity)) {
return return
} }
const profile = await tryFunc(() => nip05.queryProfile(kind0.nip05), true) const profile = (await tryFunc(() => nip05.queryProfile(kind0.nip05))) as null | {
pubkey: string
}
if (profile?.pubkey !== e.pubkey) { if (profile?.pubkey !== e.pubkey) {
return return
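For illustration, resolving a verified handle for display might look like the sketch below; the pubkey is a placeholder and `engine` is assumed to be initialized.

import type {Engine} from "src/engine/Engine"

declare const engine: Engine // assumed initialized elsewhere

const handle = engine.components.Nip05.getHandle("<hex-pubkey>")

// displayHandle collapses "_@domain" addresses down to the bare domain
const label = handle ? engine.components.Nip05.displayHandle(handle) : null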

View File

@ -1,72 +1,63 @@
import {find, last, pick, uniq} from "ramda" import {find, last, pick, uniq} from "ramda"
import {tryJson, fuzzy, now} from "src/util/misc" import {tryJson, fuzzy, now} from "src/util/misc"
import {Tags, appDataKeys, channelAttrs} from "src/util/nostr" import {Tags, appDataKeys, channelAttrs} from "src/util/nostr"
import type {Channel, Message} from "src/engine/types" import type {Channel, Event, Message} from "src/engine/types"
import {collection, derived} from "../util/store" import type {Engine} from "src/engine/Engine"
import {collection, derived} from "src/engine/util/store"
import type {Readable} from "src/engine/util/store"
const messageIsNew = ({last_checked, last_received, last_sent}: Channel) => const messageIsNew = ({last_checked, last_received, last_sent}: Channel) =>
last_received > Math.max(last_sent || 0, last_checked || 0) last_received > Math.max(last_sent || 0, last_checked || 0)
export class Nip28 { export class Nip28 {
static contributeState() { channels = collection<Channel>("id")
const channels = collection<Channel>("id") messages = collection<Message>("id")
const messages = collection<Message>("id")
const hasNewMessages = derived( hasNewMessages = derived(
channels, this.channels,
find(e => { find((c: Channel) => c.joined && messageIsNew(c))
return e.type === "public" && e.joined > 0 && messageIsNew(e) )
getSearchChannels = (channels: Readable<Channel[]>) =>
channels.derived($channels => {
return fuzzy($channels, {
keys: ["name", {name: "about", weight: 0.5}],
threshold: 0.3,
}) })
) })
return {channels, messages, hasNewMessages} searchChannels = this.getSearchChannels(this.channels)
}
static contributeSelectors({Nip28}) { initialize(engine: Engine) {
const getSearchChannels = channels => engine.components.Events.addHandler(40, (e: Event) => {
channels.derived($channels => { const channel = this.channels.key(e.id).get()
return fuzzy($channels, {
keys: ["name", {name: "about", weight: 0.5}],
threshold: 0.3,
})
})
const searchChannels = getSearchChannels(Nip28.channels)
return {messageIsNew, getSearchChannels, searchChannels}
}
static initialize({Events, Nip28, Keys, Crypt}) {
Events.addHandler(40, e => {
const channel = Nip28.channels.key(e.id).get()
if (e.created_at < channel?.updated_at) { if (e.created_at < channel?.updated_at) {
return return
} }
const content = tryJson(() => pick(channelAttrs, JSON.parse(e.content))) const content = tryJson(() => pick(channelAttrs, JSON.parse(e.content))) as Partial<Channel>
if (!content?.name) { if (!content?.name) {
return return
} }
Nip28.channels.key(e.id).merge({ this.channels.key(e.id).merge({
type: "public", ...content,
pubkey: e.pubkey, pubkey: e.pubkey,
updated_at: now(), updated_at: now(),
hints: Tags.from(e).relays(), hints: Tags.from(e).relays(),
...content,
}) })
}) })
Events.addHandler(41, e => { engine.components.Events.addHandler(41, (e: Event) => {
const channelId = Tags.from(e).getMeta("e") const channelId = Tags.from(e).getMeta("e")
if (!channelId) { if (!channelId) {
return return
} }
const channel = Nip28.channels.key(channelId).get() const channel = this.channels.key(channelId).get()
if (e.created_at < channel?.updated_at) { if (e.created_at < channel?.updated_at) {
return return
@ -76,29 +67,29 @@ export class Nip28 {
return return
} }
const content = tryJson(() => pick(channelAttrs, JSON.parse(e.content))) const content = tryJson(() => pick(channelAttrs, JSON.parse(e.content))) as Partial<Channel>
if (!content?.name) { if (!content?.name) {
return return
} }
Nip28.channels.key(channelId).merge({ this.channels.key(channelId).merge({
...content,
pubkey: e.pubkey, pubkey: e.pubkey,
updated_at: now(), updated_at: now(),
hints: Tags.from(e).relays(), hints: Tags.from(e).relays(),
...content,
}) })
}) })
Events.addHandler(30078, async e => { engine.components.Events.addHandler(30078, async (e: Event) => {
if (Tags.from(e).getMeta("d") === appDataKeys.NIP28_LAST_CHECKED) { if (Tags.from(e).getMeta("d") === appDataKeys.NIP28_LAST_CHECKED) {
await tryJson(async () => { await tryJson(async () => {
const payload = await Crypt.decryptJson(e.content) const payload = await engine.components.Crypt.decryptJson(e.content)
for (const key of Object.keys(payload)) { for (const key of Object.keys(payload)) {
// Backwards compat from when we used to prefix id/pubkey // Backwards compat from when we used to prefix id/pubkey
const id = last(key.split("/")) const id = last(key.split("/"))
const channel = Nip28.channels.key(id).get() const channel = this.channels.key(id).get()
const last_checked = Math.max(payload[id], channel?.last_checked || 0) const last_checked = Math.max(payload[id], channel?.last_checked || 0)
// A bunch of junk got added to this setting. Integer keys, settings, etc // A bunch of junk got added to this setting. Integer keys, settings, etc
@ -106,35 +97,35 @@ export class Nip28 {
continue continue
} }
Nip28.channels.key(id).merge({last_checked}) this.channels.key(id).merge({last_checked})
} }
}) })
} }
}) })
Events.addHandler(30078, async e => { engine.components.Events.addHandler(30078, async (e: Event) => {
if (Tags.from(e).getMeta("d") === appDataKeys.NIP28_ROOMS_JOINED) { if (Tags.from(e).getMeta("d") === appDataKeys.NIP28_ROOMS_JOINED) {
await tryJson(async () => { await tryJson(async () => {
const channelIds = await Crypt.decryptJson(e.content) const channelIds = await engine.components.Crypt.decryptJson(e.content)
// Just a bug from when I was building the feature, remove someday // Just a bug from when I was building the feature, remove someday
if (!Array.isArray(channelIds)) { if (!Array.isArray(channelIds)) {
return return
} }
Nip28.channels.get().forEach(channel => { this.channels.get().forEach(channel => {
if (channel.joined && !channelIds.includes(channel.id)) { if (channel.joined && !channelIds.includes(channel.id)) {
Nip28.channels.key(channel.id).merge({joined: false}) this.channels.key(channel.id).merge({joined: false})
} else if (!channel.joined && channelIds.includes(channel.id)) { } else if (!channel.joined && channelIds.includes(channel.id)) {
Nip28.channels.key(channel.id).merge({joined: true}) this.channels.key(channel.id).merge({joined: true})
} }
}) })
}) })
} }
}) })
Events.addHandler(42, e => { engine.components.Events.addHandler(42, (e: Event) => {
if (Nip28.messages.key(e.id).exists()) { if (this.messages.key(e.id).exists()) {
return return
} }
@ -145,10 +136,10 @@ export class Nip28 {
return return
} }
const channel = Nip28.channels.key(channelId).get() const channel = this.channels.key(channelId).get()
const hints = uniq(tags.relays().concat(channel?.hints || [])) const hints = uniq(tags.relays().concat(channel?.hints || []))
Nip28.messages.key(e.id).merge({ this.messages.key(e.id).merge({
channel: channelId, channel: channelId,
pubkey: e.pubkey, pubkey: e.pubkey,
created_at: e.created_at, created_at: e.created_at,
@ -156,10 +147,10 @@ export class Nip28 {
tags: e.tags, tags: e.tags,
}) })
if (e.pubkey === Keys.pubkey.get()) { if (e.pubkey === engine.components.Keys.pubkey.get()) {
Nip28.channels.key(channelId).merge({last_sent: e.created_at, hints}) this.channels.key(channelId).merge({last_sent: e.created_at, hints})
} else { } else {
Nip28.channels.key(channelId).merge({last_received: e.created_at, hints}) this.channels.key(channelId).merge({last_received: e.created_at, hints})
} }
}) })
} }
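As a sketch of consuming the chat state above: the search store holds a fuzzy matcher over channel name and about, and hasNewMessages is truthy when a joined channel has unread activity. The query string is a placeholder and `engine` is assumed to be initialized.

import type {Engine} from "src/engine/Engine"

declare const engine: Engine // assumed initialized elsewhere

const {Nip28} = engine.components

// Fuzzy-search known channels by name/about
const channels = Nip28.searchChannels.get()("gardening")

// Unread indicator across joined channels
const hasUnread = Boolean(Nip28.hasNewMessages.get())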

View File

@ -2,13 +2,19 @@ import {Fetch, tryFunc} from "hurdak"
import {now, tryJson, hexToBech32, bech32ToHex} from "src/util/misc" import {now, tryJson, hexToBech32, bech32ToHex} from "src/util/misc"
import {invoiceAmount} from "src/util/lightning" import {invoiceAmount} from "src/util/lightning"
import {Tags} from "src/util/nostr" import {Tags} from "src/util/nostr"
import type {Zapper} from "src/engine/types" import type {Engine} from "src/engine/Engine"
import {collection} from "../util/store" import type {Zapper, Event} from "src/engine/types"
import {collection} from "src/engine/util/store"
const getLnUrl = address => { type ZapEvent = Event & {
invoiceAmount: number
request: Event
}
const getLnUrl = (address: string): string => {
// Try to parse it as a lud06 LNURL // Try to parse it as a lud06 LNURL
if (address.startsWith("lnurl1")) { if (address.startsWith("lnurl1")) {
return tryFunc(() => bech32ToHex(address)) return tryFunc(() => bech32ToHex(address)) as string
} }
// Try to parse it as a lud16 address // Try to parse it as a lud16 address
@ -22,70 +28,68 @@ const getLnUrl = address => {
} }
export class Nip57 { export class Nip57 {
static contributeState() { zappers = collection<Zapper>("pubkey")
const zappers = collection<Zapper>("pubkey")
return {zappers} processZaps = (zaps: Event[], pubkey: string) => {
} const zapper = this.zappers.key(pubkey).get()
static contributeActions({Nip57}) { if (!zapper) {
const processZaps = (zaps, pubkey) => { return []
const zapper = Nip57.zappers.key(pubkey).get()
if (!zapper) {
return []
}
return zaps
.map(zap => {
const zapMeta = Tags.from(zap).asMeta()
return tryJson(() => ({
...zap,
invoiceAmount: invoiceAmount(zapMeta.bolt11),
request: JSON.parse(zapMeta.description),
}))
})
.filter(zap => {
if (!zap) {
return false
}
// Don't count zaps that the user sent himself
if (zap.request.pubkey === pubkey) {
return false
}
const {invoiceAmount, request} = zap
const reqMeta = Tags.from(request).asMeta()
// Verify that the zapper actually sent the requested amount (if it was supplied)
if (reqMeta.amount && parseInt(reqMeta.amount) !== invoiceAmount) {
return false
}
// If the sending client provided an lnurl tag, verify that too
if (reqMeta.lnurl && reqMeta.lnurl !== zapper.lnurl) {
return false
}
// Verify that the zap note actually came from the recipient's zapper
if (zapper.nostrPubkey !== zap.pubkey) {
return false
}
return true
})
} }
return {processZaps} return zaps
.map((zap: Event) => {
const zapMeta = Tags.from(zap).asMeta() as {
bolt11: string
description: string
}
return tryJson(() => ({
...zap,
invoiceAmount: invoiceAmount(zapMeta.bolt11),
request: JSON.parse(zapMeta.description),
})) as ZapEvent
})
.filter((zap: ZapEvent) => {
if (!zap) {
return false
}
// Don't count zaps that the user sent himself
if (zap.request.pubkey === pubkey) {
return false
}
const {invoiceAmount, request} = zap
const reqMeta = Tags.from(request).asMeta() as {
amount?: string
lnurl?: string
}
// Verify that the zapper actually sent the requested amount (if it was supplied)
if (reqMeta.amount && parseInt(reqMeta.amount) !== invoiceAmount) {
return false
}
// If the sending client provided an lnurl tag, verify that too
if (reqMeta.lnurl && reqMeta.lnurl !== zapper.lnurl) {
return false
}
// Verify that the zap note actually came from the recipient's zapper
if (zapper.nostrPubkey !== zap.pubkey) {
return false
}
return true
})
} }
static initialize({Events, Nip57}) { initialize(engine: Engine) {
Events.addHandler(0, e => { engine.components.Events.addHandler(0, (e: Event) => {
tryJson(async () => { tryJson(async () => {
const kind0 = JSON.parse(e.content) const kind0 = JSON.parse(e.content)
const zapper = Nip57.zappers.key(e.pubkey) const zapper = this.zappers.key(e.pubkey)
const address = (kind0.lud16 || kind0.lud06 || "").toLowerCase() const address = (kind0.lud16 || kind0.lud06 || "").toLowerCase()
if (!address || e.created_at < zapper.get()?.created_at) { if (!address || e.created_at < zapper.get()?.created_at) {
@ -98,7 +102,7 @@ export class Nip57 {
return return
} }
const result = await tryFunc(() => Fetch.fetchJson(url), true) const result = (await tryFunc(() => Fetch.fetchJson(url))) as any
if (!result?.allowsNostr || !result?.nostrPubkey) { if (!result?.allowsNostr || !result?.nostrPubkey) {
return return
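To illustrate processZaps above: it filters kind 9735 zap receipts down to those that verifiably came from the recipient's zapper, so a total can be summed from the parsed invoice amounts. The variables below are placeholders and `engine` is assumed to be initialized.

import type {Engine} from "src/engine/Engine"
import type {Event} from "src/engine/types"

declare const engine: Engine // assumed initialized elsewhere
declare const zapReceipts: Event[] // kind 9735 events gathered elsewhere

const recipient = "<hex-pubkey>" // placeholder

const valid = engine.components.Nip57.processZaps(zapReceipts, recipient)

// invoiceAmount is parsed from the bolt11 invoice in each receipt
const total = valid.reduce((sum, zap) => sum + zap.invoiceAmount, 0)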

View File

@ -3,229 +3,215 @@ import {first, chain, Fetch} from "hurdak"
import {fuzzy, tryJson, now} from "src/util/misc" import {fuzzy, tryJson, now} from "src/util/misc"
import {warn} from "src/util/logger" import {warn} from "src/util/logger"
import {normalizeRelayUrl, findReplyId, isShareableRelay, Tags} from "src/util/nostr" import {normalizeRelayUrl, findReplyId, isShareableRelay, Tags} from "src/util/nostr"
import type {Relay, RelayInfo, RelayPolicy} from "src/engine/types" import type {Engine} from "src/engine/Engine"
import {derived, collection} from "../util/store" import type {Event, Relay, RelayInfo, RelayPolicy, RelayPolicyEntry} from "src/engine/types"
import {derived, collection} from "src/engine/util/store"
export class Nip65 { export class Nip65 {
static contributeState() { engine: Engine
const relays = collection<Relay>("url") relays = collection<Relay>("url")
const policies = collection<RelayPolicy>("pubkey") policies = collection<RelayPolicy>("pubkey")
return {relays, policies} addRelay = (url: string) => {
} if (isShareableRelay(url)) {
const relay = this.relays.key(url).get()
static contributeActions({Env, Nip65, Network, Meta, User}) { this.relays.key(url).merge({
const addRelay = url => { count: inc(relay?.count || 0),
if (isShareableRelay(url)) { first_seen: relay?.first_seen || now(),
const relay = Nip65.relays.key(url).get() info: {
last_checked: 0,
Nip65.relays.key(url).merge({ },
count: inc(relay?.count || 0), })
first_seen: relay?.first_seen || now(),
info: {
last_checked: 0,
},
})
}
}
const setPolicy = ({pubkey, created_at}, relays) => {
if (relays?.length > 0) {
if (created_at < Nip65.policies.key(pubkey).get()?.created_at) {
return
}
Nip65.policies.key(pubkey).merge({
created_at,
updated_at: now(),
relays: uniqBy(prop("url"), relays).map(relay => {
addRelay(relay.url)
return {read: true, write: true, ...relay}
}),
})
}
}
const getRelay = (url: string): Relay => Nip65.relays.key(url).get() || {url}
const getRelayInfo = (url: string): RelayInfo => getRelay(url)?.info || {}
const displayRelay = ({url}) => last(url.split("://"))
const searchRelays = derived(Nip65.relays, $relays => fuzzy($relays.values(), {keys: ["url"]}))
const getSearchRelays = () => {
const searchableRelayUrls = Nip65.relays
.get()
.filter(r => (r.info?.supported_nips || []).includes(50))
.map(prop("url"))
return uniq(Env.SEARCH_RELAYS.concat(searchableRelayUrls)).slice(0, 8)
}
const getPubkeyRelays = (pubkey, mode = null) => {
const relays = Nip65.policies.key(pubkey).get()?.relays || []
return mode ? relays.filter(prop(mode)) : relays
}
const getPubkeyRelayUrls = (pubkey, mode = null) => pluck("url", getPubkeyRelays(pubkey, mode))
// Smart relay selection
//
// From Mike Dilger:
// 1) Other people's write relays — pull events from people you follow,
// including their contact lists
// 2) Other people's read relays — push events that tag them (replies or just tagging).
// However, these may be authenticated, use with caution
// 3) Your write relays — write events you post to your microblog feed for the
// world to see. ALSO write your contact list. ALSO read back your own contact list.
// 4) Your read relays — read events that tag you. ALSO both write and read
// client-private data like client configuration events or anything that the world
// doesn't need to see.
// 5) Advertise relays — write and read back your own relay list
const selectHints = (limit, hints) => {
const seen = new Set()
const ok = []
const bad = []
for (const url of chain(hints, User.getRelayUrls("write"), Env.DEFAULT_RELAYS)) {
if (seen.has(url)) {
continue
}
seen.add(url)
// Filter out relays that appear to be broken or slow
if (!isShareableRelay(url)) {
bad.push(url)
} else if (Network.relayHasError(url) || first(Meta.getRelayQuality(url)) < 0.5) {
bad.push(url)
} else {
ok.push(url)
}
if (ok.length > limit) {
break
}
}
// If we don't have enough hints, use the broken ones
return ok.concat(bad).slice(0, limit)
}
const hintSelector =
generateHints =>
(limit, ...args) =>
selectHints(limit, generateHints(...args))
const getPubkeyHints = hintSelector(function* (pubkey, mode = "write") {
const other = mode === "write" ? "read" : "write"
yield* getPubkeyRelayUrls(pubkey, mode)
yield* getPubkeyRelayUrls(pubkey, other)
})
const getEventHints = hintSelector(function* (event) {
yield* event.seen_on || []
yield* getPubkeyHints(null, event.pubkey)
})
// If we're looking for an event's children, the read relays the author has
// advertised would be the most reliable option, since well-behaved clients
// will write replies there. However, this may include spam, so we may want
// to read from the current user's network's read relays instead.
const getReplyHints = hintSelector(function* (event) {
yield* getPubkeyRelayUrls(event.pubkey, "write")
yield* event.seen_on || []
yield* getPubkeyRelayUrls(event.pubkey, "read")
})
// If we're looking for an event's parent, tags are the most reliable hint,
// but we can also look at where the author of the note reads from
const getParentHints = hintSelector(function* (event) {
const parentId = findReplyId(event)
yield* Tags.from(event).equals(parentId).relays()
yield* event.seen_on || []
yield* getPubkeyHints(null, event.pubkey, "read")
})
// If we're replying or reacting to an event, we want the author to know, as well as
// anyone else who is tagged in the original event or the reply. Get everyone's read
// relays. Limit how many per pubkey we publish to though. We also want to advertise
// our content to our followers, so publish to our write relays as well.
const getPublishHints = (limit, event, extraRelays = []) => {
const tags = Tags.from(event)
const pubkeys = tags.type("p").values().all().concat(event.pubkey)
const hintGroups = pubkeys.map(pubkey => getPubkeyHints(3, pubkey, "read"))
return mergeHints(limit, hintGroups.concat([extraRelays]))
}
const mergeHints = (limit, groups) => {
const scores = {} as Record<string, any>
for (const hints of groups) {
hints.forEach((hint, i) => {
const score = 1 / (i + 1) / hints.length
if (!scores[hint]) {
scores[hint] = {score: 0, count: 0}
}
scores[hint].score += score
scores[hint].count += 1
})
}
// Use the log-sum-exp and a weighted sum
for (const score of Object.values(scores)) {
const weight = Math.log(groups.length / score.count)
score.score = weight + Math.log1p(Math.exp(score.score - score.count))
}
return sortBy(([hint, {score}]) => -score, Object.entries(scores))
.map(nth(0))
.slice(0, limit)
}
return {
addRelay,
setPolicy,
getRelay,
getRelayInfo,
displayRelay,
searchRelays,
getSearchRelays,
getPubkeyRelays,
getPubkeyRelayUrls,
selectHints,
hintSelector,
getPubkeyHints,
getEventHints,
getReplyHints,
getParentHints,
getPublishHints,
mergeHints,
} }
} }
static initialize({Env, Events, Nip65}) { setPolicy = ({pubkey, created_at}: {pubkey: string, created_at: number}, relays: RelayPolicyEntry[]) => {
Events.addHandler(2, e => { if (relays?.length > 0) {
if (created_at < this.policies.key(pubkey).get()?.created_at) {
return
}
this.policies.key(pubkey).merge({
created_at,
updated_at: now(),
relays: uniqBy(prop("url"), relays).map((relay: RelayPolicyEntry) => {
this.addRelay(relay.url)
return {read: true, write: true, ...relay}
}),
})
}
}
getRelay = (url: string): Relay => this.relays.key(url).get() || {url}
getRelayInfo = (url: string): RelayInfo => this.getRelay(url)?.info || {}
displayRelay = ({url}: Relay) => last(url.split("://"))
searchRelays = derived(this.relays, $relays => fuzzy($relays.values(), {keys: ["url"]}))
getSearchRelays = () => {
const searchableRelayUrls = this.relays
.get()
.filter(r => (r.info?.supported_nips || []).includes(50))
.map(prop("url"))
return uniq(this.engine.Env.SEARCH_RELAYS.concat(searchableRelayUrls)).slice(0, 8)
}
getPubkeyRelays = (pubkey: string, mode: string = null) => {
const relays = this.policies.key(pubkey).get()?.relays || []
return mode ? relays.filter(prop(mode)) : relays
}
getPubkeyRelayUrls = (pubkey: string, mode: string = null) =>
pluck("url", this.getPubkeyRelays(pubkey, mode))
// Smart relay selection
//
// From Mike Dilger:
// 1) Other people's write relays — pull events from people you follow,
// including their contact lists
// 2) Other people's read relays — push events that tag them (replies or just tagging).
// However, these may be authenticated, use with caution
// 3) Your write relays — write events you post to your microblog feed for the
// world to see. ALSO write your contact list. ALSO read back your own contact list.
// 4) Your read relays — read events that tag you. ALSO both write and read
// client-private data like client configuration events or anything that the world
// doesn't need to see.
// 5) Advertise relays — write and read back your own relay list
selectHints = (limit: number, hints: Iterable<string>) => {
const seen = new Set()
const ok = []
const bad = []
for (const url of chain(
hints,
this.engine.components.User.getRelayUrls("write"),
this.engine.Env.DEFAULT_RELAYS
)) {
if (seen.has(url)) {
continue
}
seen.add(url)
// Filter out relays that appear to be broken or slow
if (!isShareableRelay(url)) {
bad.push(url)
} else if (
this.engine.components.Network.relayHasError(url) ||
this.engine.components.Meta.getRelayQuality(url)[0] < 0.5
) {
bad.push(url)
} else {
ok.push(url)
}
if (ok.length > limit) {
break
}
}
// If we don't have enough hints, use the broken ones
return ok.concat(bad).slice(0, limit)
}
hintSelector =
(generateHints: (...args: any[]) => Iterable<string>) =>
(limit: number, ...args: any[]) =>
this.selectHints(limit, generateHints.call(this, ...args))
getPubkeyHints = this.hintSelector(function* (this: Nip65, pubkey: string, mode = "write") {
const other = mode === "write" ? "read" : "write"
yield* this.getPubkeyRelayUrls(pubkey, mode)
yield* this.getPubkeyRelayUrls(pubkey, other)
})
getEventHints = this.hintSelector(function* (this: Nip65, event: Event) {
yield* event.seen_on || []
yield* this.getPubkeyHints(null, event.pubkey)
})
// If we're looking for an event's children, the read relays the author has
// advertised would be the most reliable option, since well-behaved clients
// will write replies there. However, this may include spam, so we may want
// to read from the current user's network's read relays instead.
getReplyHints = this.hintSelector(function* (this: Nip65, event) {
yield* this.getPubkeyRelayUrls(event.pubkey, "write")
yield* event.seen_on || []
yield* this.getPubkeyRelayUrls(event.pubkey, "read")
})
// If we're looking for an event's parent, tags are the most reliable hint,
// but we can also look at where the author of the note reads from
getParentHints = this.hintSelector(function* (this: Nip65, event) {
const parentId = findReplyId(event)
yield* Tags.from(event).equals(parentId).relays()
yield* event.seen_on || []
yield* this.getPubkeyHints(null, event.pubkey, "read")
})
// If we're replying or reacting to an event, we want the author to know, as well as
// anyone else who is tagged in the original event or the reply. Get everyone's read
// relays. Limit how many per pubkey we publish to though. We also want to advertise
// our content to our followers, so publish to our write relays as well.
getPublishHints = (limit: number, event: Event, extraRelays: string[] = []) => {
const tags = Tags.from(event)
const pubkeys = tags.type("p").values().all().concat(event.pubkey)
const hintGroups = pubkeys.map(pubkey => this.getPubkeyHints(3, pubkey, "read"))
return this.mergeHints(limit, hintGroups.concat([extraRelays]))
}
mergeHints = (limit: number, groups: string[][]) => {
const scores = {} as Record<string, any>
for (const hints of groups) {
hints.forEach((hint, i) => {
const score = 1 / (i + 1) / hints.length
if (!scores[hint]) {
scores[hint] = {score: 0, count: 0}
}
scores[hint].score += score
scores[hint].count += 1
})
}
// Use the log-sum-exp and a weighted sum
for (const score of Object.values(scores)) {
const weight = Math.log(groups.length / score.count)
score.score = weight + Math.log1p(Math.exp(score.score - score.count))
}
return sortBy(([hint, {score}]) => -score, Object.entries(scores))
.map(nth(0))
.slice(0, limit)
}
initialize(engine: Engine) {
this.engine = engine
engine.components.Events.addHandler(2, e => {
if (isShareableRelay(e.content)) { if (isShareableRelay(e.content)) {
Nip65.addRelay(normalizeRelayUrl(e.content)) this.addRelay(normalizeRelayUrl(e.content))
} }
}) })
Events.addHandler(3, e => { engine.components.Events.addHandler(3, e => {
Nip65.setPolicy( this.setPolicy(
e, e,
tryJson(() => { tryJson<RelayPolicyEntry[]>(() => {
Object.entries(JSON.parse(e.content || "")) return Object.entries(JSON.parse(e.content || ""))
.filter(([url]) => isShareableRelay(url)) .filter(([url]) => isShareableRelay(url))
.map(([url, conditions]) => { .map(([url, conditions]) => {
// @ts-ignore // @ts-ignore
@ -235,12 +221,12 @@ export class Nip65 {
return {url: normalizeRelayUrl(url), write, read} return {url: normalizeRelayUrl(url), write, read}
}) })
}) }) as RelayPolicyEntry[]
) )
}) })
Events.addHandler(10002, e => { engine.components.Events.addHandler(10002, e => {
Nip65.setPolicy( this.setPolicy(
e, e,
Tags.from(e) Tags.from(e)
.type("r") .type("r")
@ -258,17 +244,17 @@ export class Nip65 {
) )
}) })
;(async () => { ;(async () => {
const {DEFAULT_RELAYS, FORCE_RELAYS, DUFFLEPUD_URL} = Env const {DEFAULT_RELAYS, FORCE_RELAYS, DUFFLEPUD_URL} = engine.Env
// Throw some hardcoded defaults in there // Throw some hardcoded defaults in there
DEFAULT_RELAYS.forEach(Nip65.addRelay) DEFAULT_RELAYS.forEach(this.addRelay)
// Load relays from nostr.watch via dufflepud // Load relays from nostr.watch via dufflepud
if (FORCE_RELAYS.length === 0 && DUFFLEPUD_URL) { if (FORCE_RELAYS.length === 0 && DUFFLEPUD_URL) {
try { try {
const json = await Fetch.fetchJson(DUFFLEPUD_URL + "/relay") const json = await Fetch.fetchJson(DUFFLEPUD_URL + "/relay")
json.relays.filter(isShareableRelay).forEach(Nip65.addRelay) json.relays.filter(isShareableRelay).forEach(this.addRelay)
} catch (e) { } catch (e) {
warn("Failed to fetch relays list", e) warn("Failed to fetch relays list", e)
} }
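The relay selection helpers above can be combined as sketched below to pick a bounded relay set for a group of pubkeys: each pubkey contributes a ranked hint list, and mergeHints scores them with the weighted log-sum-exp described in the comment. The pubkeys and limits are placeholders; `engine` is assumed to be initialized.

import type {Engine} from "src/engine/Engine"

declare const engine: Engine // assumed initialized elsewhere

const {Nip65} = engine.components
const pubkeys = ["<pubkey-a>", "<pubkey-b>"] // placeholders

// Each group is that pubkey's best "write" relays, per their kind 10002/kind 3 policy
const hintGroups = pubkeys.map(pubkey => Nip65.getPubkeyHints(3, pubkey))

// Collapse the groups into a single ranked list, capped at 5 relays
const relays = Nip65.mergeHints(5, hintGroups)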

View File

@ -1,49 +1,51 @@
import type {Event} from "src/engine/types"
import {getEventHash} from "nostr-tools" import {getEventHash} from "nostr-tools"
import type {UnsignedEvent} from "nostr-tools"
import {assoc} from "ramda" import {assoc} from "ramda"
import {doPipe} from "hurdak" import {doPipe} from "hurdak"
import {now} from "src/util/misc" import {now} from "src/util/misc"
import {Worker} from "../util/Worker" import type {Progress} from "src/engine/components/Network"
import type {Engine} from "src/engine/Engine"
import type {Event} from "src/engine/types"
export class Outbox { export class Outbox {
static contributeState() { engine: Engine
return {
queue: new Worker<Event>(), prepEvent = async (rawEvent: Partial<Event>): Promise<Event> => {
if (rawEvent.sig) {
return rawEvent as Event
} }
const event = {
...rawEvent,
created_at: now(),
pubkey: this.engine.components.Keys.pubkey.get(),
}
event.id = getEventHash(event as UnsignedEvent)
return this.engine.components.Keys.sign(event as Event)
} }
static contributeActions({Keys, Network, User, Events}) { publish = async (
const prepEvent = async rawEvent => { rawEvent: Partial<Event>,
return await doPipe(rawEvent, [ relays: string[] = null,
assoc("created_at", now()), onProgress: (p: Progress) => void = null,
assoc("pubkey", Keys.pubkey.get()), verb = "EVENT"
e => ({...e, id: getEventHash(e)}), ) => {
Keys.sign, const event = rawEvent.sig ? (rawEvent as Event) : await this.prepEvent(rawEvent)
])
if (!relays) {
relays = this.engine.components.User.getRelayUrls("write")
} }
const publish = async (event, relays = null, onProgress = null, verb = "EVENT") => { // return console.log(event)
if (!event.sig) {
event = await prepEvent(event)
}
if (!relays) { this.engine.components.Events.queue.push(event)
relays = User.getRelayUrls("write")
}
// return console.log(event) return [event, this.engine.components.Network.publish({event, relays, onProgress, verb})]
const promise = Network.publish({event, relays, onProgress, verb})
Events.queue.push(event)
return [event, promise]
}
return {prepEvent, publish}
} }
static initialize({Outbox}) { initialize(engine: Engine) {
Outbox.queue.listen(({event}) => Outbox.publish(event)) this.engine = engine
} }
} }
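A sketch of publishing through the new Outbox: prepEvent stamps created_at, pubkey, and id and signs via Keys, then the note goes to the user's write relays unless relays are passed explicitly. The content here is a placeholder and `engine` is assumed to be initialized.

import type {Engine} from "src/engine/Engine"

declare const engine: Engine // assumed initialized elsewhere

const publishNote = async () => {
  const [event, result] = await engine.components.Outbox.publish({
    kind: 1,
    content: "hello from the refactored engine", // placeholder note
    tags: [],
  })

  // `result` is the Network.publish promise; await it for per-relay outcomes
  const progress = await result

  console.log(event.id, progress.succeeded, progress.failed)
}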

View File

@ -3,6 +3,7 @@ import {chunk, seconds, ensurePlural} from "hurdak"
import {personKinds, appDataKeys} from "src/util/nostr" import {personKinds, appDataKeys} from "src/util/nostr"
import {now} from "src/util/misc" import {now} from "src/util/misc"
import type {Filter} from "src/engine/types" import type {Filter} from "src/engine/types"
import type {Engine} from "src/engine/Engine"
export type LoadPeopleOpts = { export type LoadPeopleOpts = {
relays?: string[] relays?: string[]
@ -11,76 +12,78 @@ export type LoadPeopleOpts = {
} }
export class PubkeyLoader { export class PubkeyLoader {
static contributeActions({Directory, Nip65, User, Network}) { engine: Engine
const attemptedPubkeys = new Set()
const getStalePubkeys = pubkeys => { attemptedPubkeys = new Set()
const stale = new Set()
const since = now() - seconds(3, "hour")
for (const pubkey of pubkeys) { getStalePubkeys = (pubkeys: string[]) => {
if (stale.has(pubkey) || attemptedPubkeys.has(pubkey)) { const stale = new Set()
continue const since = now() - seconds(3, "hour")
}
attemptedPubkeys.add(pubkey) for (const pubkey of pubkeys) {
if (stale.has(pubkey) || this.attemptedPubkeys.has(pubkey)) {
if (Directory.profiles.key(pubkey).get()?.updated_at || 0 > since) { continue
continue
}
stale.add(pubkey)
} }
return stale this.attemptedPubkeys.add(pubkey)
if (this.engine.components.Directory.profiles.key(pubkey).get()?.updated_at || 0 > since) {
continue
}
stale.add(pubkey)
} }
const load = async ( return Array.from(stale)
pubkeyGroups, }
{relays, force, kinds = personKinds}: LoadPeopleOpts = {}
) => {
const rawPubkeys = ensurePlural(pubkeyGroups).reduce((a, b) => a.concat(b), [])
const pubkeys = force ? uniq(rawPubkeys) : getStalePubkeys(rawPubkeys)
const getChunkRelays = chunk => { load = async (
if (relays?.length > 0) { pubkeyGroups: string | string[],
return relays {relays, force, kinds = personKinds}: LoadPeopleOpts = {}
} ) => {
const rawPubkeys = ensurePlural(pubkeyGroups).reduce((a, b) => a.concat(b), [])
const pubkeys = force ? uniq(rawPubkeys) : this.getStalePubkeys(rawPubkeys)
return Nip65.mergeHints( const getChunkRelays = (chunk: string[]) => {
User.getSetting("relay_limit"), if (relays?.length > 0) {
chunk.map(pubkey => Nip65.getPubkeyHints(3, pubkey)) return relays
)
} }
const getChunkFilter = chunk => { return this.engine.components.Nip65.mergeHints(
const filter = [] as Filter[] this.engine.components.User.getSetting("relay_limit"),
chunk.map(pubkey => this.engine.components.Nip65.getPubkeyHints(3, pubkey))
filter.push({kinds: without([30078], kinds), authors: chunk})
// Add a separate filter for app data so we're not pulling down other people's stuff,
// or obsolete events of our own.
if (kinds.includes(30078)) {
filter.push({kinds: [30078], authors: chunk, "#d": Object.values(appDataKeys)})
}
return filter
}
await Promise.all(
pluck(
"complete",
chunk(256, pubkeys).map(chunk =>
Network.subscribe({
relays: getChunkRelays(chunk),
filter: getChunkFilter(chunk),
timeout: 10_000,
})
)
)
) )
} }
return {load} const getChunkFilter = (chunk: string[]) => {
const filter = [] as Filter[]
filter.push({kinds: without([30078], kinds), authors: chunk})
// Add a separate filter for app data so we're not pulling down other people's stuff,
// or obsolete events of our own.
if (kinds.includes(30078)) {
filter.push({kinds: [30078], authors: chunk, "#d": Object.values(appDataKeys)})
}
return filter
}
await Promise.all(
pluck(
"complete",
chunk(256, pubkeys).map((chunk: string[]) =>
this.engine.components.Network.subscribe({
relays: getChunkRelays(chunk),
filter: getChunkFilter(chunk),
timeout: 10_000,
})
)
)
)
}
initialize(engine: Engine) {
this.engine = engine
} }
} }
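The loader above can be used to warm the profile cache before rendering, as in this sketch; the pubkeys are placeholders and `engine` is assumed to be initialized.

import type {Engine} from "src/engine/Engine"

declare const engine: Engine // assumed initialized elsewhere

const warmProfiles = async (pubkeys: string[]) => {
  // The loader tracks already-attempted pubkeys and applies the staleness
  // check above unless force is set
  await engine.components.PubkeyLoader.load(pubkeys, {kinds: [0, 3]})

  return pubkeys.map(pubkey => engine.components.Directory.profiles.key(pubkey).get())
}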

View File

@ -1,161 +1,154 @@
import {prop, pluck, splitAt, path as getPath, sortBy} from "ramda" import {prop, pluck, splitAt, path as getPath, sortBy} from "ramda"
import {sleep, defer, chunk, randomInt, throttle} from "hurdak" import {sleep, defer, chunk, randomInt, throttle} from "hurdak"
import {Storage as LocalStorage} from "hurdak" import {Storage as LocalStorage} from "hurdak"
import {writable} from "../util/store" import type {Channel, Contact} from "src/engine/types"
import {IndexedDB} from "../util/indexeddb" import type {Engine} from "src/engine/Engine"
import {writable} from "src/engine/util/store"
import type {Writable, Collection} from "src/engine/util/store"
import {IndexedDB} from "src/engine/util/indexeddb"
const localStorageKeys = ["Alerts.lastChecked", "Keys.pubkey", "Keys.keyState", "User.settings"] const localStorageKeys = ["Alerts.lastChecked", "Keys.pubkey", "Keys.keyState", "User.settings"]
const policy = (key, max, sort) => ({key, max, sort}) const sortChannels = sortBy((e: Channel) =>
e.joined ? 0 : -Math.max(e.last_checked || 0, e.last_sent || 0)
)
const sortChannels = sortBy(e => (e.joined ? 0 : -Math.max(e.last_checked || 0, e.last_sent || 0))) const sortContacts = sortBy((e: Contact) => -Math.max(e.last_checked || 0, e.last_sent || 0))
const sortContacts = sortBy(e => -Math.max(e.last_checked || 0, e.last_sent || 0)) const policy = (key: string, max: number, sort: (xs: any[]) => any[]) => ({key, max, sort})
const getCollectionPolicies = ({Storage}) => [ const getStore = (key: string, engine: Engine) =>
policy("Alerts.events", 500, sortBy(prop("created_at"))), getPath(key.split("."), engine.components) as Collection<any>
policy("Nip28.channels", 1000, sortChannels),
policy("Nip28.messages", 10000, sortBy(prop("created_at"))),
policy("Nip04.contacts", 1000, sortContacts),
policy("Nip04.messages", 10000, sortBy(prop("created_at"))),
policy("Content.topics", 1000, sortBy(prop("count"))),
policy("Content.lists", 500, Storage.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Directory.profiles", 5000, Storage.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Events.cache", 5000, Storage.sortByPubkeyWhitelist(prop("created_at"))),
policy("Nip02.graph", 5000, Storage.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Nip05.handles", 5000, Storage.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Nip57.zappers", 5000, Storage.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Nip65.relays", 2000, prop("count")),
policy("Nip65.policies", 5000, Storage.sortByPubkeyWhitelist(prop("updated_at"))),
]
// Sync helpers export class Storage {
engine: Engine
db: IndexedDB
ready = defer()
dead = writable(false)
const syncScalars = (engine, keys) => { close = () => {
for (const key of keys) { this.dead.set(true)
const store = getPath(key.split("."), engine)
if (Object.hasOwn(localStorage, key)) { return this.db?.close()
store.set(LocalStorage.getJson(key)) }
clear = () => {
this.dead.set(true)
localStorage.clear()
return this.db?.delete()
}
getPubkeyWhitelist = () => {
const pubkeys = this.engine.components.Keys.keyState.get().map(prop("pubkey"))
return [new Set(pubkeys), this.engine.components.Nip02.getFollowsSet(pubkeys)]
}
sortByPubkeyWhitelist = (fallback: (x: any) => number) => (rows: Record<string, any>[]) => {
const [pubkeys, follows] = this.getPubkeyWhitelist()
return sortBy(x => {
if (pubkeys.has(x.pubkey)) {
return Number.MAX_SAFE_INTEGER
}
if (follows.has(x.pubkey)) {
return Number.MAX_SAFE_INTEGER - 1
}
return fallback(x)
}, rows)
}
async initialize(engine: Engine) {
this.engine = engine
for (const key of localStorageKeys) {
const store = getStore(key, engine)
if (Object.hasOwn(localStorage, key)) {
store.set(LocalStorage.getJson(key))
}
store.subscribe(throttle(300, $value => LocalStorage.setJson(key, $value)))
} }
store.subscribe(throttle(300, $value => LocalStorage.setJson(key, $value))) if (window.indexedDB) {
} const policies = [
} policy("Alerts.events", 500, sortBy(prop("created_at"))),
policy("Nip28.channels", 1000, sortChannels),
policy("Nip28.messages", 10000, sortBy(prop("created_at"))),
policy("Nip04.contacts", 1000, sortContacts),
policy("Nip04.messages", 10000, sortBy(prop("created_at"))),
policy("Content.topics", 1000, sortBy(prop("count"))),
policy("Content.lists", 500, this.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Directory.profiles", 5000, this.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Events.cache", 5000, this.sortByPubkeyWhitelist(prop("created_at"))),
policy("Nip02.graph", 5000, this.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Nip05.handles", 5000, this.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Nip57.zappers", 5000, this.sortByPubkeyWhitelist(prop("updated_at"))),
policy("Nip65.relays", 2000, prop("count")),
policy("Nip65.policies", 5000, this.sortByPubkeyWhitelist(prop("updated_at"))),
]
const syncCollections = async (engine, policies) => { this.db = new IndexedDB(
for (const {key} of policies) { "nostr-engine/Storage",
const store = getPath(key.split("."), engine) 1,
policies.map(({key}) => {
const store = getStore(key, engine)
store.set(await engine.Storage.db.getAll(key)) return {
name: key,
opts: {
keyPath: store.pk,
},
}
})
)
store.subscribe( window.addEventListener("beforeunload", () => this.close())
throttle(randomInt(3000, 5000), async rows => {
if (engine.Storage.dead.get()) { await this.db.open()
for (const {key} of policies) {
const store = getStore(key, engine)
store.set(await this.db.getAll(key))
store.subscribe(
throttle(randomInt(3000, 5000), async <T>(rows: T) => {
if (this.dead.get()) {
return
}
// Do it in small steps to avoid clogging stuff up
for (const records of chunk(100, rows as any[])) {
await this.db.bulkPut(key, records)
await sleep(50)
}
})
)
}
// Every so often randomly prune a store
setInterval(() => {
const {key, max, sort} = policies[Math.floor(policies.length * Math.random())]
const store = getStore(key, engine)
const data = store.get()
if (data.length < max * 1.1) {
return return
} }
// Do it in small steps to avoid clogging stuff up const [discard, keep] = splitAt(max, sort(data))
for (const records of chunk(100, rows)) {
await engine.Storage.db.bulkPut(key, records)
await sleep(50)
}
})
)
}
// Every so often randomly prune a store store.set(keep)
setInterval(() => { this.db.bulkDelete(key, pluck(store.pk, discard))
const {key, max, sort} = policies[Math.floor(policies.length * Math.random())] }, 30_000)
const store = getPath(key.split("."), engine)
const data = store.get()
if (data.length < max * 1.1) {
return
} }
const [discard, keep] = splitAt(max, sort(data)) this.ready.resolve()
store.set(keep)
engine.Storage.db.bulkDelete(key, pluck(store.pk, discard))
}, 30_000)
}
export class Storage {
static contributeState() {
const ready = defer()
const dead = writable(false)
return {db: null, ready, dead}
}
static contributeActions({Storage, Nip02, Keys}) {
const close = () => {
Storage.dead.set(true)
return Storage.db?.close()
}
const clear = () => {
Storage.dead.set(true)
localStorage.clear()
return Storage.db?.delete()
}
const getPubkeyWhitelist = () => {
const pubkeys = Keys.keyState.get().map(prop("pubkey"))
return [new Set(pubkeys), Nip02.getFollowsSet(pubkeys)]
}
const sortByPubkeyWhitelist = fallback => rows => {
const [pubkeys, follows] = getPubkeyWhitelist()
return sortBy(x => {
if (pubkeys.has(x.pubkey)) {
return Number.MAX_SAFE_INTEGER
}
if (follows.has(x.pubkey)) {
return Number.MAX_SAFE_INTEGER - 1
}
return fallback(x)
}, rows)
}
return {close, clear, getPubkeyWhitelist, sortByPubkeyWhitelist}
}
static async initialize(engine) {
syncScalars(engine, localStorageKeys)
if (window.indexedDB) {
const policies = getCollectionPolicies(engine)
const indexedDBStores = policies.map(({key}) => {
const store = getPath(key.split("."), engine)
return {
name: key,
opts: {
keyPath: store.pk,
},
}
})
engine.Storage.db = new IndexedDB("nostr-engine/Storage", 1, indexedDBStores)
window.addEventListener("beforeunload", () => engine.Storage.close())
await engine.Storage.db.open()
await syncCollections(engine, policies)
}
engine.Storage.ready.resolve()
} }
} }

View File

@ -1,273 +1,236 @@
import {when, prop, uniq, pluck, fromPairs, whereEq, find, slice, reject} from "ramda" import {when, prop, uniq, pluck, fromPairs, whereEq, find, slice, reject} from "ramda"
import {now} from "src/util/misc" import {now} from "src/util/misc"
import {Tags, appDataKeys, normalizeRelayUrl, findReplyId, findRootId} from "src/util/nostr" import {Tags, appDataKeys, normalizeRelayUrl, findReplyId, findRootId} from "src/util/nostr"
import {writable} from "../util/store" import type {RelayPolicyEntry, List, Event} from "src/engine/types"
import {writable} from "src/engine/util/store"
import type {Writable} from "src/engine/util/store"
import type {Engine} from "src/engine/Engine"
export class User { export class User {
static contributeState({Env}) { engine: Engine
const settings = writable<any>({ settings: Writable<Record<string, any>>
getPubkey = () => this.engine.components.Keys.pubkey.get()
getStateKey = () => (this.engine.components.Keys.canSign.get() ? this.getPubkey() : "anonymous")
// Settings
getSetting = (k: string) => this.settings.get()[k]
dufflepud = (path: string) => `${this.getSetting("dufflepud_url")}/${path}`
setSettings = async (settings: Record<string, any>) => {
this.settings.update($settings => ({...$settings, ...settings}))
if (this.engine.components.Keys.canSign.get()) {
const d = appDataKeys.USER_SETTINGS
const v = await this.engine.components.Crypt.encryptJson(settings)
return this.engine.components.Outbox.publish(this.engine.components.Builder.setAppData(d, v))
}
}
setAppData = async (d: string, content: any) => {
const v = await this.engine.components.Crypt.encryptJson(content)
return this.engine.components.Outbox.publish(this.engine.components.Builder.setAppData(d, v))
}
// Nip65
getRelays = (mode?: string) =>
this.engine.components.Nip65.getPubkeyRelays(this.getStateKey(), mode)
getRelayUrls = (mode?: string) =>
this.engine.components.Nip65.getPubkeyRelayUrls(this.getStateKey(), mode)
setRelays = (relays: RelayPolicyEntry[]) => {
if (this.engine.components.Keys.canSign.get()) {
return this.engine.components.Outbox.publish(this.engine.components.Builder.setRelays(relays))
} else {
this.engine.components.Nip65.setPolicy(
{pubkey: this.getStateKey(), created_at: now()},
relays
)
}
}
addRelay = (url: string) => this.setRelays(this.getRelays().concat({url, read: true, write: true}))
removeRelay = (url: string) =>
this.setRelays(reject(whereEq({url: normalizeRelayUrl(url)}), this.getRelays()))
setRelayPolicy = (url: string, policy: Partial<RelayPolicyEntry>) =>
this.setRelays(this.getRelays().map(when(whereEq({url}), p => ({...p, ...policy}))))
// Nip02
getPetnames = () => this.engine.components.Nip02.getPetnames(this.getStateKey())
getMutedTags = () => this.engine.components.Nip02.getMutedTags(this.getStateKey())
getFollowsSet = () => this.engine.components.Nip02.getFollowsSet(this.getStateKey())
getMutesSet = () => this.engine.components.Nip02.getMutesSet(this.getStateKey())
getFollows = () => this.engine.components.Nip02.getFollows(this.getStateKey())
getMutes = () => this.engine.components.Nip02.getMutes(this.getStateKey())
getNetworkSet = () => this.engine.components.Nip02.getNetworkSet(this.getStateKey())
getNetwork = () => this.engine.components.Nip02.getNetwork(this.getStateKey())
isFollowing = (pubkey: string) => this.engine.components.Nip02.isFollowing(this.getStateKey(), pubkey)
isIgnoring = (pubkeyOrEventId: string) =>
this.engine.components.Nip02.isIgnoring(this.getStateKey(), pubkeyOrEventId)
setProfile = ($profile: Record<string, any>) =>
this.engine.components.Outbox.publish(this.engine.components.Builder.setProfile($profile))
setPetnames = async ($petnames: string[][]) => {
if (this.engine.components.Keys.canSign.get()) {
await this.engine.components.Outbox.publish(
this.engine.components.Builder.setPetnames($petnames)
)
} else {
this.engine.components.Nip02.graph.key(this.getStateKey()).merge({
updated_at: now(),
petnames_updated_at: now(),
petnames: $petnames,
})
}
}
follow = (pubkey: string) =>
this.setPetnames(
this.getPetnames()
.filter(t => t[1] !== pubkey)
.concat([this.engine.components.Builder.mention(pubkey)])
)
unfollow = (pubkey: string) =>
this.setPetnames(reject((t: string[]) => t[1] === pubkey, this.getPetnames()))
isMuted = (e: Event) => {
const m = this.getMutesSet()
return find(t => m.has(t), [e.id, e.pubkey, findReplyId(e), findRootId(e)])
}
applyMutes = (events: Event[]) => reject(this.isMuted, events)
setMutes = async ($mutes: string[][]) => {
if (this.engine.components.Keys.canSign.get()) {
await this.engine.components.Outbox.publish(
this.engine.components.Builder.setMutes($mutes.map(t => t.slice(0, 2)))
)
} else {
this.engine.components.Nip02.graph.key(this.getStateKey()).merge({
updated_at: now(),
mutes_updated_at: now(),
mutes: $mutes,
})
}
}
mute = (type: string, value: string) =>
this.setMutes(reject((t: string[]) => t[1] === value, this.getMutedTags()).concat([[type, value]]))
unmute = (target: string) => this.setMutes(reject((t: string[]) => t[1] === target, this.getMutedTags()))
// Lists
getLists = (f?: (l: List) => boolean) =>
this.engine.components.Content.getLists(
l => l.pubkey === this.getStateKey() && (f ? f(l) : true)
)
putList = (name: string, params: string[][], relays: string[]) =>
this.engine.components.Outbox.publish(
this.engine.components.Builder.createList([["d", name]].concat(params).concat(relays))
)
removeList = (naddr: string) =>
this.engine.components.Outbox.publish(this.engine.components.Builder.deleteNaddrs([naddr]))
// Messages
markAllMessagesRead = () => {
const lastChecked = fromPairs(
uniq(pluck("contact", this.engine.components.Nip04.messages.get())).map(k => [k, now()])
)
return this.setAppData(appDataKeys.NIP04_LAST_CHECKED, lastChecked)
}
setContactLastChecked = (pubkey: string) => {
const lastChecked = fromPairs(
this.engine.components.Nip04.contacts
.get()
.filter(prop("last_checked"))
.map(r => [r.id, r.last_checked])
)
return this.setAppData(appDataKeys.NIP04_LAST_CHECKED, {...lastChecked, [pubkey]: now()})
}
// Channels
setChannelLastChecked = (id: string) => {
const lastChecked = fromPairs(
this.engine.components.Nip28.channels
.get()
.filter(prop("last_checked"))
.map(r => [r.id, r.last_checked])
)
return this.setAppData(appDataKeys.NIP28_LAST_CHECKED, {...lastChecked, [id]: now()})
}
saveChannels = () =>
this.setAppData(
appDataKeys.NIP28_ROOMS_JOINED,
pluck("id", this.engine.components.Nip28.channels.get().filter(whereEq({joined: true})))
)
joinChannel = (id: string) => {
this.engine.components.Nip28.channels.key(id).merge({joined: true})
return this.saveChannels()
}
leaveChannel = (id: string) => {
this.engine.components.Nip28.channels.key(id).merge({joined: false})
this.engine.components.Nip28.messages.reject(m => m.channel === id)
return this.saveChannels()
}
initialize(engine: Engine) {
this.engine = engine
this.settings = writable<Record<string, any>>({
last_updated: 0, last_updated: 0,
relay_limit: 10, relay_limit: 10,
default_zap: 21, default_zap: 21,
show_media: true, show_media: true,
report_analytics: true, report_analytics: true,
dufflepud_url: Env.DUFFLEPUD_URL, dufflepud_url: engine.Env.DUFFLEPUD_URL,
multiplextr_url: Env.MULTIPLEXTR_URL, multiplextr_url: engine.Env.MULTIPLEXTR_URL,
}) })
return {settings} engine.components.Events.addHandler(30078, async e => {
}
static contributeActions({
Builder,
Content,
Crypt,
Directory,
Events,
Keys,
Network,
Outbox,
Nip02,
Nip04,
Nip28,
Nip65,
User,
}) {
const getPubkey = () => Keys.pubkey.get()
const getStateKey = () => (Keys.canSign.get() ? getPubkey() : "anonymous")
// Settings
const getSetting = k => User.settings.get()[k]
const dufflepud = path => `${getSetting("dufflepud_url")}/${path}`
const setSettings = async settings => {
User.settings.update($settings => ({...$settings, ...settings}))
if (Keys.canSign.get()) {
const d = appDataKeys.USER_SETTINGS
const v = await Crypt.encryptJson(settings)
return Outbox.queue.push({event: Builder.setAppData(d, v)})
}
}
const setAppData = async (d, content) => {
const v = await Crypt.encryptJson(content)
return Outbox.queue.push({event: Builder.setAppData(d, v)})
}
// Nip65
const getRelays = (mode?: string) => Nip65.getPubkeyRelays(getStateKey(), mode)
const getRelayUrls = (mode?: string) => Nip65.getPubkeyRelayUrls(getStateKey(), mode)
const setRelays = relays => {
if (Keys.canSign.get()) {
return Outbox.queue.push({event: Builder.setRelays(relays)})
} else {
Nip65.setPolicy({pubkey: getStateKey(), created_at: now()}, relays)
}
}
const addRelay = url => setRelays(getRelays().concat({url, read: true, write: true}))
const removeRelay = url =>
setRelays(reject(whereEq({url: normalizeRelayUrl(url)}), getRelays()))
const setRelayPolicy = (url, policy) =>
setRelays(getRelays().map(when(whereEq({url}), p => ({...p, ...policy}))))
// Nip02
const getPetnames = () => Nip02.getPetnames(getStateKey())
const getMutedTags = () => Nip02.getMutedTags(getStateKey())
const getFollowsSet = () => Nip02.getFollowsSet(getStateKey())
const getMutesSet = () => Nip02.getMutesSet(getStateKey())
const getFollows = () => Nip02.getFollows(getStateKey())
const getMutes = () => Nip02.getMutes(getStateKey())
const getNetworkSet = () => Nip02.getNetworkSet(getStateKey())
const getNetwork = () => Nip02.getNetwork(getStateKey())
const isFollowing = pubkey => Nip02.isFollowing(getStateKey(), pubkey)
const isIgnoring = pubkeyOrEventId => Nip02.isIgnoring(getStateKey(), pubkeyOrEventId)
const setProfile = $profile => Outbox.queue.push({event: Builder.setProfile($profile)})
const setPetnames = async $petnames => {
if (Keys.canSign.get()) {
await Outbox.queue.push({event: Builder.setPetnames($petnames)})
} else {
Nip02.graph.key(getStateKey()).merge({
updated_at: now(),
petnames_updated_at: now(),
petnames: $petnames,
})
}
}
const follow = pubkey =>
setPetnames(
getPetnames()
.filter(t => t[1] !== pubkey)
.concat([Builder.mention(pubkey)])
)
const unfollow = pubkey => setPetnames(reject(t => t[1] === pubkey, getPetnames()))
const isMuted = e => {
const m = getMutesSet()
return find(t => m.has(t), [e.id, e.pubkey, findReplyId(e), findRootId(e)])
}
const applyMutes = events => reject(isMuted, events)
const setMutes = async $mutes => {
if (Keys.canSign.get()) {
await Outbox.queue.push({event: Builder.setMutes($mutes.map(slice(0, 2)))})
} else {
Nip02.graph.key(getStateKey()).merge({
updated_at: now(),
mutes_updated_at: now(),
mutes: $mutes,
})
}
}
const mute = (type, value) =>
setMutes(reject(t => t[1] === value, getMutedTags()).concat([[type, value]]))
const unmute = target => setMutes(reject(t => t[1] === target, getMutedTags()))
// Content
const getLists = f => Content.getLists(l => l.pubkey === getStateKey() && (f ? f(l) : true))
const putList = (name, params, relays) =>
Outbox.queue.push({
event: Builder.createList([["d", name]].concat(params).concat(relays)),
})
const removeList = naddr => Outbox.queue.push({event: Builder.deleteNaddrs([naddr])})
// Messages
const markAllMessagesRead = () => {
const lastChecked = fromPairs(
uniq(pluck("contact", Nip04.messages.get())).map(k => [k, now()])
)
return setAppData(appDataKeys.NIP04_LAST_CHECKED, lastChecked)
}
const setContactLastChecked = pubkey => {
const lastChecked = fromPairs(
Nip04.contacts
.get()
.filter(prop("last_checked"))
.map(r => [r.id, r.last_checked])
)
return setAppData(appDataKeys.NIP04_LAST_CHECKED, {...lastChecked, [pubkey]: now()})
}
// Nip28
const setChannelLastChecked = id => {
const lastChecked = fromPairs(
Nip28.channels
.get()
.filter(prop("last_checked"))
.map(r => [r.id, r.last_checked])
)
return setAppData(appDataKeys.NIP28_LAST_CHECKED, {...lastChecked, [id]: now()})
}
const saveChannels = () =>
setAppData(
appDataKeys.NIP28_ROOMS_JOINED,
pluck("id", Nip28.channels.get().filter(whereEq({joined: true})))
)
const joinChannel = id => {
Nip28.channels.key(id).merge({joined: false})
return saveChannels()
}
const leaveChannel = id => {
Nip28.channels.key(id).merge({joined: false})
Nip28.messages.reject(m => m.channel === id)
return saveChannels()
}
return {
getPubkey,
getStateKey,
getSetting,
dufflepud,
setSettings,
getRelays,
getRelayUrls,
setRelays,
addRelay,
removeRelay,
setRelayPolicy,
getPetnames,
getMutedTags,
getFollowsSet,
getMutesSet,
getFollows,
getMutes,
getNetworkSet,
getNetwork,
isFollowing,
isIgnoring,
setProfile,
setPetnames,
follow,
unfollow,
isMuted,
applyMutes,
setMutes,
mute,
unmute,
getLists,
putList,
removeList,
markAllMessagesRead,
setContactLastChecked,
setChannelLastChecked,
joinChannel,
leaveChannel,
}
}
static initialize({Events, Crypt, User}) {
Events.addHandler(30078, async e => {
if ( if (
Tags.from(e).getMeta("d") === "coracle/settings/v1" && Tags.from(e).getMeta("d") === "coracle/settings/v1" &&
e.created_at > User.getSetting("last_updated") e.created_at > this.getSetting("last_updated")
) { ) {
const updates = await Crypt.decryptJson(e.content) const updates = await engine.components.Crypt.decryptJson(e.content)
if (updates) { if (updates) {
User.settings.update($settings => ({ this.settings.update($settings => ({
...$settings, ...$settings,
...updates, ...updates,
last_updated: e.created_at, last_updated: e.created_at,
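The class-based `User` above resolves every dependency through `this.engine.components` and substitutes an `"anonymous"` state key when no signer is available, so relay and follow edits still work locally before login. A short usage sketch, assuming an `Engine` constructed with the `Env` shape added later in this commit (the URLs and relay lists below are placeholders):

```typescript
import {Engine} from "src/engine"

// Env values below are placeholders
const engine = new Engine({
  DUFFLEPUD_URL: "https://dufflepud.example.com",
  MULTIPLEXTR_URL: "https://multiplextr.example.com",
  FORCE_RELAYS: [],
  COUNT_RELAYS: [],
  SEARCH_RELAYS: [],
  DEFAULT_RELAYS: ["wss://relay.example.com"],
  ENABLE_ZAPS: true,
})

const {User} = engine.components

const configure = async () => {
  // Settings merge locally; with a signer they are also re-published as encrypted app data
  await User.setSettings({relay_limit: 20})

  // Without a signer, relay edits fall back to an in-memory Nip65 policy keyed by "anonymous"
  User.addRelay("wss://relay.example.com")

  return User.getRelayUrls("read")
}
```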


@ -1,73 +1,21 @@
import {Alerts} from "./components/Alerts" export * from "./types"
import {Builder} from "./components/Builder" export {Engine} from "./Engine"
import {Content} from "./components/Content" export {Alerts} from "./components/Alerts"
import {Crypt} from "./components/Crypt" export {Builder} from "./components/Builder"
import {Directory} from "./components/Directory" export {Content} from "./components/Content"
import {Events} from "./components/Events" export {Crypt} from "./components/Crypt"
import {Keys} from "./components/Keys" export {Directory} from "./components/Directory"
import {Meta} from "./components/Meta" export {Events} from "./components/Events"
import {Network} from "./components/Network" export {Keys} from "./components/Keys"
import {Nip02} from "./components/Nip02" export {Meta} from "./components/Meta"
import {Nip04} from "./components/Nip04" export {Network} from "./components/Network"
import {Nip05} from "./components/Nip05" export {Nip02} from "./components/Nip02"
import {Nip28} from "./components/Nip28" export {Nip04} from "./components/Nip04"
import {Nip57} from "./components/Nip57" export {Nip05} from "./components/Nip05"
import {Nip65} from "./components/Nip65" export {Nip28} from "./components/Nip28"
import {Outbox} from "./components/Outbox" export {Nip57} from "./components/Nip57"
import {PubkeyLoader} from "./components/PubkeyLoader" export {Nip65} from "./components/Nip65"
import {Storage} from "./components/Storage" export {Outbox} from "./components/Outbox"
import {User} from "./components/User" export {PubkeyLoader} from "./components/PubkeyLoader"
export {Storage} from "./components/Storage"
export const createEngine = (engine, components) => { export {User} from "./components/User"
for (const component of components) {
engine[component.name] = {}
}
const componentState = components.map(c => [c, c.contributeState?.(engine)])
for (const [component, state] of componentState) {
Object.assign(engine[component.name], state)
}
const componentSelectors = components.map(c => [c, c.contributeSelectors?.(engine)])
for (const [component, selectors] of componentSelectors) {
Object.assign(engine[component.name], selectors)
}
const componentActions = components.map(c => [c, c.contributeActions?.(engine)])
for (const [component, actions] of componentActions) {
Object.assign(engine[component.name], actions)
}
for (const component of components) {
component.initialize?.(engine)
}
return engine
}
export const createDefaultEngine = Env => {
return createEngine({Env}, [
Alerts,
Builder,
Content,
Crypt,
Directory,
Events,
Keys,
Meta,
Network,
Nip02,
Nip04,
Nip05,
Nip28,
Nip57,
Nip65,
Outbox,
PubkeyLoader,
Storage,
User,
])
}
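With the barrel file reduced to re-exports, wiring now happens inside `Engine` instead of `createEngine`'s contribute hooks, and each component follows the shape visible in `User` above: plain class fields plus an `initialize(engine)` method (the `strictPropertyInitialization: false` added to tsconfig at the end of this commit is what lets the uninitialized fields compile). A minimal sketch of a hypothetical component in that style:

```typescript
import {writable} from "src/engine/util/store"
import type {Writable} from "src/engine/util/store"
import type {Engine} from "src/engine/Engine"

// Hypothetical component, shown only to illustrate the class-based pattern
export class Counter {
  engine: Engine
  count: Writable<number>

  increment = () => this.count.update(n => n + 1)

  initialize(engine: Engine) {
    this.engine = engine
    this.count = writable(0)
  }
}
```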


@ -6,7 +6,7 @@ export type Event = NostrToolsEvent & {
export type DisplayEvent = Event & { export type DisplayEvent = Event & {
zaps: Event[] zaps: Event[]
replies: Event[] replies: DisplayEvent[]
reactions: Event[] reactions: Event[]
matchesFilter?: boolean matchesFilter?: boolean
} }
@ -47,6 +47,7 @@ export type RelayInfo = {
contact?: string contact?: string
description?: string description?: string
last_checked?: number last_checked?: number
supported_nips?: number[]
limitation?: { limitation?: {
payment_required?: boolean payment_required?: boolean
auth_required?: boolean auth_required?: boolean
@ -60,17 +61,24 @@ export type Relay = {
info?: RelayInfo info?: RelayInfo
} }
export type RelayPolicyEntry = {
url: string
read: boolean
write: boolean
}
export type RelayPolicy = { export type RelayPolicy = {
pubkey: string pubkey: string
created_at: number created_at: number
updated_at: number updated_at: number
relays: {url: string; read: boolean; write: boolean}[] relays: RelayPolicyEntry[]
} }
export type RelayStat = { export type RelayStat = {
url: string url: string
error?: string error?: string
last_opened?: number last_opened?: number
last_closed?: number
last_activity?: number last_activity?: number
last_publish?: number last_publish?: number
last_sub?: number last_sub?: number
@ -106,6 +114,7 @@ export type Profile = {
export type Channel = { export type Channel = {
id: string id: string
name?: string
pubkey: string pubkey: string
updated_at: number updated_at: number
last_sent?: number last_sent?: number
@ -127,7 +136,8 @@ export type Contact = {
export type Message = { export type Message = {
id: string id: string
channel: string contact?: string
channel?: string
pubkey: string pubkey: string
created_at: number created_at: number
content: string content: string
@ -148,3 +158,20 @@ export type List = {
created_at: number created_at: number
deleted_at?: number deleted_at?: number
} }
export type Env = {
DUFFLEPUD_URL: string
MULTIPLEXTR_URL: string
FORCE_RELAYS: string[]
COUNT_RELAYS: string[]
SEARCH_RELAYS: string[]
DEFAULT_RELAYS: string[]
ENABLE_ZAPS: boolean
}
export type KeyState = {
method: string
pubkey: string
privkey: string | null
bunkerKey: string | null
}
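`RelayPolicyEntry` factors the per-relay read/write flags out of `RelayPolicy`, and `Env` and `KeyState` give the engine's configuration and key storage explicit shapes. A small illustration with placeholder values:

```typescript
import type {RelayPolicyEntry, RelayPolicy, KeyState} from "src/engine/types"

const relays: RelayPolicyEntry[] = [
  {url: "wss://relay.example.com", read: true, write: true}, // placeholder relay
]

const policy: RelayPolicy = {
  pubkey: "hex-pubkey", // placeholder
  created_at: 1689900000,
  updated_at: 1689900000,
  relays,
}

// A pubkey-only login carries no private or bunker key material
const keyState: KeyState = {
  method: "pubkey", // the accepted method strings are not shown here; this one is assumed
  pubkey: "hex-pubkey",
  privkey: null,
  bunkerKey: null,
}
```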


@ -1,12 +1,14 @@
import {all, prop, mergeLeft, identity, sortBy} from "ramda" import {all, prop, mergeLeft, identity, sortBy} from "ramda"
import {ensurePlural, first} from "hurdak" import {ensurePlural, first} from "hurdak"
import {now} from "src/util/misc" import {now} from "src/util/misc"
import type {Filter, Event} from "../types" import type {Filter, Event} from "src/engine/types"
import type {Subscription} from "src/engine/util/Subscription"
import type {Network} from "src/engine/components/Network"
export type CursorOpts = { export type CursorOpts = {
relay: string relay: string
filter: Filter | Filter[] filter: Filter | Filter[]
subscribe: (opts: any) => void Network: Network
onEvent?: (e: Event) => void onEvent?: (e: Event) => void
} }
@ -23,7 +25,7 @@ export class Cursor {
this.loading = false this.loading = false
} }
load(n) { load(n: number) {
const limit = n - this.buffer.length const limit = n - this.buffer.length
// If we're already loading, or we have enough buffered, do nothing // If we're already loading, or we have enough buffered, do nothing
@ -38,11 +40,11 @@ export class Cursor {
let count = 0 let count = 0
return this.opts.subscribe({ return this.opts.Network.subscribe({
timeout: 4000, timeout: 4000,
relays: [relay], relays: [relay],
filter: ensurePlural(filter).map(mergeLeft({until, limit})), filter: ensurePlural(filter).map(mergeLeft({until, limit})),
onEvent: event => { onEvent: (event: Event) => {
this.until = Math.min(until, event.created_at) this.until = Math.min(until, event.created_at)
this.buffer.push(event) this.buffer.push(event)
@ -87,7 +89,7 @@ export class MultiCursor {
this.#cursors = cursors this.#cursors = cursors
} }
load(limit) { load(limit: number) {
return this.#cursors.map(c => c.load(limit)).filter(identity) return this.#cursors.map(c => c.load(limit)).filter(identity)
} }
@ -99,7 +101,7 @@ export class MultiCursor {
return this.#cursors.reduce((n, c) => n + c.buffer.length, 0) return this.#cursors.reduce((n, c) => n + c.buffer.length, 0)
} }
take(n) { take(n: number): [Subscription[], Event[]] {
const events = [] const events = []
while (events.length < n) { while (events.length < n) {
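`Cursor` now receives the whole `Network` component in its options instead of a bare `subscribe` function, which is how `Feed` builds its per-relay cursors further down. A condensed sketch of that call site, assuming an already-initialized engine (relay URL and filter are placeholders):

```typescript
import {batch} from "hurdak"
import {Cursor} from "src/engine/util/Cursor"
import type {Engine} from "src/engine/Engine"
import type {Event} from "src/engine/types"

// One cursor per relay, all paging the same filter backwards through time
const makeCursors = (engine: Engine, relays: string[]) =>
  relays.map(
    relay =>
      new Cursor({
        relay,
        filter: {kinds: [1], limit: 50},
        Network: engine.components.Network,
        onEvent: batch(100, (events: Event[]) => console.log("buffered", events.length)),
      })
  )

// load(n) tops each cursor's buffer up to n events, skipping cursors that are already loading
// makeCursors(engine, ["wss://relay.example.com"]).forEach(cursor => cursor.load(20))
```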


@ -1,6 +1,7 @@
import {matchFilters} from "nostr-tools" import {matchFilters} from "nostr-tools"
import {throttle} from "throttle-debounce" import {throttle} from "throttle-debounce"
import { import {
map,
omit, omit,
pick, pick,
pluck, pluck,
@ -17,14 +18,17 @@ import {
reject, reject,
} from "ramda" } from "ramda"
import {ensurePlural, seconds, sleep, batch, union, chunk, doPipe} from "hurdak" import {ensurePlural, seconds, sleep, batch, union, chunk, doPipe} from "hurdak"
import {now} from "src/util/misc" import {now, pushToKey} from "src/util/misc"
import {findReplyId, Tags, noteKinds} from "src/util/nostr" import {findReplyId, Tags, noteKinds} from "src/util/nostr"
import {collection} from "./store" import {collection} from "./store"
import type {Collection} from "./store" import {Cursor, MultiCursor} from "src/engine/util/Cursor"
import {Cursor, MultiCursor} from "./Cursor" import type {Collection} from "src/engine/util/store"
import type {Event, DisplayEvent, Filter} from "../types" import type {Subscription} from "src/engine/util/Subscription"
import type {Event, DisplayEvent, Filter} from "src/engine/types"
import type {Engine} from "src/engine/Engine"
const fromDisplayEvent = omit(["zaps", "likes", "replies", "matchesFilter"]) const fromDisplayEvent = (e: DisplayEvent): Event =>
omit(["zaps", "likes", "replies", "matchesFilter"], e)
export type FeedOpts = { export type FeedOpts = {
limit?: number limit?: number
@ -33,7 +37,7 @@ export type FeedOpts = {
filter: Filter | Filter[] filter: Filter | Filter[]
onEvent?: (e: Event) => void onEvent?: (e: Event) => void
shouldLoadParents?: boolean shouldLoadParents?: boolean
engine: any engine: Engine
} }
export class Feed { export class Feed {
@ -67,7 +71,7 @@ export class Feed {
// Utils // Utils
addSubs(key, subs) { addSubs(key: string, subs: Array<Subscription>) {
for (const sub of ensurePlural(subs)) { for (const sub of ensurePlural(subs)) {
this.subs[key].push(sub) this.subs[key].push(sub)
@ -77,7 +81,7 @@ export class Feed {
} }
} }
getAllSubs(only = null) { getAllSubs(only: string[] = []) {
return flatten(Object.values(only ? pick(only, this.subs) : this.subs)) return flatten(Object.values(only.length > 0 ? pick(only, this.subs) : this.subs))
} }
@ -85,24 +89,24 @@ export class Feed {
return this.opts.engine.Env.ENABLE_ZAPS ? [1, 7, 9735] : [1, 7] return this.opts.engine.Env.ENABLE_ZAPS ? [1, 7, 9735] : [1, 7]
} }
matchFilters(e) { matchFilters(e: Event) {
return matchFilters(ensurePlural(this.opts.filter), e) return matchFilters(ensurePlural(this.opts.filter), e)
} }
isTextNote(e) { isTextNote(e: Event) {
return noteKinds.includes(e.kind) return noteKinds.includes(e.kind)
} }
isMissingParent = e => { isMissingParent = (e: Event) => {
const parentId = findReplyId(e) const parentId = findReplyId(e)
return parentId && this.matchFilters(e) && !this.context.key(parentId).exists() return parentId && this.matchFilters(e) && !this.context.key(parentId).exists()
} }
preprocessEvents = events => { preprocessEvents = (events: Event[]) => {
const {User} = this.opts.engine const {User} = this.opts.engine.components
events = reject(e => this.seen.has(e.id) || User.isMuted(e), events) events = reject((e: Event) => this.seen.has(e.id) || User.isMuted(e), events)
for (const event of events) { for (const event of events) {
this.seen.add(event.id) this.seen.add(event.id)
@ -111,19 +115,19 @@ export class Feed {
return events return events
} }
mergeHints(groups) { mergeHints(groups: string[][]) {
const {Nip65, User} = this.opts.engine const {Nip65, User} = this.opts.engine.components
return Nip65.mergeHints(User.getSetting("relay_limit"), groups) return Nip65.mergeHints(User.getSetting("relay_limit"), groups)
} }
applyContext(notes, context, substituteParents = false) { applyContext(notes: Event[], context: Event[], substituteParents = false) {
const parentIds = new Set(notes.map(findReplyId).filter(identity)) const parentIds = new Set(notes.map(findReplyId).filter(identity))
const forceShow = union(new Set(pluck("id", notes)), parentIds) const forceShow = union(new Set(pluck("id", notes)), parentIds)
const contextById = {} const contextById = {} as Record<string, Event>
const zapsByParentId = {} const zapsByParentId = {} as Record<string, Event[]>
const reactionsByParentId = {} const reactionsByParentId = {} as Record<string, Event[]>
const repliesByParentId = {} const repliesByParentId = {} as Record<string, Event[]>
for (const event of context.concat(notes)) { for (const event of context.concat(notes)) {
const parentId = findReplyId(event) const parentId = findReplyId(event)
@ -135,28 +139,25 @@ export class Feed {
contextById[event.id] = event contextById[event.id] = event
if (event.kind === 9735) { if (event.kind === 9735) {
zapsByParentId[parentId] = zapsByParentId[parentId] || [] pushToKey(zapsByParentId, parentId, event)
zapsByParentId[parentId].push(event)
} else if (event.kind === 7) { } else if (event.kind === 7) {
reactionsByParentId[parentId] = reactionsByParentId[parentId] || [] pushToKey(reactionsByParentId, parentId, event)
reactionsByParentId[parentId].push(event)
} else { } else {
repliesByParentId[parentId] = repliesByParentId[parentId] || [] pushToKey(repliesByParentId, parentId, event)
repliesByParentId[parentId].push(event)
} }
} }
const annotate = (note: DisplayEvent) => { const annotate = (note: Event): DisplayEvent => {
const {replies = [], reactions = [], zaps = []} = note const {replies = [], reactions = [], zaps = []} = note as DisplayEvent
const combinedZaps = zaps.concat(zapsByParentId[note.id] || []) const combinedZaps = zaps.concat(zapsByParentId[note.id] || [])
const combinedReactions = reactions.concat(reactionsByParentId[note.id] || []) const combinedReactions = reactions.concat(reactionsByParentId[note.id] || [])
const combinedReplies = replies.concat(repliesByParentId[note.id] || []) const combinedReplies = replies.concat(map(annotate, repliesByParentId[note.id] || []))
return { return {
...note, ...note,
zaps: uniqBy(prop("id"), combinedZaps), zaps: uniqBy(prop("id"), combinedZaps),
reactions: uniqBy(prop("id"), combinedReactions), reactions: uniqBy(prop("id"), combinedReactions),
replies: sortBy(e => -e.created_at, uniqBy(prop("id"), combinedReplies.map(annotate))), replies: sortBy((e: Event) => -e.created_at, uniqBy(prop("id"), combinedReplies)),
matchesFilter: forceShow.has(note.id) || this.matchFilters(note), matchesFilter: forceShow.has(note.id) || this.matchFilters(note),
} }
} }
@ -180,17 +181,17 @@ export class Feed {
// Context loaders // Context loaders
loadPubkeys = events => { loadPubkeys = (events: Event[]) => {
this.opts.engine.PubkeyLoader.load( this.opts.engine.components.PubkeyLoader.load(
events.filter(this.isTextNote).flatMap(e => Tags.from(e).pubkeys().concat(e.pubkey)) events.filter(this.isTextNote).flatMap((e: Event) => Tags.from(e).pubkeys().concat(e.pubkey))
) )
} }
loadParents = events => { loadParents = (events: Event[]) => {
const {Network, Nip65} = this.opts.engine const {Network, Nip65} = this.opts.engine.components
const parentsInfo = events const parentsInfo = events
.map(e => ({id: findReplyId(e), hints: Nip65.getParentHints(10, e)})) .map((e: Event) => ({id: findReplyId(e), hints: Nip65.getParentHints(10, e)}))
.filter(({id}) => id && !this.seen.has(id)) .filter(({id}: any) => id && !this.seen.has(id))
if (parentsInfo.length > 0) { if (parentsInfo.length > 0) {
this.addSubs("context", [ this.addSubs("context", [
@ -198,14 +199,14 @@ export class Feed {
timeout: 3000, timeout: 3000,
filter: {ids: pluck("id", parentsInfo)}, filter: {ids: pluck("id", parentsInfo)},
relays: this.mergeHints(pluck("hints", parentsInfo)), relays: this.mergeHints(pluck("hints", parentsInfo)),
onEvent: batch(100, context => this.addContext(context, {depth: 2})), onEvent: batch(100, (context: Event[]) => this.addContext(context, {depth: 2})),
}), }),
]) ])
} }
} }
loadContext = batch(300, eventGroups => { loadContext = batch(300, (eventGroups: any) => {
const {Network, Nip65} = this.opts.engine const {Network, Nip65} = this.opts.engine.components
const groupsByDepth = groupBy(prop("depth"), eventGroups) const groupsByDepth = groupBy(prop("depth"), eventGroups)
for (const [depthStr, groups] of Object.entries(groupsByDepth)) { for (const [depthStr, groups] of Object.entries(groupsByDepth)) {
@ -215,21 +216,22 @@ export class Feed {
continue continue
} }
const events = flatten(pluck("events", groups)).filter(this.isTextNote) const events = flatten(pluck("events", groups as any[])).filter(this.isTextNote) as Event[]
for (const c of chunk(256, events)) { for (const c of chunk(256, events)) {
Network.subscribe({ Network.subscribe({
timeout: 3000, timeout: 3000,
relays: this.mergeHints(c.map(e => Nip65.getReplyHints(10, e))), relays: this.mergeHints(c.map(e => Nip65.getReplyHints(10, e))),
filter: {kinds: this.getReplyKinds(), "#e": pluck("id", c)}, filter: {kinds: this.getReplyKinds(), "#e": pluck("id", c as Event[])},
onEvent: batch(100, context => this.addContext(context, {depth: depth - 1})),
onEvent: batch(100, (context: Event[]) => this.addContext(context, {depth: depth - 1})),
}) })
} }
} }
}) })
listenForContext = throttle(5000, () => { listenForContext = throttle(5000, () => {
const {Network, Nip65} = this.opts.engine const {Network, Nip65} = this.opts.engine.components
if (this.stopped) { if (this.stopped) {
return return
@ -239,7 +241,7 @@ export class Feed {
const contextByParentId = groupBy(findReplyId, this.context.get()) const contextByParentId = groupBy(findReplyId, this.context.get())
const findNotes = events => const findNotes = (events: Event[]): Event[] =>
events events
.filter(this.isTextNote) .filter(this.isTextNote)
.flatMap(e => findNotes(contextByParentId[e.id] || []).concat(e)) .flatMap(e => findNotes(contextByParentId[e.id] || []).concat(e))
@ -249,7 +251,7 @@ export class Feed {
Network.subscribe({ Network.subscribe({
relays: this.mergeHints(c.map(e => Nip65.getReplyHints(10, e))), relays: this.mergeHints(c.map(e => Nip65.getReplyHints(10, e))),
filter: {kinds: this.getReplyKinds(), "#e": pluck("id", c), since: now()}, filter: {kinds: this.getReplyKinds(), "#e": pluck("id", c), since: now()},
onEvent: batch(100, context => this.addContext(context, {depth: 2})), onEvent: batch(100, (context: Event[]) => this.addContext(context, {depth: 2})),
}), }),
]) ])
} }
@ -257,7 +259,7 @@ export class Feed {
// Adders // Adders
addContext = (newEvents, {shouldLoadParents = false, depth = 0}) => { addContext = (newEvents: Event[], {shouldLoadParents = false, depth = 0}) => {
const events = this.preprocessEvents(newEvents) const events = this.preprocessEvents(newEvents)
if (this.opts.onEvent) { if (this.opts.onEvent) {
@ -288,12 +290,12 @@ export class Feed {
const {relays, filter, engine, depth} = this.opts const {relays, filter, engine, depth} = this.opts
// No point in subscribing if we have an end date // No point in subscribing if we have an end date
if (!any(prop("until"), ensurePlural(filter))) { if (!any(prop("until"), ensurePlural(filter) as any[])) {
this.addSubs("main", [ this.addSubs("main", [
engine.Network.subscribe({ engine.components.Network.subscribe({
relays, relays,
filter: ensurePlural(filter).map(assoc("since", since)), filter: ensurePlural(filter).map(assoc("since", since)),
onEvent: batch(1000, context => onEvent: batch(1000, (context: Event[]) =>
this.addContext(context, {shouldLoadParents: true, depth}) this.addContext(context, {shouldLoadParents: true, depth})
), ),
}), }),
@ -306,8 +308,8 @@ export class Feed {
new Cursor({ new Cursor({
relay, relay,
filter, filter,
subscribe: engine.Network.subscribe, Network: engine.components.Network,
onEvent: batch(100, context => onEvent: batch(100, (context: Event[]) =>
this.addContext(context, {shouldLoadParents: true, depth}) this.addContext(context, {shouldLoadParents: true, depth})
), ),
}) })
@ -325,24 +327,22 @@ export class Feed {
} }
} }
hydrate(feed) { hydrate(feed: DisplayEvent[]) {
const {depth} = this.opts const {depth} = this.opts
const notes = [] const notes: DisplayEvent[] = []
const context = [] const context: Event[] = []
const addContext = ({zaps, replies, reactions, ...note}) => { const addContext = (note: DisplayEvent) => {
context.push(fromDisplayEvent(note)) context.push(fromDisplayEvent(note))
zaps.map(zap => context.push(zap)) note.zaps.forEach(zap => context.push(zap))
reactions.map(reaction => context.push(reaction)) note.reactions.forEach(reaction => context.push(reaction))
note.replies.forEach(reply => addContext(reply))
replies.map(addContext)
} }
feed.forEach(note => { feed.forEach(note => {
addContext(note) addContext(note)
notes.push(note)
notes.push(fromDisplayEvent(note))
}) })
this.feed.set(notes) this.feed.set(notes)
@ -401,7 +401,7 @@ export class Feed {
} }
} }
deferReactions = notes => { deferReactions = (notes: Event[]) => {
const [defer, ok] = partition(e => !this.isTextNote(e) && this.isMissingParent(e), notes) const [defer, ok] = partition(e => !this.isTextNote(e) && this.isMissingParent(e), notes)
setTimeout(() => { setTimeout(() => {
@ -415,7 +415,7 @@ export class Feed {
return ok return ok
} }
deferOrphans = notes => { deferOrphans = (notes: Event[]) => {
// If something has a parent id but we haven't found the parent yet, skip it until we have it. // If something has a parent id but we haven't found the parent yet, skip it until we have it.
const [defer, ok] = partition(e => this.isTextNote(e) && this.isMissingParent(e), notes) const [defer, ok] = partition(e => this.isTextNote(e) && this.isMissingParent(e), notes)
@ -424,7 +424,7 @@ export class Feed {
return ok return ok
} }
deferAncient = notes => { deferAncient = (notes: Event[]) => {
// Sometimes relays send very old data very quickly. Pop these off the queue and re-add // Sometimes relays send very old data very quickly. Pop these off the queue and re-add
// them after we have more timely data. They still might be relevant, but order will still // them after we have more timely data. They still might be relevant, but order will still
// be maintained since everything before the cutoff will be deferred the same way. // be maintained since everything before the cutoff will be deferred the same way.
@ -436,7 +436,7 @@ export class Feed {
return ok return ok
} }
addToFeed(notes) { addToFeed(notes: Event[]) {
const context = this.context.get() const context = this.context.get()
const applied = this.applyContext(notes, context, true) const applied = this.applyContext(notes, context, true)
const sorted = sortBy(e => -e.created_at, applied) const sorted = sortBy(e => -e.created_at, applied)
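`FeedOpts.engine` is now typed as the real `Engine` rather than `any`. A hedged construction sketch: the import path, the presence of `relays` and `depth` in `FeedOpts` (they are only visible here through how `this.opts` is destructured), and the constructor taking the options object directly are all assumptions:

```typescript
import {Feed} from "src/engine/util/Feed"
import type {FeedOpts} from "src/engine/util/Feed"
import type {Engine} from "src/engine/Engine"
import type {Event} from "src/engine/types"

const makeFeed = (engine: Engine) => {
  const opts: FeedOpts = {
    limit: 50,
    depth: 2,
    relays: engine.components.User.getRelayUrls("read"),
    filter: {kinds: [1]},
    shouldLoadParents: true,
    onEvent: (e: Event) => console.log("note arrived", e.id),
    engine,
  }

  return new Feed(opts)
}
```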


@ -2,15 +2,8 @@ import EventEmitter from "events"
import {defer} from "hurdak" import {defer} from "hurdak"
export class Subscription extends EventEmitter { export class Subscription extends EventEmitter {
closed: boolean closed = false
complete: ReturnType<typeof defer> complete = defer()
constructor() {
super()
this.closed = false
this.complete = defer()
}
close = () => { close = () => {
if (!this.closed) { if (!this.closed) {


@ -1,7 +1,7 @@
export class Worker<T> { export class Worker<T> {
buffer: T[] buffer: T[]
handlers: Array<(x: T) => void> handlers: Array<(x: T) => void>
timeout: NodeJS.Timeout timeout: NodeJS.Timeout | undefined
constructor() { constructor() {
this.buffer = [] this.buffer = []
@ -26,12 +26,12 @@ export class Worker<T> {
} }
} }
push = message => { push = (message: T) => {
this.buffer.push(message) this.buffer.push(message)
this.#enqueueWork() this.#enqueueWork()
} }
listen = handler => { listen = (handler: (x: T) => void) => {
this.handlers.push(handler) this.handlers.push(handler)
} }
} }
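`Worker`'s queue methods are now typed against the generic parameter instead of accepting implicit `any`, so mismatched pushes fail at compile time. A tiny usage sketch (the import path is assumed from the surrounding util modules):

```typescript
import {Worker} from "src/engine/util/Worker" // path assumed

// Handlers are registered up front; push() buffers messages and drains them asynchronously
const urls = new Worker<string>()

urls.listen(url => console.log("processing", url))

urls.push("wss://relay.example.com")
```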


@ -8,7 +8,7 @@ type R = Record<string, any>
type M<T> = Map<string, T> type M<T> = Map<string, T>
export interface Readable<T> { export interface Readable<T> {
get: () => T | undefined get: () => T
subscribe: (f: Subscriber) => () => void subscribe: (f: Subscriber) => () => void
derived: <U>(f: (v: T) => U) => Readable<U> derived: <U>(f: (v: T) => U) => Readable<U>
} }
@ -123,7 +123,7 @@ export class Key<T extends R> implements Readable<T> {
this.store = base.derived<T>(m => m.get(key) as T) this.store = base.derived<T>(m => m.get(key) as T)
} }
get = () => this.base.get().get(this.key) get = () => this.base.get().get(this.key) as T
subscribe = (f: Subscriber) => this.store.subscribe(f) subscribe = (f: Subscriber) => this.store.subscribe(f)
@ -151,7 +151,7 @@ export class Key<T extends R> implements Readable<T> {
set = (v: T) => this.update(() => v) set = (v: T) => this.update(() => v)
merge = (d: T) => this.update(v => ({...v, ...d})) merge = (d: Partial<T>) => this.update(v => ({...v, ...d}))
remove = () => remove = () =>
this.base.update(m => { this.base.update(m => {
@ -199,6 +199,7 @@ export const writable = <T>(v: T) => new Writable(v)
export const derived = <T>(stores: Derivable, getValue: (values: any) => T) => export const derived = <T>(stores: Derivable, getValue: (values: any) => T) =>
new Derived(stores, getValue) as Readable<T> new Derived(stores, getValue) as Readable<T>
export const key = <T extends R>(base: Writable<M<T>>, pk: string, key: string) => new Key<T>(base, pk, key) export const key = <T extends R>(base: Writable<M<T>>, pk: string, key: string) =>
new Key<T>(base, pk, key)
export const collection = <T extends R>(pk: string) => new Collection<T>(pk) export const collection = <T extends R>(pk: string) => new Collection<T>(pk)
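The store tweaks above tighten the keyed-store API: `Key.get` is typed as returning `T`, and `merge` accepts a `Partial<T>`. A small sketch of how a collection and its keys are used (the `Channel` shape is trimmed down for illustration):

```typescript
import {collection} from "src/engine/util/store"

type Channel = {id: string; joined?: boolean; last_checked?: number}

// A collection is a keyed store; "id" names the primary key field
const channels = collection<Channel>("id")

// key() scopes reads and writes to one record, as in Nip28.channels.key(id).merge(...)
channels.key("channel-id").set({id: "channel-id", joined: false})
channels.key("channel-id").merge({joined: true})

console.log(channels.key("channel-id").get()) // {id: "channel-id", joined: true}
```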


@ -1,7 +1,7 @@
<script lang="ts"> <script lang="ts">
import cx from "classnames" import cx from "classnames"
export let src export let src: string
export let size = 4 export let size = 4
</script> </script>


@ -53,7 +53,7 @@ export const modal = {
getCurrent() { getCurrent() {
return last(get(modal.stack)) return last(get(modal.stack))
}, },
sync($stack, opts = {}) { sync($stack: any[], opts = {}) {
const hash = $stack.length > 0 ? `#m=${$stack.length}` : "" const hash = $stack.length > 0 ? `#m=${$stack.length}` : ""
if (hash !== window.location.hash) { if (hash !== window.location.hash) {
@ -62,16 +62,16 @@ export const modal = {
return $stack return $stack
}, },
remove(id) { remove(id: string) {
modal.stack.update($stack => modal.sync(reject(whereEq({id}), $stack))) modal.stack.update($stack => modal.sync(reject(whereEq({id}), $stack)))
}, },
push(data) { push(data: {type: string, [k: string]: any}) {
modal.stack.update($stack => modal.sync($stack.concat(data))) modal.stack.update($stack => modal.sync($stack.concat(data)))
}, },
pop() { pop() {
modal.stack.update($stack => modal.sync($stack.slice(0, -1))) modal.stack.update($stack => modal.sync($stack.slice(0, -1)))
}, },
replace(data) { replace(data: {type: string, [k: string]: any}) {
modal.stack.update($stack => $stack.slice(0, -1).concat(data)) modal.stack.update($stack => $stack.slice(0, -1).concat(data))
}, },
clear() { clear() {
@ -84,7 +84,7 @@ export const modal = {
}, },
} }
location.subscribe($location => { location.subscribe(($location: any) => {
const match = $location.hash.match(/\bm=(\d+)/) const match = $location.hash.match(/\bm=(\d+)/)
const i = match ? parseInt(match[1]) : 0 const i = match ? parseInt(match[1]) : 0
@ -93,12 +93,12 @@ location.subscribe($location => {
// Themes // Themes
const THEME = fromPairs(import.meta.env.VITE_THEME.split(",").map(x => x.split(":"))) const THEME = fromPairs(import.meta.env.VITE_THEME.split(",").map((x: string) => x.split(":"))) as Record<string, string>
const prefersDark = window.matchMedia("(prefers-color-scheme: dark)").matches const prefersDark = window.matchMedia("(prefers-color-scheme: dark)").matches
export const theme = synced("ui/theme", prefersDark ? "dark" : "light") export const theme = synced("ui/theme", prefersDark ? "dark" : "light")
export const getThemeColors = $theme => { export const getThemeColors = ($theme: string) => {
for (const x of range(1, 10)) { for (const x of range(1, 10)) {
const lum = $theme === "dark" ? (5 - x) * 25 : (x - 5) * 25 const lum = $theme === "dark" ? (5 - x) * 25 : (x - 5) * 25
@ -108,9 +108,9 @@ export const getThemeColors = $theme => {
return THEME return THEME
} }
export const getThemeColor = ($theme, k) => prop(k, getThemeColors($theme)) export const getThemeColor = ($theme: string, k: string) => prop(k, getThemeColors($theme))
export const getThemeVariables = $theme => export const getThemeVariables = ($theme: string) =>
Object.entries(getThemeColors($theme)) Object.entries(getThemeColors($theme))
.map(([k, v]) => `--${k}: ${v};`) .map(([k, v]) => `--${k}: ${v};`)
.join("\n") .join("\n")
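The modal helpers now require at least a `type` field on pushed payloads, and the theme helpers take explicit strings. A usage sketch; the module path, the `type` values, and the assumption that the `synced` theme store exposes `subscribe` are all hypothetical here:

```typescript
import {modal, theme, getThemeVariables} from "src/partials/state" // path assumed

// Payloads need a `type`; the values and extra fields below are hypothetical
modal.push({type: "note/detail", id: "some-note-id"})
modal.replace({type: "person/feed", pubkey: "some-pubkey"})
modal.pop() // pop re-syncs the #m= hash

// Inject the current theme as CSS custom properties
theme.subscribe(($theme: string) => {
  document.documentElement.setAttribute("style", getThemeVariables($theme))
})
```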

src/types.d.ts (vendored)

@ -1 +1,2 @@
declare module 'fuse.js/dist/fuse.min.js' declare module "fuse.js/dist/fuse.min.js"
declare module "paravel"


@ -2,10 +2,10 @@ const DIVISORS = {
m: BigInt(1e3), m: BigInt(1e3),
u: BigInt(1e6), u: BigInt(1e6),
n: BigInt(1e9), n: BigInt(1e9),
p: BigInt(1e12) p: BigInt(1e12),
} }
const MAX_MILLISATS = BigInt('2100000000000000000') const MAX_MILLISATS = BigInt("2100000000000000000")
const MILLISATS_PER_BTC = BigInt(1e11) const MILLISATS_PER_BTC = BigInt(1e11)
@ -15,25 +15,24 @@ function hrpToMillisat(hrpString: string) {
divisor = hrpString.slice(-1) divisor = hrpString.slice(-1)
value = hrpString.slice(0, -1) value = hrpString.slice(0, -1)
} else if (hrpString.slice(-1).match(/^[^munp0-9]$/)) { } else if (hrpString.slice(-1).match(/^[^munp0-9]$/)) {
throw new Error('Not a valid multiplier for the amount') throw new Error("Not a valid multiplier for the amount")
} else { } else {
value = hrpString value = hrpString
} }
if (!value.match(/^\d+$/)) if (!value.match(/^\d+$/)) throw new Error("Not a valid human readable amount")
throw new Error('Not a valid human readable amount')
const valueBN = BigInt(value) const valueBN = BigInt(value)
const millisatoshisBN = divisor const millisatoshisBN = divisor
? (valueBN * MILLISATS_PER_BTC) / DIVISORS[divisor] ? (valueBN * MILLISATS_PER_BTC) / (DIVISORS as any)[divisor]
: valueBN * MILLISATS_PER_BTC : valueBN * MILLISATS_PER_BTC
if ( if (
(divisor === 'p' && !(valueBN % BigInt(10) === BigInt(0))) || (divisor === "p" && !(valueBN % BigInt(10) === BigInt(0))) ||
millisatoshisBN > MAX_MILLISATS millisatoshisBN > MAX_MILLISATS
) { ) {
throw new Error('Amount is outside of valid range') throw new Error("Amount is outside of valid range")
} }
return millisatoshisBN return millisatoshisBN
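To make the unit handling above concrete, here is the same arithmetic worked for two hypothetical human-readable amounts:

```typescript
// Same constants as above
const MILLISATS_PER_BTC = BigInt(1e11)

// "10u" (10 micro-BTC): divisor u = 1e6
const tenMicroBtc = (BigInt(10) * MILLISATS_PER_BTC) / BigInt(1e6)
console.log(tenMicroBtc) // 1000000n millisats, i.e. 1000 sats

// "25m" (25 milli-BTC): divisor m = 1e3
const twentyFiveMilliBtc = (BigInt(25) * MILLISATS_PER_BTC) / BigInt(1e3)
console.log(twentyFiveMilliBtc) // 2500000000n millisats, i.e. 0.025 BTC

// "15p" would be rejected: pico amounts must be divisible by 10
```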


@ -125,17 +125,17 @@ export const stringToHue = (value: string) => {
return hash % 360 return hash % 360
} }
export const hsl = (hue: string, {saturation = 100, lightness = 50, opacity = 1} = {}) => export const hsl = (hue: number, {saturation = 100, lightness = 50, opacity = 1} = {}) =>
`hsl(${hue}, ${saturation}%, ${lightness}%, ${opacity})` `hsl(${hue}, ${saturation}%, ${lightness}%, ${opacity})`
export const tryJson = (f: <T>() => T) => export const tryJson = <T>(f: () => T) =>
tryFunc(f, (e: Error) => { tryFunc(f, (e: Error) => {
if (!e.toString().includes("JSON")) { if (!e.toString().includes("JSON")) {
warn(e) warn(e)
} }
}) })
export const tryFetch = (f: <T>() => T) => export const tryFetch = <T>(f: () => T) =>
tryFunc(f, (e: Error) => { tryFunc(f, (e: Error) => {
if (!e.toString().includes("fetch")) { if (!e.toString().includes("fetch")) {
warn(e) warn(e)
@ -219,7 +219,7 @@ export const webSocketURLToPlainOrBase64 = (url: string): string => {
return url return url
} }
export const pushToKey = (xs: any[], k: number, v: any) => { export const pushToKey = <T>(m: Record<string, T[]>, k: string, v: T) => {
xs[k] = xs[k] || [] m[k] = m[k] || []
xs[k].push(v) m[k].push(v)
} }
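`pushToKey` is now typed against a `Record` of arrays, matching how `Feed.applyContext` buckets zaps, reactions, and replies by parent id earlier in this commit. A small sketch of that grouping:

```typescript
import {pushToKey} from "src/util/misc"
import {findReplyId} from "src/util/nostr"
import type {Event} from "src/engine/types"

// Bucket context events by the note they reply to, as Feed.applyContext now does
const groupByParent = (events: Event[]) => {
  const byParent: Record<string, Event[]> = {}

  for (const event of events) {
    const parentId = findReplyId(event)

    if (parentId) {
      pushToKey(byParent, parentId, event)
    }
  }

  return byParent
}
```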


@ -77,7 +77,7 @@ export class Tags {
any(f: (t: any) => boolean) { any(f: (t: any) => boolean) {
return this.filter(f).exists() return this.filter(f).exists()
} }
type(type: string) { type(type: string | string[]) {
const types = ensurePlural(type) const types = ensurePlural(type)
return new Tags(this.tags.filter(t => types.includes(t[0]))) return new Tags(this.tags.filter(t => types.includes(t[0])))


@ -2,6 +2,7 @@ import {last, pluck, identity} from "ramda"
import {nip19} from "nostr-tools" import {nip19} from "nostr-tools"
import {first, switcherFn} from "hurdak" import {first, switcherFn} from "hurdak"
import {fromNostrURI} from "src/util/nostr" import {fromNostrURI} from "src/util/nostr"
import type {Event} from "src/engine/types"
export const NEWLINE = "newline" export const NEWLINE = "newline"
export const ELLIPSIS = "ellipsis" export const ELLIPSIS = "ellipsis"
@ -15,11 +16,11 @@ export const NOSTR_NPUB = "nostr:npub"
export const NOSTR_NPROFILE = "nostr:nprofile" export const NOSTR_NPROFILE = "nostr:nprofile"
export const NOSTR_NADDR = "nostr:naddr" export const NOSTR_NADDR = "nostr:naddr"
export const urlIsMedia = url => export const urlIsMedia = (url: string) =>
!url.match(/\.(apk|docx|xlsx|csv|dmg)/) && last(url.split("://")).includes("/") !url.match(/\.(apk|docx|xlsx|csv|dmg)/) && last(url.split("://"))?.includes("/")
export const parseContent = ({content, tags = []}) => { export const parseContent = ({content, tags = []}: {content: string; tags: string[][]}) => {
const result = [] const result: any[] = []
let text = content.trim() let text = content.trim()
let buffer = "" let buffer = ""
@ -42,7 +43,7 @@ export const parseContent = ({content, tags = []}) => {
const [tag, value, url] = tags[i] const [tag, value, url] = tags[i]
const relays = [url].filter(identity) const relays = [url].filter(identity)
let type, data, entity let type, data: any, entity
if (tag === "p") { if (tag === "p") {
type = "nprofile" type = "nprofile"
data = {pubkey: value, relays} data = {pubkey: value, relays}
@ -162,13 +163,22 @@ export const parseContent = ({content, tags = []}) => {
return result return result
} }
export const truncateContent = (content, {showEntire, maxLength, showMedia = false}) => { type TruncateContentOpts = {
showEntire: boolean
maxLength: number
showMedia: boolean
}
export const truncateContent = (
content: any[],
{showEntire, maxLength, showMedia = false}: TruncateContentOpts
) => {
if (showEntire) { if (showEntire) {
return content return content
} }
let length = 0 let length = 0
const result = [] const result: any[] = []
const truncateAt = maxLength * 0.6 const truncateAt = maxLength * 0.6
const mediaLength = maxLength / 3 const mediaLength = maxLength / 3
const entityLength = 30 const entityLength = 30
@ -199,7 +209,7 @@ export const truncateContent = (content, {showEntire, maxLength, showMedia = fal
return result return result
} }
export const getLinks = parts => export const getLinks = (parts: any[]) =>
pluck( pluck(
"value", "value",
parts.filter(x => x.type === LINK && x.isMedia) parts.filter(x => x.type === LINK && x.isMedia)
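`parseContent` and `truncateContent` now carry explicit types for their inputs and options. A usage sketch; the import path is assumed, and the `maxLength` value is arbitrary:

```typescript
import type {Event} from "src/engine/types"
import {parseContent, truncateContent, getLinks} from "src/util/notes" // path assumed

const summarize = (event: Event) => {
  // Parse note text into typed parts (plain text, links, nostr entities, ...)
  const parts = parseContent({content: event.content, tags: event.tags})

  // Trim for preview display
  const preview = truncateContent(parts, {showEntire: false, maxLength: 700, showMedia: true})

  return {preview, media: getLinks(parts)}
}
```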


@ -6,6 +6,8 @@
"baseUrl": ".", "baseUrl": ".",
"paths": { "paths": {
"src/*": ["src/*"] "src/*": ["src/*"]
} },
"strictPropertyInitialization": false,
"strictNullChecks": false
} }
} }