Continue to refine cursor

Jonathan Staab 2023-03-08 09:55:56 -06:00
parent da100bdf96
commit ddcf5622b8
6 changed files with 96 additions and 68 deletions

View File

@@ -2,6 +2,7 @@
- [ ] Collapse relaycard and relaycardsimple?
- [ ] Create my own version of nostr.how and extension explanation
- [ ] Make new notes thing fixed position
- [ ] Review sampleRelays, seems like we shouldn't be shuffling
- [ ] Go over onboarding process, suggest some good relays for newcomers

View File

@@ -37,7 +37,7 @@ const listen = ({relays, filter, onChunk = null, shouldProcess = true, delay = 5
})
}
const load = ({relays, filter, onChunk = null, shouldProcess = true, timeout = 10_000}) => {
const load = ({relays, filter, onChunk = null, shouldProcess = true, timeout = 5000}) => {
return new Promise(resolve => {
const now = Date.now()
const done = new Set()
@@ -83,7 +83,7 @@ const load = ({relays, filter, onChunk = null, shouldProcess = true, timeout = 1
const subPromise = pool.subscribe({
relays,
filter,
onEvent: batch(300, chunk => {
onEvent: batch(500, chunk => {
if (shouldProcess) {
sync.processEvents(chunk)
}
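
For context, batch (from hurdak) appears to buffer incoming events and flush them to the callback at most once per window, here widened from 300 to 500 (presumably milliseconds), trading a little latency for fewer store updates. A minimal sketch of the shape this usage assumes; the real helper may differ:

const batch = (t, onChunk) => {
  let buffer = []
  let timeout = null
  return item => {
    buffer.push(item)
    if (!timeout) {
      // First item in this window: schedule a single flush
      timeout = setTimeout(() => {
        onChunk(buffer)
        buffer = []
        timeout = null
      }, t)
    }
  }
}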

View File

@@ -4,7 +4,7 @@ import {warn} from 'src/util/logger'
import {filter, pipe, pick, groupBy, objOf, map, assoc, sortBy, uniqBy, prop} from 'ramda'
import {first, createMap} from 'hurdak/lib/hurdak'
import {Tags, isRelay, findReplyId} from 'src/util/nostr'
import {shuffle} from 'src/util/misc'
import {shuffle, fetchJson} from 'src/util/misc'
import database from 'src/agent/database'
import pool from 'src/agent/pool'
import user from 'src/agent/user'
@@ -40,7 +40,8 @@ export const initializeRelayList = async () => {
// Load relays from nostr.watch via dufflepud
try {
const url = import.meta.env.VITE_DUFFLEPUD_URL + '/relay'
const relays = prop('relays', await fetch(url).then(r => r.json())).filter(isRelay)
const json = await fetchJson(url)
const relays = json.relays.filter(isRelay)
await database.relays.bulkPatch(createMap('url', map(objOf('url'), relays)))
} catch (e) {
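
fetchJson replaces the inline fetch(url).then(r => r.json()) chain with a named helper from src/util/misc. Its body is not shown in this diff; based on the call site, something like:

const fetchJson = async (url, opts = {}) => {
  // Fetch a URL and parse the response body as JSON in one step
  const res = await fetch(url, opts)
  return res.json()
}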

View File

@@ -2,7 +2,7 @@ import {uniq, pick, identity, isEmpty} from 'ramda'
import {nip05} from 'nostr-tools'
import {noop, createMap, ensurePlural, chunk, switcherFn} from 'hurdak/lib/hurdak'
import {log} from 'src/util/logger'
import {lnurlEncode, lnurlDecode, tryFetch, now, sleep, tryJson, timedelta, shuffle, hash} from 'src/util/misc'
import {lnurlEncode, tryFunc, lnurlDecode, tryFetch, now, sleep, tryJson, timedelta, shuffle, hash} from 'src/util/misc'
import {Tags, roomAttrs, personKinds, isRelay, isShareableRelay, normalizeRelayUrl} from 'src/util/nostr'
import database from 'src/agent/database'
@@ -313,7 +313,7 @@ const verifyZapper = async (pubkey, address) => {
// Try to parse it as a lud06 LNURL or as a lud16 address
if (address.toLowerCase().startsWith('lnurl1')) {
url = lnurlDecode(address)
url = tryFunc(() => lnurlDecode(address))
} else if (address.includes('@')) {
const [name, domain] = address.split('@')
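
Wrapping lnurlDecode in tryFunc means a malformed lud06 address now logs a warning and yields undefined rather than throwing out of verifyZapper. lnurlDecode itself is not shown here; an lnurl1 string is bech32-encoded UTF-8, so a plausible sketch using the @scure/base coders already imported in src/util/misc:

import {bech32, utf8} from '@scure/base'

// Plausible sketch, not necessarily the actual implementation:
// decode the bech32 words and re-encode the payload bytes as a UTF-8 URL
const lnurlDecode = lnurl => {
  const {words} = bech32.decode(lnurl, 1023)
  return utf8.encode(bech32.fromWords(words))
}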

View File

@@ -1,6 +1,6 @@
import {bech32, utf8} from '@scure/base'
import {debounce, throttle} from 'throttle-debounce'
import {aperture, path as getPath, allPass, pipe, isNil, complement, equals, is, pluck, sum, identity, sortBy} from "ramda"
import {gt, aperture, path as getPath, allPass, pipe, isNil, complement, equals, is, pluck, sum, identity, sortBy} from "ramda"
import Fuse from "fuse.js/dist/fuse.min.js"
import {writable} from 'svelte/store'
import {isObject, round} from 'hurdak/lib/hurdak'
@@ -149,32 +149,50 @@ export const getLastSync = (k, fallback = 0) => {
export class Cursor {
until: number
limit: number
constructor(limit = 10) {
count: number
constructor(limit = 20) {
this.until = now()
this.limit = limit
this.count = 0
}
getFilter() {
return {
// Add a buffer so we can avoid blowing past the most relevant time interval
// (just now) until after a few paginations.
until: this.until + timedelta(3, 'hours'),
// since: this.until - timedelta(8, 'hours'),
until: this.until,
limit: this.limit,
}
}
// Remove events that are significantly older than the average
prune(events) {
const maxDiff = avg(events.map(e => this.until - e.created_at)) * 4
return events.filter(e => this.until - e.created_at < maxDiff)
}
// Calculate a reasonable amount to move our window to avoid fetching too much of the
// same stuff we already got without missing certain time periods due to a mismatch
// in event density between various relays
update(events) {
// update takes all events in a feed and figures out the best place to set `until`
// in order to find older events without re-fetching events that we've already seen.
// There are various edge cases:
// - When we have zero events, there's nothing we can do, presumably we have everything.
// - Sometimes relays send us extremely old events. Use median to avoid too-large gaps
if (events.length > this.limit) {
if (events.length > 2) {
// Keep track of how many requests we've made
this.count += 1
// Find the average gap between events to figure out how regularly people post to this
// feed. Multiply it by the number of events we have but scale down to avoid
// blowing past big gaps due to misbehaving relays skewing the results. Trim off
// outliers and scale based on results/requests to help with that
const timestamps = sortBy(identity, pluck('created_at', events))
const gaps = aperture(2, timestamps).map(([a, b]) => b - a)
const gap = quantile(gaps, 0.2)
const high = quantile(gaps, 0.5)
const gap = avg(gaps.filter(gt(high)))
// If we're just warming up, scale the window down even further to avoid
// blowing past the most relevant time period
const scale = (
Math.min(1, Math.log10(events.length))
* Math.min(1, Math.log10(this.count + 1))
)
// Only paginate part of the way so we can avoid missing stuff
this.until -= Math.round(gap * events.length * 0.5)
this.until -= Math.round(gap * scale * this.limit)
}
}
}
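
The replaced quantile(gaps, 0.2) estimate now works in two steps: quantile(gaps, 0.5) takes the median gap between consecutive timestamps, and since ramda's gt is curried as gt(a)(b) = a > b, gaps.filter(gt(high)) keeps only the gaps below that median before averaging, which trims outlier gaps from misbehaving relays. The scale factor then damps early pages: a full page of 20 events on the first request gives scale = min(1, log10(20)) * min(1, log10(2)) ≈ 0.3, so until advances by round(gap * 0.3 * 20), roughly six average gaps. avg and quantile are not shown in this diff; presumably they are the usual helpers, roughly:

import {sum, sortBy, identity} from 'ramda'

const avg = xs => sum(xs) / xs.length

// Linear-interpolation quantile over a sorted copy of xs
const quantile = (xs, q) => {
  const sorted = sortBy(identity, xs)
  const pos = (sorted.length - 1) * q
  const base = Math.floor(pos)
  const rest = pos - base
  return sorted[base + 1] === undefined
    ? sorted[base]
    : sorted[base] + rest * (sorted[base + 1] - sorted[base])
}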
@@ -263,13 +281,13 @@ export const stringToColor = (value, {saturation = 100, lightness = 50, opacity
return `hsl(${(hash % 360)}, ${saturation}%, ${lightness}%, ${opacity})`;
}
export const tryFunc = (f, ignore) => {
export const tryFunc = (f, ignore = null) => {
try {
const r = f()
if (is(Promise, r)) {
return r.catch(e => {
if (!e.toString().includes(ignore)) {
if (!ignore || !e.toString().includes(ignore)) {
warn(e)
}
})
@@ -277,7 +295,7 @@ export const tryFunc = (f, ignore) => {
return r
}
} catch (e) {
if (!e.toString().includes(ignore)) {
if (!ignore || !e.toString().includes(ignore)) {
warn(e)
}
}
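
The added !ignore guard fixes a subtle bug in the old default: with ignore undefined, e.toString().includes(ignore) coerces the argument to the string "undefined", so any error whose message happened to contain that word was silently swallowed instead of warned. Hypothetical usage after the change:

// No ignore pattern: always warns, returns undefined on failure
const url = tryFunc(() => lnurlDecode(address))

// Warns only when the message lacks the substring (raw is hypothetical)
const data = tryFunc(() => JSON.parse(raw), 'Unexpected token')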

View File

@@ -1,6 +1,6 @@
<script lang="ts">
import {onMount} from "svelte"
import {partition, propEq, uniqBy, sortBy, prop} from "ramda"
import {partition, always, propEq, uniqBy, sortBy, prop} from "ramda"
import {slide} from "svelte/transition"
import {quantify} from "hurdak/lib/hurdak"
import {createScroller, now, Cursor} from "src/util/misc"
@@ -15,49 +15,17 @@
export let filter
export let relays = []
export let shouldDisplay = null
export let shouldDisplay = always(true)
export let parentsTimeout = 500
let notes = []
let notesBuffer = []
const seen = new Set()
// Add a short buffer so we can get the most possible results for recent notes
const since = now()
const maxNotes = 100
const cursor = new Cursor()
const processNewNotes = async newNotes => {
newNotes = user.muffle(newNotes).filter(n => !seen.has(n.id))
if (shouldDisplay) {
newNotes = newNotes.filter(shouldDisplay)
}
// Load parents before showing the notes so we have hierarchy. Give it a short
// timeout, since this is really just a nice-to-have
const combined = uniqBy(
prop("id"),
newNotes
.filter(propEq("kind", 1))
.concat(await network.loadParents(newNotes, {timeout: parentsTimeout}))
.map(asDisplayEvent)
)
// Stream in additional data
network.streamContext({
depth: 2,
notes: combined,
onChunk: context => {
notes = network.applyContext(notes, user.muffle(context))
},
})
// Show replies grouped by parent whenever possible
const merged = mergeParents(combined)
// Drop the oldest 20% of notes since we often get pretty old stuff
return merged.slice(0, Math.ceil(merged.length * 0.8))
}
const seen = new Set()
const loadBufferedNotes = () => {
// Drop notes at the end if there are a lot
@@ -67,18 +35,54 @@
}
const onChunk = async newNotes => {
const chunk = sortBy(e => -e.created_at, await processNewNotes(newNotes))
const [bottom, top] = partition(e => e.created_at < since, chunk)
// Deduplicate and filter out stuff we don't want, apply user preferences
const filtered = user.muffle(newNotes.filter(n => !seen.has(n.id) && shouldDisplay(n)))
for (const note of chunk) {
// Drop the oldest 20% of notes. We sometimes get pretty old stuff since we don't
// use a since on our filter
const pruned = cursor.prune(filtered)
// Keep track of what we've seen
for (const note of pruned) {
seen.add(note.id)
}
// Slice new notes in case someone leaves the tab open for a long time
notes = uniqBy(prop("id"), notes.concat(bottom))
notesBuffer = top.concat(notesBuffer).slice(0, maxNotes)
// Load parents before showing the notes so we have hierarchy. Give it a short
// timeout, since this is really just a nice-to-have
const parents = await network.loadParents(filtered, {timeout: parentsTimeout})
cursor.update(notes)
// Keep track of parents too
for (const note of parents) {
seen.add(note.id)
}
// Combine notes and parents into a single collection
const combined = uniqBy(
prop("id"),
filtered.filter(propEq("kind", 1)).concat(parents).map(asDisplayEvent)
)
// Stream in additional data and merge it in
network.streamContext({
depth: 2,
notes: combined,
onChunk: context => {
context = user.muffle(context)
notesBuffer = network.applyContext(notesBuffer, context)
notes = network.applyContext(notes, context)
},
})
// Show replies grouped by parent whenever possible
const merged = sortBy(e => -e.created_at, mergeParents(combined))
// Split into notes before and after we started loading
const [bottom, top] = partition(e => e.created_at < since, merged)
// Slice new notes in case someone leaves the tab open for a long time
notesBuffer = top.concat(notesBuffer).slice(0, maxNotes)
notes = uniqBy(prop("id"), notes.concat(bottom))
}
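
onChunk now absorbs the pipeline that processNewNotes used to own: muffle and dedupe, prune against the cursor, mark ids as seen, load parents, stream in context, then merge threads and partition on since so notes created after page load go to the buffer instead of shifting the visible feed. mergeParents is defined elsewhere; given findReplyId from src/util/nostr, a hypothetical sketch of its behavior:

import {createMap} from 'hurdak/lib/hurdak'
import {findReplyId} from 'src/util/nostr'

// Hypothetical: attach each reply to its parent when the parent is in the
// list, so threads render as a single grouped unit
const mergeParents = notes => {
  const notesById = createMap('id', notes)
  const merged = []
  for (const note of notes) {
    const parent = notesById[findReplyId(note)]
    if (parent) {
      parent.replies = (parent.replies || []).concat(note)
    } else {
      merged.push(note)
    }
  }
  return merged
}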
onMount(() => {
@@ -88,16 +92,20 @@
onChunk,
})
const scroller = createScroller(() => {
const scroller = createScroller(async () => {
if ($modal) {
return
}
return network.load({
// Wait for this page to load before trying again
await network.load({
relays,
filter: mergeFilter(filter, cursor.getFilter()),
onChunk,
})
// Update our cursor
cursor.update(notes)
})
return () => {
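
Making the scroller callback async and awaiting network.load is the substance of this last hunk: the next page is not requested until the previous one has resolved and the cursor has advanced. createScroller lives in src/util/misc and is not shown; a sketch consistent with this usage, assuming a polling loop and a cleanup function (the real utility may differ):

const createScroller = (loadMore, {delay = 500} = {}) => {
  let stopped = false
  const check = async () => {
    // Load more when the viewport is within two screens of the bottom,
    // and wait for the page to finish before checking again
    const {scrollY, innerHeight} = window
    if (scrollY + innerHeight * 2 > document.body.scrollHeight) {
      await loadMore()
    }
    if (!stopped) {
      setTimeout(check, delay)
    }
  }
  check()
  return () => {
    stopped = true
  }
}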