Use bigger relay samples

This commit is contained in:
Jonathan Staab 2023-02-18 07:31:07 -06:00
parent 9dc5c01241
commit 7c38ccab67
11 changed files with 41 additions and 29 deletions

View File

@ -6,6 +6,7 @@
- [ ] Test publishing events with zero relays
- [ ] Try lumping tables into a single key each to reduce load/save contention and time
- [ ] Fix turning off link previews, or make sure it applies to images/videos too
- [ ] Allow user to set sample size to manage bandwidth/performance tradeoff
# Snacks

View File

@ -4,7 +4,7 @@ import {chunk} from 'hurdak/lib/hurdak'
import {batch, timedelta, now} from 'src/util/misc'
import {
getRelaysForEventParent, getAllPubkeyWriteRelays, aggregateScores,
getUserReadRelays, getRelaysForEventChildren,
getUserReadRelays, getRelaysForEventChildren, getUserRelays
} from 'src/agent/relays'
import database from 'src/agent/database'
import pool from 'src/agent/pool'
@ -106,7 +106,9 @@ const loadPeople = async (pubkeys, {relays = null, kinds = personKinds, force =
const loadParents = notes => {
const notesWithParent = notes.filter(findReplyId)
const relays = aggregateScores(notesWithParent.map(getRelaysForEventParent)).slice(0, 3)
const relays = aggregateScores(notesWithParent.map(getRelaysForEventParent))
.concat(getUserRelays())
.slice(0, 10)
return load(relays, {kinds: [1], ids: notesWithParent.map(findReplyId)})
}
@ -114,9 +116,11 @@ const loadParents = notes => {
const streamContext = ({notes, updateNotes, depth = 0}) => {
// Some relays reject very large filters, so send multiple subscriptions
chunk(256, notes).forEach(chunk => {
const relays = aggregateScores(chunk.map(getRelaysForEventChildren)).slice(0, 3)
const authors = getStalePubkeys(pluck('pubkey', chunk))
const filter = [{kinds: [1, 7], '#e': pluck('id', chunk)}] as Array<object>
const relays = aggregateScores(chunk.map(getRelaysForEventChildren))
.concat(getUserRelays())
.slice(0, 10)
if (authors.length > 0) {
filter.push({kinds: personKinds, authors})

View File

@ -120,8 +120,9 @@ export const getEventPublishRelays = event => {
// Utils
const uniqByUrl = uniqBy(prop('url'))
const sortByScore = sortBy(r => -r.score)
export const uniqByUrl = uniqBy(prop('url'))
export const sortByScore = sortBy(r => -r.score)
export const aggregateScores = relayGroups => {
const scores = {} as Record<string, {

View File

@ -89,7 +89,8 @@ const processProfileEvents = async events => {
if (e.created_at > (person.relays_updated_at || 0)) {
return {
relays_updated_at: e.created_at,
relays: e.tags.map(([url, read, write]) => ({url, read, write})),
relays: e.tags.map(([url, read, write]) =>
({url, read: read !== '!', write: write !== '!'})),
}
}
},

View File

@ -10,7 +10,7 @@
import Note from "src/partials/Note.svelte"
import user from 'src/agent/user'
import network from 'src/agent/network'
import {getUserReadRelays} from 'src/agent/relays'
import {getUserReadRelays, uniqByUrl} from 'src/agent/relays'
import {modal} from "src/app/ui"
import {mergeParents} from "src/app"
@ -29,6 +29,11 @@
.wrap(($profile?.muffle || []).filter(t => Math.random() > parseFloat(last(t))))
.values().all()
// Sample relays in case we have a whole ton of them. Add in user relays in
// case we don't have any
const sampleRelays = () =>
uniqByUrl(relays.concat(getUserReadRelays())).slice(0, 30)
const processNewNotes = async newNotes => {
// Remove people we're not interested in hearing about, sort by created date
newNotes = newNotes.filter(e => !muffle.includes(e.pubkey))
@ -77,10 +82,7 @@
}
onMount(() => {
// Add in our user relays in case they weren't specified above
relays = relays.concat(getUserReadRelays()).slice(0, 3)
const sub = network.listen(relays, {...filter, since}, onChunk)
const sub = network.listen(sampleRelays(), {...filter, since}, onChunk)
const scroller = createScroller(() => {
if ($modal) {
@ -89,7 +91,7 @@
const {limit, until} = cursor
return network.listenUntilEose(relays, {...filter, until, limit}, onChunk)
return network.listenUntilEose(sampleRelays(), {...filter, until, limit}, onChunk)
})
return () => {

View File

@ -21,7 +21,7 @@
messages.lastCheckedByPubkey.update($obj => ({...$obj, [pubkey]: now()}))
const getRelays = () => getAllPubkeyRelays([pubkey, user.getPubkey()]).slice(0, 3)
const getRelays = () => getAllPubkeyRelays([pubkey, user.getPubkey()])
const decryptMessages = async events => {
// Gotta do it in serial because of extension limitations

View File

@ -46,9 +46,8 @@
// Add all the relays we know the person uses, as well as our own
// in case we don't have much information
relays = relays
.concat(getPubkeyWriteRelays(pubkey))
.concat(getUserReadRelays())
.slice(0, 3)
.concat(getPubkeyWriteRelays(pubkey).slice(0, 3))
.concat(getUserReadRelays().slice(0, 3))
// Refresh our person if needed
network.loadPeople([pubkey]).then(() => {

View File

@ -1,13 +1,12 @@
<script lang="ts">
import {pluck, reject, last} from 'ramda'
import {reject, last} from 'ramda'
import {onDestroy} from 'svelte'
import {navigate} from 'svelte-routing'
import {displayList} from 'hurdak/lib/hurdak'
import {sleep} from 'src/util/misc'
import {sleep, shuffle} from 'src/util/misc'
import Content from 'src/partials/Content.svelte'
import Spinner from 'src/partials/Spinner.svelte'
import Heading from 'src/partials/Heading.svelte'
import Anchor from 'src/partials/Anchor.svelte'
import Input from 'src/partials/Input.svelte'
import Modal from 'src/partials/Modal.svelte'
import {getUserReadRelays} from 'src/agent/relays'
import database from 'src/agent/database'
@ -15,19 +14,23 @@
import user from 'src/agent/user'
import {loadAppData} from 'src/app'
let url = ''
let message = null
let mounted = true
let currentRelays = []
let attemptedRelays = new Set()
let knownRelays = database.watch('relays', table => table.all())
let knownRelays = database.watch('relays', table => shuffle(table.all()))
const searchSample = async () => {
if (!mounted) {
return
}
currentRelays = reject(r => attemptedRelays.has(r.url), $knownRelays).slice(0, 10)
currentRelays.forEach(({url}) => attemptedRelays.add(url))
if (currentRelays.length === 0) {
message = `
We weren't able to find your profile data, you'll need to select your
No luck finding your profile data - you'll need to select your
relays manually to continue.`
await sleep(3000)
@ -36,7 +39,6 @@
} else {
await network.loadPeople([user.getPubkey()], {relays: currentRelays})
console.log(user.getProfile(), getUserReadRelays())
if (getUserReadRelays().length > 0) {
message = `Success! Just a moment while we get things set up.`
@ -59,6 +61,10 @@
}
searchSample()
onDestroy(() => {
mounted = false
})
</script>
<Content size="lg" class="text-center">

View File

@ -24,7 +24,6 @@
})
// Prime our database, in case we don't have any people stored yet
console.log(getUserReadRelays())
network.listenUntilEose(getUserReadRelays(), {kinds: personKinds, limit: 300})
</script>
@ -32,7 +31,7 @@
<i slot="before" class="fa-solid fa-search" />
</Input>
{#each (search ? search(q) : []).slice(0, 30) as person (person.pubkey)}
{#each (search ? search(q) : []).slice(0, 50) as person (person.pubkey)}
{#if person.pubkey !== user.getPubkey() && !(hideFollowing && $petnamePubkeys.includes(person.pubkey))}
<PersonInfo {person} />
{/if}

View File

@ -6,8 +6,8 @@
// Get first- and second-order follows. Shuffle and slice the network so we're
// not sending too many pubkeys. This will also result in some variety.
const authors = shuffle(getUserNetwork()).slice(0, 100)
const relays = getAllPubkeyWriteRelays(authors).slice(0, 3)
const authors = shuffle(getUserNetwork()).slice(0, 256)
const relays = getAllPubkeyWriteRelays(authors)
const filter = {kinds: [1, 7], authors}
</script>

View File

@ -1 +0,0 @@