Improve query speed when loading from dexie by sorting first

Jonathan Staab 2022-12-24 13:50:18 -08:00
parent 92d823f5b2
commit de3f75b2a3
12 changed files with 147 additions and 89 deletions

View File

@ -16,6 +16,7 @@ If you like Coracle and want to support its development, you can donate sats via
- [x] Notifications
- [x] Link previews
- [x] Add notes, follows, likes tab to profile
- [ ] Show relative dates
- [ ] Mentions - render done, now reference in compose
- [ ] Image uploads
- [ ] An actual readme
@ -26,15 +27,23 @@ If you like Coracle and want to support its development, you can donate sats via
# Bugs
- [ ] lil janky
- [ ] Add alerts for replies to posts the user liked
- [ ] With link/image previews, remove the url from the note body if it's on a separate last line
- [ ] Stack views so scroll position isn't lost on navigation
- [ ] Add notification for slow relays
- [ ] Parent notes are having a hard time loading
- [ ] Separating events table into notes/reactions/etc would effectively give us a second index on kind.
- [ ] Clicking on a badge in the popover falls through, and might also crash
- [ ] Add a slider in settings so users can decide whether to go with fast relays, or wait for everyone to complete their queries. Most relevant for NoteDetail
# Changelog
## 0.2.1
- [x] Exclude people from search who have no profile data available
- [x] Speed up note retrieval by sorting first when the filter isn't restrictive (sketched below)
- [x] Only show a certain number of replies on popular notes, with a link at the bottom showing total replies
- [x] Refine algorithm for which relays to drop when they don't send an eose. This helps avoid the "we couldn't find this note" error message on the note detail, since we were giving up too early.
- [x] Improve url detection and shortening
## 0.2.0
- [x] Completely re-worked data synchronization layer, moving from naive just-in-time requests to background listeners, loaders, and a local copy stored in dexie. Events and tags, but not people, are deleted from the database on logout, and old events are periodically purged.
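The 0.2.1 speed-up is implemented in src/relay further down in this commit; as a rough standalone sketch of the idea (assuming a dexie table db.events indexed on created_at as in that diff, with a hypothetical matches() helper standing in for the per-field checks):

// Sketch: only prefilter via a secondary index when the filter is restrictive (ids/authors);
// otherwise walk the created_at index so results come back already ordered and the
// filtering loop can stop as soon as `limit` events match.
const queryEvents = async ({limit = 100, ...filter}) => {
  let events
  if (filter.ids) {
    events = await db.events.where('id').anyOf(filter.ids).reverse().sortBy('created_at')
  } else if (filter.authors) {
    events = await db.events.where('pubkey').anyOf(filter.authors).reverse().sortBy('created_at')
  } else {
    events = await db.events.orderBy('created_at').reverse().toArray()
  }
  const result = []
  for (const e of events) {
    if (matches(filter, e)) result.push(e) // matches() is a stand-in, not part of the codebase
    if (result.length > limit) break
  }
  return result
}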

BIN
package-lock.json generated

Binary file not shown.

View File

@ -26,6 +26,7 @@
"classnames": "^2.3.2",
"compressorjs": "^1.1.1",
"dexie": "^3.2.2",
"extract-urls": "^1.3.2",
"fuse.js": "^6.6.2",
"hurdak": "github:ConsignCloud/hurdak",
"nostr-tools": "github:fiatjaf/nostr-tools#1b798b2",

View File

@ -82,6 +82,16 @@
'App/alerts',
[{kinds: [1, 7], '#p': [$user.pubkey], since: mostRecentAlert}],
e => {
// Don't alert about people's own stuff
if (e.pubkey === $user.pubkey) {
return
}
// Only notify users about positive reactions
if (e.kind === 7 && !['', '+'].includes(e.content)) {
return
}
mostRecentAlert = Math.max(e.created_at, mostRecentAlert)
}
)

View File

@ -1,9 +1,11 @@
<script>
import cx from 'classnames'
import extractUrls from 'extract-urls'
import {whereEq, find} from 'ramda'
import {slide} from 'svelte/transition'
import {navigate} from 'svelte-routing'
import {hasParent, findLink} from 'src/util/html'
import {quantify} from 'hurdak/lib/hurdak'
import {hasParent} from 'src/util/html'
import {findReply} from "src/util/nostr"
import Preview from 'src/partials/Preview.svelte'
import Anchor from 'src/partials/Anchor.svelte'
@ -21,7 +23,7 @@
let reply = null
const link = $settings.showLinkPreviews ? findLink(note.content) : null
const links = $settings.showLinkPreviews ? extractUrls(note.content) || [] : []
const interactive = !anchorId || anchorId !== note.id
let likes, flags, like, flag
@ -131,20 +133,20 @@
{:else}
<div class="text-ellipsis overflow-hidden flex flex-col gap-2">
<p>{@html note.html}</p>
{#if link}
{#each links.slice(-2) as link}
<div>
<div class="inline-block" on:click={e => e.stopPropagation()}>
<Preview endpoint={`${$settings.dufflepudUrl}/link/preview`} url={link} />
</div>
</div>
{/if}
{/each}
</div>
<div class="flex gap-6 text-light">
<div>
<i
class="fa-solid fa-reply cursor-pointer"
on:click={startReply} />
{note.replies.length}
{note.repliesCount}
</div>
<div class={cx({'text-accent': like})}>
<i
@ -188,4 +190,9 @@
<svelte:self showParent={false} note={r} depth={depth - 1} {invertColors} {anchorId} />
</div>
{/each}
{#if note.repliesCount > 5 && note.replies.length < note.repliesCount}
<div class="ml-10 mt-2 text-light cursor-pointer" on:click={onClick}>
{quantify(note.repliesCount - note.replies.length, 'more reply', 'more replies')} found.
</div>
{/if}
{/if}

View File

@ -1,9 +1,10 @@
import {liveQuery} from 'dexie'
import extractUrls from 'extract-urls'
import {get} from 'svelte/store'
import {pluck, uniq, take, uniqBy, groupBy, concat, without, prop, isNil, identity} from 'ramda'
import {ensurePlural, createMap, ellipsize} from 'hurdak/lib/hurdak'
import {intersection, pluck, sortBy, uniq, uniqBy, groupBy, concat, without, prop, isNil, identity} from 'ramda'
import {ensurePlural, first, createMap, ellipsize} from 'hurdak/lib/hurdak'
import {escapeHtml} from 'src/util/html'
import {filterTags, findReply, findRoot} from 'src/util/nostr'
import {filterTags, getTagValues, findReply, findRoot} from 'src/util/nostr'
import {db} from 'src/relay/db'
import pool from 'src/relay/pool'
import cmd from 'src/relay/cmd'
@ -18,41 +19,42 @@ const lq = f => liveQuery(async () => {
}
})
// Utils for querying dexie - these return collections, not arrays
const prefilterEvents = filter => {
if (filter.ids) {
return db.events.where('id').anyOf(ensurePlural(filter.ids))
}
if (filter.authors) {
return db.events.where('pubkey').anyOf(ensurePlural(filter.authors))
}
if (filter.kinds) {
return db.events.where('kind').anyOf(ensurePlural(filter.kinds))
}
return db.events
}
// Utils for filtering db - nothing below should load events from the network
const filterEvents = filter => {
return prefilterEvents(filter)
.filter(e => {
if (filter.ids && !filter.ids.includes(e.id)) return false
if (filter.authors && !filter.authors.includes(e.pubkey)) return false
if (filter.muffle && filter.muffle.includes(e.pubkey)) return false
if (filter.kinds && !filter.kinds.includes(e.kind)) return false
if (filter.since && filter.since > e.created_at) return false
if (filter.until && filter.until < e.created_at) return false
if (!isNil(filter.content) && filter.content !== e.content) return false
const filterEvents = async ({limit, ...filter}) => {
let events = db.events
return true
})
.reverse()
.sortBy('created_at')
// Sorting is expensive, so walk the created_at index in order by default; only when a filter
// will dramatically reduce the number of results do we prefilter on it and sort in memory
if (filter.ids) {
events = await db.events.where('id').anyOf(ensurePlural(filter.ids)).reverse().sortBy('created_at')
} else if (filter.authors) {
events = await db.events.where('pubkey').anyOf(ensurePlural(filter.authors)).reverse().sortBy('created_at')
} else {
events = await events.orderBy('created_at').reverse().toArray()
}
const result = []
for (const e of events) {
if (filter.ids && !filter.ids.includes(e.id)) continue
if (filter.authors && !filter.authors.includes(e.pubkey)) continue
if (filter.muffle && filter.muffle.includes(e.pubkey)) continue
if (filter.kinds && !filter.kinds.includes(e.kind)) continue
if (filter.since && filter.since > e.created_at) continue
if (filter.until && filter.until < e.created_at) continue
if (filter['#p'] && intersection(filter['#p'], getTagValues(e.tags)).length === 0) continue
if (filter['#e'] && intersection(filter['#e'], getTagValues(e.tags)).length === 0) continue
if (!isNil(filter.content) && filter.content !== e.content) continue
if (filter.customFilter && !filter.customFilter(e)) continue
result.push(e)
if (result.length > limit) {
break
}
}
return result
}
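Elsewhere in this commit the callers (the Alerts, Global, and Network views) switch from wrapping results in take(limit + 1, ...) to passing limit and an optional customFilter straight into filterEvents. Roughly, as a usage sketch (the predicate here is only illustrative; the real ones appear in the view diffs below):

const notes = await relay.filterEvents({
  limit: 20,
  kinds: [1],
  customFilter: e => e.content.length > 0, // illustrative predicate, not from the commit
})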
const filterReplies = async (id, filter) => {
@ -96,7 +98,14 @@ const findNote = async (id, {showEntire = false, depth = 1} = {}) => {
return {
...note, reactions, person, html, parent,
replies: depth === 0 ? [] : await Promise.all(replies.map(r => findNote(r.id, {depth: depth - 1}))),
repliesCount: replies.length,
replies: depth === 0
? []
: await Promise.all(
sortBy(e => -e.created_at, replies)
.slice(0, showEntire ? Infinity : 5)
.map(r => findNote(r.id, {depth: depth - 1}))
),
}
}
@ -125,24 +134,43 @@ const annotateChunk = async chunk => {
allNotes
)
return await Promise.all(Object.keys(notesByRoot).map(findNote))
// Re-sort, since events arrive in chronological order regardless of their level in the
// reply hierarchy. This is really a hack, since a single like can bump an old note back
// up to the top of the feed
return sortBy(e => -e.created_at, await Promise.all(Object.keys(notesByRoot).map(findNote)))
}
const renderNote = async (note, {showEntire = false}) => {
const $people = get(db.people)
const shouldEllipsize = note.content.length > 500 && !showEntire
const content = shouldEllipsize ? ellipsize(note.content, 500) : note.content
const $people = get(db.people)
const peopleByPubkey = createMap(
'pubkey',
filterTags({tag: "p"}, note).map(k => $people[k]).filter(identity)
)
return escapeHtml(content)
.replace(/\n/g, '<br />')
.replace(/https?:\/\/([\w.-]+)[^ ]*/g, (url, domain) => {
return `<a href="${url}" target="_blank noopener" class="underline">${domain}</a>`
})
let content
// Ellipsize
content = shouldEllipsize ? ellipsize(note.content, 500) : note.content
// Escape html
content = escapeHtml(content)
// Extract urls
for (const url of extractUrls(content) || []) {
const $a = document.createElement('a')
$a.href = url
$a.target = "_blank noopener"
$a.className = "underline"
$a.innerText = first(url.replace(/https?:\/\/(www\.)?/, '').split(/[\/\?#]/))
// Replace the url in the note body with the shortened link
content = content.replace(url, $a.outerHTML)
}
// Mentions
content = content
.replace(/#\[(\d+)\]/g, (tag, i) => {
if (!note.tags[parseInt(i)]) {
return tag
@ -154,26 +182,8 @@ const renderNote = async (note, {showEntire = false}) => {
return `@<a href="/people/${pubkey}/notes" class="underline">${name}</a>`
})
}
const filterAlerts = async (person, limit) => {
const tags = db.tags.where('value').equals(person.pubkey)
const ids = pluck('event', await tags.toArray())
const alerts = take(limit + 1, await filterEvents({kinds: [1, 7], ids}))
return alerts.filter(e => {
// Don't show people's own stuff
if (e.pubkey === person.pubkey) {
return false
}
// Only notify users about positive reactions
if (e.kind === 7 && !['', '+'].includes(e.content)) {
return false
}
return true
})
return content
}
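As a worked example of the link shortening above (the url is made up; first is the hurdak helper imported at the top of this file):

const url = 'https://www.example.com/some/path?q=1'
const text = first(url.replace(/https?:\/\/(www\.)?/, '').split(/[\/\?#]/))
// text === 'example.com': only the domain is shown as the link text; the full url stays in the href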
// Synchronization
@ -281,6 +291,6 @@ export const connections = db.connections
export default {
db, pool, cmd, lq, filterEvents, getOrLoadNote, filterReplies, findNote,
annotateChunk, renderNote, filterAlerts, login, addRelay, removeRelay,
annotateChunk, renderNote, login, addRelay, removeRelay,
follow, unfollow, loadNoteContext,
}

View File

@ -40,7 +40,9 @@ class Channel {
const sub = pool.sub({filter, cb: onEvent}, this.name, r => {
eoseRelays.push(r)
if (eoseRelays.length >= relays.length - 2) {
// If we have only a few relays, wait for all of them; otherwise ignore roughly the slowest 1/10
const threshold = Math.round(relays.length / 10)
if (eoseRelays.length >= relays.length - threshold) {
onEose()
}
})
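To make the new cutoff concrete (a worked example, not code from the commit):

const threshold = relays => Math.round(relays.length / 10)
// 3 relays  -> threshold 0: eose only fires after all 3 respond
// 10 relays -> threshold 1: eose fires once 9 have responded
// 24 relays -> threshold 2: eose fires once 22 have responded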

View File

@ -56,7 +56,25 @@
}
const loadNotes = async limit => {
const events = await relay.filterAlerts($user, limit + 1)
const events = await relay.filterEvents({
limit,
kinds: [1, 7],
'#p': [$user.pubkey],
customFilter: e => {
// Don't show people's own stuff
if (e.pubkey === $user.pubkey) {
return false
}
// Only notify users about positive reactions
if (e.kind === 7 && !['', '+'].includes(e.content)) {
return false
}
return true
}
})
const notes = await relay.annotateChunk(events.filter(propEq('kind', 1)))
const reactions = await Promise.all(
events

View File

@ -70,5 +70,3 @@ export const escapeHtml = html => {
return div.innerHTML
}
export const findLink = t => first(t.match(/https?:\/\/([\w.-]+)[^ ]*/))

View File

@ -1,11 +1,15 @@
<script>
import {prop} from 'ramda'
import {fly} from 'svelte/transition'
import {fuzzy} from "src/util/misc"
import {user, people} from 'src/relay'
export let q
let search = fuzzy(Object.values($people), {keys: ["name", "about", "pubkey"]})
let search = fuzzy(
Object.values($people).filter(prop('name')),
{keys: ["name", "about", "pubkey"]}
)
</script>
<ul class="py-8 flex flex-col gap-2 max-w-xl m-auto">

View File

@ -1,17 +1,14 @@
<script>
import {when, take, propEq} from 'ramda'
import {when, propEq} from 'ramda'
import {onMount, onDestroy} from 'svelte'
import Notes from "src/partials/Notes.svelte"
import {timedelta, Cursor, getLastSync} from 'src/util/misc'
import {timedelta, now, Cursor} from 'src/util/misc'
import {getTagValues} from 'src/util/nostr'
import relay, {user} from 'src/relay'
let sub
const cursor = new Cursor(
getLastSync('views/notes/Global'),
timedelta(1, 'minutes')
)
const cursor = new Cursor(now(), timedelta(1, 'minutes'))
onMount(async () => {
sub = await relay.pool.listenForEvents(
@ -28,10 +25,11 @@
})
const loadNotes = async limit => {
const notes = take(limit + 1, await relay.filterEvents({
const notes = await relay.filterEvents({
limit,
kinds: [1],
muffle: getTagValues($user?.muffle || []),
}))
})
if (notes.length <= limit) {
const [since, until] = cursor.step()

View File

@ -1,5 +1,5 @@
<script>
import {when, take, propEq} from 'ramda'
import {when, propEq} from 'ramda'
import {onMount, onDestroy} from 'svelte'
import Notes from "src/partials/Notes.svelte"
import {timedelta, Cursor, getLastSync} from 'src/util/misc'
@ -36,11 +36,12 @@
})
const loadNotes = async limit => {
const notes = take(limit + 1, await relay.filterEvents({
const notes = await relay.filterEvents({
limit,
kinds: [1],
authors: $network.concat($user.pubkey),
muffle: getTagValues($user?.muffle || []),
}))
})
if (notes.length <= limit) {
const [since, until] = cursor.step()