Transform legacy mentions into bech32, fix loki persistence

This commit is contained in:
Jonathan Staab 2023-04-11 13:01:34 -05:00
parent 5b8243cd49
commit 6c859c9afc
7 changed files with 175 additions and 156 deletions

View File

@ -7,11 +7,8 @@
- [ ] Relays bounty
- [ ] Ability to create custom feeds
- [ ] Add global/following/network tabs to relay detail
- [ ] Fix tag-style event mentions. Probably transform all mentions into entities in parse
- [ ] Some lnurls aren't working npub1y3k2nheva29y9ej8a22e07epuxrn04rvgy28wvs54y57j7vsxxuq0gvp4j
- [ ] Fix performance issues
- [ ] https://github.com/techfort/LokiJS
- Use indexed adapter github.com/techfort/LokiJS/blob/master/tutorials/Persistence%20Adapters.md and partitioning adapter
- Call close onbeforeunload to save pending changes
- Fix force relays on login: http://localhost:5173/messages/npub1l66wvfm7dxhd6wmvpukpjpyhvwtlxzu0qqajqxjfpr4rlfa8hl5qlkfr3q
- [ ] Fix iOS/safari/firefox

View File

@ -1,8 +1,7 @@
import {map, pick, last, uniqBy} from "ramda"
import {get} from "svelte/store"
import {doPipe} from "hurdak/lib/hurdak"
import {parseContent} from "src/util/html"
import {Tags, roomAttrs, displayPerson, findReplyId, findRootId} from "src/util/nostr"
import {parseContent, Tags, roomAttrs, displayPerson, findReplyId, findRootId} from "src/util/nostr"
import {getRelayForPersonHint} from "src/agent/relays"
import {getPersonWithFallback} from "src/agent/db"
import pool from "src/agent/pool"
@ -111,7 +110,7 @@ const processMentions = map(pubkey => {
const tagsFromContent = (content, tags) => {
const seen = new Set(Tags.wrap(tags).values().all())
for (const {type, value} of parseContent(content)) {
for (const {type, value} of parseContent({content})) {
if (type.match(/nostr:(note|nevent)/) && !seen.has(value.id)) {
tags = tags.concat([["e", value.id]])
seen.add(value.id)

View File

@ -1,21 +1,47 @@
import type {Writable} from "svelte/store"
import Loki from "lokijs"
import IncrementalIndexedAdapter from "lokijs/src/incremental-indexeddb-adapter"
import {partition, sortBy, prop, pluck, without, is} from "ramda"
import IncrementalIndexedDBAdapter from "lokijs/src/incremental-indexeddb-adapter"
import {partition, sortBy, prop, always, pluck, without, is} from "ramda"
import {throttle} from "throttle-debounce"
import {writable} from "svelte/store"
import {ensurePlural, createMap} from "hurdak/lib/hurdak"
import {ensurePlural, noop, createMap} from "hurdak/lib/hurdak"
import {log} from "src/util/logger"
import {Tags} from "src/util/nostr"
import user from "src/agent/user"
const loki = new Loki("agent.db", {
const Adapter = window.indexedDB ? IncrementalIndexedDBAdapter : Loki.LokiMemoryAdapter
export const loki = new Loki("agent.db", {
autoload: true,
autosave: true,
adapter: window.indexedDB ? new IncrementalIndexedAdapter() : new Loki.LokiMemoryAdapter(),
autoloadCallback: () => ready.set(true),
autosaveInterval: 4000,
adapter: new Adapter(),
autoloadCallback: () => {
for (const table of Object.values(registry)) {
table.initialize()
}
listener.connect()
ready.set(true)
},
})
window.addEventListener("beforeunload", () => loki.close())
const stubCollection = {
insert: noop,
updateWhere: noop,
removeWhere: noop,
findAndRemove: noop,
clear: noop,
find: always([]),
findOne: always(null),
by: always(null),
count: always(0),
chain: () => stubCollection,
}
// ----------------------------------------------------------------------------
// Database table abstraction around loki
@ -26,24 +52,32 @@ class Table {
pk: string
_max: number
_sort: (xs: Array<Record<string, any>>) => Array<Record<string, any>>
_coll: Loki
_coll?: Loki
_subs: Array<(t: Table) => void>
constructor(name, pk, {max = 500, sort = null} = {}) {
this.name = name
this.pk = pk
this._max = max
this._sort = sort
this._coll = loki.addCollection(name, {unique: [pk]})
this._coll = stubCollection
this._subs = []
registry[name] = this
}
initialize() {
this._coll = loki.addCollection(this.name, {unique: [this.pk]})
this._coll.addListener(["insert", "update"], () => {
for (const cb of this._subs) {
cb(this)
}
})
}
subscribe(cb) {
const keys = ["insert", "update"]
this._subs.push(cb)
this._coll.addListener(keys, cb)
cb(this)
return () => this._coll.removeListener(keys, cb)
return () => {
this._subs = without([cb], this._subs)
}
}
patch(items) {
const [updates, creates] = partition(item => this.get(item[this.pk]), ensurePlural(items))
@ -183,8 +217,6 @@ export const rooms = new Table("rooms", "id")
export const relays = new Table("relays", "url")
export const routes = new Table("routes", "id", {max: 3000, sort: sortByLastSeen})
listener.connect()
export const getPersonWithFallback = pubkey => people.get(pubkey) || {pubkey}
export const getRelayWithFallback = url => relays.get(url) || {url}

View File

@ -1,11 +1,9 @@
<script lang="ts">
import {objOf} from "ramda"
import {objOf, is} from "ramda"
import {navigate} from "svelte-routing"
import {fly} from "svelte/transition"
import {first} from "hurdak/lib/hurdak"
import {warn} from "src/util/logger"
import {parseContent} from "src/util/html"
import {displayPerson, Tags} from "src/util/nostr"
import {displayPerson, parseContent, Tags} from "src/util/nostr"
import MediaSet from "src/partials/MediaSet.svelte"
import Card from "src/partials/Card.svelte"
import Spinner from "src/partials/Spinner.svelte"
@ -25,7 +23,7 @@
const links = []
const entities = []
const shouldTruncate = !showEntire && note.content.length > maxLength * 0.6
const content = parseContent(note.content)
const content = parseContent(note)
let l = 0
for (let i = 0; i < content.length; i++) {
@ -68,7 +66,7 @@
l += value.length
// Content[i] may be undefined if we're on a linebreak that was spliced out
if (content[i] && shouldTruncate && l > maxLength && type !== "newline") {
if (is(String, content[i]?.value) && shouldTruncate && l > maxLength && type !== "newline") {
content[i].value = value.trim()
content.splice(i + 1, content.length, {type: "text", value: "..."})
break
@ -78,20 +76,6 @@
}
}
const getMentionPubkey = text => {
const i = parseInt(first(text.match(/\d+/)))
// Some implementations count only p tags when calculating index, and some
// implementations are 1-indexed
if (note.tags[i]?.[0] === "p") {
return note.tags[i][1]
} else if (note.tags[i - 1]?.[0] === "p") {
return note.tags[i - 1][1]
} else {
return Tags.from(note).type("p").values().nth(i)
}
}
const loadQuote = async ({id, relays}) => {
// Follow relay hints
relays = (relays || []).map(objOf("url")).concat(Tags.from(note).equals(id).relays())
@ -132,15 +116,6 @@
{value.entity.slice(0, 16) + "..."}
{/if}
</Anchor>
{:else if type === "mention"}
{@const pubkey = getMentionPubkey(value)}
{#if pubkey}
@<Anchor href={routes.person(pubkey)}>
{displayPerson(getPersonWithFallback(pubkey))}
</Anchor>
{:else}
{value}
{/if}
{:else}
{value}
{/if}

View File

@ -1,7 +1,6 @@
<script lang="ts">
import {ellipsize} from "hurdak/lib/hurdak"
import {parseContent} from "src/util/html"
import {displayPerson} from "src/util/nostr"
import {displayPerson, parseContent} from "src/util/nostr"
import Anchor from "src/partials/Anchor.svelte"
import {getPersonWithFallback} from "src/agent/db"
@ -9,7 +8,7 @@
export let truncate = false
const about = person?.kind0?.about || ""
const content = parseContent(truncate ? ellipsize(about, 140) : about)
const content = parseContent({content: truncate ? ellipsize(about, 140) : about})
</script>
<p class="overflow-hidden text-ellipsis">

View File

@ -1,5 +1,3 @@
import {nip19} from "nostr-tools"
import {last} from "ramda"
import {bytes} from "hurdak/lib/hurdak"
export const copyToClipboard = text => {
@ -102,106 +100,6 @@ export const noEvent = f => e => {
f()
}
// Tokenize raw note content into a flat list of {type, value} tokens:
// "text", "newline", "mention" (legacy #[n] form), "topic", "nostr:*"
// entities (decoded via nip19), and "link". Order of the regex checks
// below matters: earlier matches win over later ones.
export const parseContent = content => {
const text = content.trim()
const result = []
// `buffer` accumulates plain text between matches; `i` is the parse
// cursor into `text`, advanced both here and inside `push` (closure).
let buffer = "",
i = 0
// Flush buffered plain text as a "text" token, append the new token,
// and advance the cursor past the matched source text.
const push = (type, text, value = null) => {
if (buffer) {
result.push({type: "text", value: buffer})
buffer = ""
}
result.push({type, value: value || text})
i += text.length
}
for (; i < text.length; ) {
const prev = last(result)
const tail = text.slice(i)
const newLine = tail.match(/^\n+/)
if (newLine) {
push("newline", newLine[0])
continue
}
// Legacy NIP-08 style mention: #[tag-index]. Emitted as-is; the
// consumer is responsible for resolving the index against event tags.
const mentionMatch = tail.match(/^#\[\d+\]/i)
if (mentionMatch) {
push("mention", mentionMatch[0])
continue
}
const topicMatch = tail.match(/^#\w+/i)
if (topicMatch) {
push("topic", topicMatch[0])
continue
}
// NIP-19 bech32 entity, with or without the "nostr:" prefix.
const bech32Match = tail.match(/^(nostr:)?n(event|ote|profile|pub)1[\d\w]+/i)
if (bech32Match) {
try {
const entity = bech32Match[0].replace("nostr:", "")
const {type, data} = nip19.decode(entity) as {type: string; data: object}
push(`nostr:${type}`, bech32Match[0], {...data, entity})
continue
} catch (e) {
// Decode failure (bad checksum etc) — fall through and treat as text.
console.log(e)
// pass
}
}
const urlMatch = tail.match(
/^((http|ws)s?:\/\/)?[-a-z0-9@:%_\+~#=\.]+\.[a-z]{1,6}[-a-z0-9:%_\+~#\?&\/=;\.]*/gi
)
// Skip url if it's just the end of a filepath
if (urlMatch && (prev?.type !== "text" || !prev.value.endsWith("/"))) {
let url = urlMatch[0]
// Skip ellipses and very short non-urls
if (!url.match(/\.\./) && url.length > 4) {
// It's common for punctuation to end a url, trim it off
if (url.match(/[\.\?,:]$/)) {
url = url.slice(0, -1)
}
if (!url.match("://")) {
// Bare domains get a default scheme so anchors work.
url = "https://" + url
}
// Token value carries the normalized url; cursor advances by the raw match.
push("link", urlMatch[0], url)
continue
}
}
// Instead of going character by character and re-running all the above regular expressions
// a million times, try to match the next word and add it to the buffer
const wordMatch = tail.match(/^[\w\d]+ ?/i)
if (wordMatch) {
buffer += wordMatch[0]
i += wordMatch[0].length
} else {
buffer += text[i]
i += 1
}
}
// Flush any trailing plain text.
if (buffer) {
result.push({type: "text", value: buffer})
}
return result
}
export const isMobile = localStorage.mobile || window.navigator.maxTouchPoints > 1
export const parseHex = hex => {

View File

@ -146,3 +146,122 @@ export const toHex = (data: string): string | null => {
export const mergeFilter = (filter, extra) =>
is(Array, filter) ? filter.map(mergeLeft(extra)) : {...filter, ...extra}
/**
 * Tokenize note content into a flat list of {type, value} tokens: "text",
 * "newline", "topic", "nostr:*" entities, and "link". Legacy #[n] mentions
 * are converted to NIP-19 bech32 entities using the event's tags, so
 * downstream rendering only has to handle one mention format.
 *
 * @param content - raw note content
 * @param tags - the event's tag array, used to resolve legacy #[n] mentions
 * @returns array of {type, value} tokens in source order
 */
export const parseContent = ({content, tags = []}) => {
  const text = content.trim()
  const result = []

  // `buffer` accumulates plain text between matches; `i` is the parse
  // cursor into `text`. NB: `push` mutates `i` via closure.
  let buffer = "",
    i = 0

  // Flush buffered plain text as a "text" token, append the new token,
  // and advance the cursor past the matched source text.
  const push = (type, text, value = null) => {
    if (buffer) {
      result.push({type: "text", value: buffer})
      buffer = ""
    }

    result.push({type, value: value || text})

    i += text.length
  }

  for (; i < text.length; ) {
    const prev = last(result)
    const tail = text.slice(i)

    const newLine = tail.match(/^\n+/)

    if (newLine) {
      push("newline", newLine[0])
      continue
    }

    // Convert legacy mentions to bech32 entities
    const mentionMatch = tail.match(/^#\[(\d+)\]/i)

    if (mentionMatch) {
      // Use a distinct name for the tag index — the original shadowed the
      // outer cursor `i`, which `push` mutates via closure.
      const idx = parseInt(mentionMatch[1], 10)

      if (tags[idx]) {
        const [tag, value, url] = tags[idx]
        const relays = [url].filter(identity)

        let type, data, entity
        if (tag === "p") {
          type = "nprofile"
          data = {pubkey: value, relays}
          entity = nip19.nprofileEncode(data)
        } else {
          type = "nevent"
          data = {id: value, relays, pubkey: null}
          entity = nip19.neventEncode(data)
        }

        push(`nostr:${type}`, mentionMatch[0], {...data, entity})

        continue
      }
    }

    const topicMatch = tail.match(/^#\w+/i)

    if (topicMatch) {
      push("topic", topicMatch[0])
      continue
    }

    // NIP-19 bech32 entity, with or without the "nostr:" prefix.
    const bech32Match = tail.match(/^(nostr:)?n(event|ote|profile|pub)1[\d\w]+/i)

    if (bech32Match) {
      try {
        const entity = bech32Match[0].replace("nostr:", "")
        const {type, data} = nip19.decode(entity) as {type: string; data: object}

        push(`nostr:${type}`, bech32Match[0], {...data, entity})
        continue
      } catch (e) {
        // Decode failure (bad checksum etc) — fall through and treat as text.
        console.log(e)
        // pass
      }
    }

    // NOTE(review): unlike the previous implementation, this char class no
    // longer matches "@" — confirm urls with userinfo are intentionally excluded.
    const urlMatch = tail.match(
      /^((http|ws)s?:\/\/)?[-a-z0-9:%_\+~#=\.]+\.[a-z]{1,6}[-a-z0-9:%_\+~#\?&\/=;\.]*/gi
    )

    // Skip url if it's just the end of a filepath
    if (urlMatch && (prev?.type !== "text" || !prev.value.endsWith("/"))) {
      let url = urlMatch[0]

      // Skip ellipses and very short non-urls
      if (!url.match(/\.\./) && url.length > 4) {
        // It's common for punctuation to end a url, trim it off
        if (url.match(/[\.\?,:]$/)) {
          url = url.slice(0, -1)
        }

        if (!url.match("://")) {
          // Bare domains get a default scheme so anchors work.
          url = "https://" + url
        }

        // Token value carries the normalized url; cursor advances by the raw match.
        push("link", urlMatch[0], url)
        continue
      }
    }

    // Instead of going character by character and re-running all the above regular expressions
    // a million times, try to match the next word and add it to the buffer
    const wordMatch = tail.match(/^[\w\d]+ ?/i)

    if (wordMatch) {
      buffer += wordMatch[0]
      i += wordMatch[0].length
    } else {
      buffer += text[i]
      i += 1
    }
  }

  // Flush any trailing plain text.
  if (buffer) {
    result.push({type: "text", value: buffer})
  }

  return result
}