diff --git a/ROADMAP.md b/ROADMAP.md
index c5cdc567..f6a9e73c 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -1,8 +1,7 @@
# Current
- [ ] Feeds load forever if a modal is open
-- [ ] Support other list types
-- [ ] Use vida to stream development
+- [ ] Support list types other than 30001
- [ ] Fix connection management stuff. Have GPT help
- [ ] Add preview proxy thing
- [ ] White-labeled
diff --git a/src/agent/cmd.ts b/src/agent/cmd.ts
index c7bec439..2508288e 100644
--- a/src/agent/cmd.ts
+++ b/src/agent/cmd.ts
@@ -1,7 +1,8 @@
import {last, pick, uniqBy} from "ramda"
import {get} from "svelte/store"
import {doPipe} from "hurdak/lib/hurdak"
-import {parseContent, Tags, roomAttrs, displayPerson, findRoot, findReply} from "src/util/nostr"
+import {Tags, roomAttrs, displayPerson, findRoot, findReply} from "src/util/nostr"
+import {parseContent} from "src/util/notes"
import {getRelayForPersonHint, getRelayForEventHint} from "src/agent/relays"
import {getPersonWithFallback} from "src/agent/db"
import pool from "src/agent/pool"
diff --git a/src/app/shared/Feed.svelte b/src/app/shared/Feed.svelte
index 52688bef..ce6e25e1 100644
--- a/src/app/shared/Feed.svelte
+++ b/src/app/shared/Feed.svelte
@@ -6,7 +6,7 @@
import {fly} from "svelte/transition"
import {quantify} from "hurdak/lib/hurdak"
import {fuzzy, createScroller, now, timedelta} from "src/util/misc"
- import {asDisplayEvent} from "src/util/nostr"
+ import {asDisplayEvent, noteKinds} from "src/util/nostr"
import Spinner from "src/partials/Spinner.svelte"
import Modal from "src/partials/Modal.svelte"
import Content from "src/partials/Content.svelte"
@@ -42,7 +42,7 @@
const maxNotes = 100
const seen = new Set()
const getModal = () => last(document.querySelectorAll(".modal-content"))
- const canDisplay = e => [1, 1985].includes(e.kind)
+ const canDisplay = e => noteKinds.includes(e.kind)
const setFeedRelay = relay => {
feedRelay = relay
diff --git a/src/app/shared/NoteContent.svelte b/src/app/shared/NoteContent.svelte
index 4e72d916..a4a32b2c 100644
--- a/src/app/shared/NoteContent.svelte
+++ b/src/app/shared/NoteContent.svelte
@@ -3,7 +3,8 @@
import {fly} from "svelte/transition"
import {splice, switcher, switcherFn} from "hurdak/lib/hurdak"
import {warn} from "src/util/logger"
- import {displayPerson, parseContent, getLabelQuality, displayRelay, Tags} from "src/util/nostr"
+ import {displayPerson, getLabelQuality, displayRelay, Tags} from "src/util/nostr"
+ import {parseContent} from "src/util/notes"
import {modal} from "src/partials/state"
import MediaSet from "src/partials/MediaSet.svelte"
import QRCode from "src/partials/QRCode.svelte"
@@ -29,8 +30,6 @@
let content = parseContent(note)
let rating = note.kind === 1985 ? getLabelQuality("review/relay", note) : null
- console.log(content)
-
const links = []
const invoices = []
const ranges = []
diff --git a/src/app/shared/PersonAbout.svelte b/src/app/shared/PersonAbout.svelte
index 75768dd2..d8e497e5 100644
--- a/src/app/shared/PersonAbout.svelte
+++ b/src/app/shared/PersonAbout.svelte
@@ -1,6 +1,7 @@
diff --git a/src/app/state.ts b/src/app/state.ts
index 0596f712..4d04c401 100644
--- a/src/app/state.ts
+++ b/src/app/state.ts
@@ -10,7 +10,7 @@ import {createMap, doPipe, first} from "hurdak/lib/hurdak"
import {warn} from "src/util/logger"
import {hash, shuffle, sleep, clamp} from "src/util/misc"
import {now, timedelta} from "src/util/misc"
-import {Tags, isNotification, userKinds} from "src/util/nostr"
+import {Tags, isNotification, userKinds, noteKinds} from "src/util/nostr"
import {findReplyId} from "src/util/nostr"
import {modal, toast} from "src/partials/state"
import {notifications, watch, userEvents, contacts, rooms} from "src/agent/db"
@@ -157,7 +157,11 @@ const processChats = async (pubkey, events) => {
export const listen = async () => {
const pubkey = user.getPubkey()
const {roomsJoined} = user.getProfile()
- const kinds = enableZaps ? [1, 4, 7, 1985, 9735] : [1, 4, 7, 1985]
+ const kinds = noteKinds.concat([4, 7])
+
+ if (enableZaps) {
+ kinds.push(9735)
+ }
// Only grab notifications since we last checked, with some wiggle room
const since =
@@ -175,7 +179,7 @@ export const listen = async () => {
;(listen as any)._listener = await network.listen({
relays: getUserReadRelays(),
filter: [
- {kinds: [1, 4], authors: [pubkey], since},
+ {kinds: [4], authors: [pubkey], since},
{kinds, "#p": [pubkey], since},
{kinds, "#e": eventIds, since},
{kinds: [42], "#e": roomsJoined, since},
diff --git a/src/app/views/Feeds.svelte b/src/app/views/Feeds.svelte
index def71812..ac36bc18 100644
--- a/src/app/views/Feeds.svelte
+++ b/src/app/views/Feeds.svelte
@@ -2,7 +2,7 @@
import cx from "classnames"
import type {DynamicFilter} from "src/util/types"
import {indexBy, objOf} from "ramda"
- import {Tags} from "src/util/nostr"
+ import {Tags, noteKinds} from "src/util/nostr"
import {modal, theme} from "src/partials/state"
import Anchor from "src/partials/Anchor.svelte"
import Content from "src/partials/Content.svelte"
@@ -16,7 +16,7 @@
let relays = null
let key = Math.random()
let filter = {
- kinds: [1, 1985],
+ kinds: noteKinds,
authors: getUserFollows().length > 0 ? "follows" : "network",
} as DynamicFilter
diff --git a/src/util/nostr.ts b/src/util/nostr.ts
index 0cd3f754..020ac573 100644
--- a/src/util/nostr.ts
+++ b/src/util/nostr.ts
@@ -5,6 +5,7 @@ import {ensurePlural, ellipsize, first} from "hurdak/lib/hurdak"
import {tryJson, avg} from "src/util/misc"
import {invoiceAmount} from "src/util/lightning"
+export const noteKinds = [1, 1985, 30023, 30018, 10001, 1063, 9802]
export const personKinds = [0, 2, 3, 10001, 10002]
export const userKinds = personKinds.concat([10000, 30001, 30078])
export const appDataKeys = [
@@ -191,150 +192,6 @@ export const toHex = (data: string): string | null => {
export const mergeFilter = (filter, extra) =>
is(Array, filter) ? filter.map(mergeLeft(extra)) : {...filter, ...extra}
-export const parseContent = ({content, tags = []}) => {
- const result = []
- let text = content.trim()
- let buffer = ""
-
- const parseNewline = () => {
- const newline = first(text.match(/^\n+/))
-
- if (newline) {
- return ["newline", newline, newline]
- }
- }
-
- const parseMention = () => {
- // Convert legacy mentions to bech32 entities
- const mentionMatch = text.match(/^#\[(\d+)\]/i)
-
- if (mentionMatch) {
- const i = parseInt(mentionMatch[1])
-
- if (tags[i]) {
- const [tag, value, url] = tags[i]
- const relays = [url].filter(identity)
-
- let type, data, entity
- if (tag === "p") {
- type = "nprofile"
- data = {pubkey: value, relays}
- entity = nip19.nprofileEncode(data)
- } else {
- type = "nevent"
- data = {id: value, relays, pubkey: null}
- entity = nip19.neventEncode(data)
- }
-
- return [`nostr:${type}`, mentionMatch[0], {...data, entity}]
- }
- }
- }
-
- const parseTopic = () => {
- const topic = first(text.match(/^#\w+/i))
-
- // Skip numeric topics
- if (topic && !topic.match(/^#\d+$/)) {
- return ["topic", topic, topic.slice(1)]
- }
- }
-
- const parseBech32 = () => {
- const bech32 = first(
- text.match(/^(web\+)?(nostr:)?\/?\/?n(event|ote|profile|pub|addr)1[\d\w]+/i)
- )
-
- if (bech32) {
- try {
- const entity = fromNostrURI(bech32)
- const {type, data} = nip19.decode(entity) as {type: string; data: object}
-
- let value = data
- if (type === "note") {
- value = {id: data}
- } else if (type === "npub") {
- value = {pubkey: data}
- }
-
- return [`nostr:${type}`, bech32, {...value, entity}]
- } catch (e) {
- console.log(e)
- // pass
- }
- }
- }
-
- const parseLNUrl = () => {
- const lnurl = first(text.match(/^ln(bc|url)[\d\w]{50,1000}/i))
-
- if (lnurl) {
- return ["lnurl", lnurl, lnurl]
- }
- }
-
- const parseUrl = () => {
- const raw = first(text.match(/^([a-z\+:]{2,30}:\/\/)?[^\s]+\.[a-z]{2,6}[^\s]*[^\.!?,:\s]/gi))
-
- // Skip url if it's just the end of a filepath
- if (raw) {
- const prev = last(result)
-
- if (prev?.type === "text" && prev.value.endsWith("/")) {
- return
- }
-
- let url = raw
-
- // Skip ellipses and very short non-urls
- if (url.match(/\.\./)) {
- return
- }
-
- if (!url.match("://")) {
- url = "https://" + url
- }
-
- return ["link", raw, url]
- }
- }
-
- while (text) {
- const part =
- parseNewline() ||
- parseMention() ||
- parseTopic() ||
- parseBech32() ||
- parseUrl() ||
- parseLNUrl()
-
- if (part) {
- if (buffer) {
- result.push({type: "text", value: buffer})
- buffer = ""
- }
-
- const [type, raw, value] = part
-
- result.push({type, value})
- text = text.slice(raw.length)
- } else {
- // Instead of going character by character and re-running all the above regular expressions
- // a million times, try to match the next word and add it to the buffer
- const match = first(text.match(/^[\w\d]+ ?/i)) || text[0]
-
- buffer += match
- text = text.slice(match.length)
- }
- }
-
- if (buffer) {
- result.push({type: "text", value: buffer})
- }
-
- return result
-}
-
export const processZaps = (zaps, author) =>
zaps
.map(zap => {
diff --git a/src/util/notes.ts b/src/util/notes.ts
new file mode 100644
index 00000000..bf26dabe
--- /dev/null
+++ b/src/util/notes.ts
@@ -0,0 +1,148 @@
+import {last, identity} from "ramda"
+import {nip19} from "nostr-tools"
+import {first} from "hurdak/lib/hurdak"
+import {fromNostrURI} from "src/util/nostr"
+
+export const parseContent = ({content, tags = []}) => {
+ const result = []
+ let text = content.trim()
+ let buffer = ""
+
+ const parseNewline = () => {
+ const newline = first(text.match(/^\n+/))
+
+ if (newline) {
+ return ["newline", newline, newline]
+ }
+ }
+
+ const parseMention = () => {
+ // Convert legacy mentions to bech32 entities
+ const mentionMatch = text.match(/^#\[(\d+)\]/i)
+
+ if (mentionMatch) {
+ const i = parseInt(mentionMatch[1])
+
+ if (tags[i]) {
+ const [tag, value, url] = tags[i]
+ const relays = [url].filter(identity)
+
+ let type, data, entity
+ if (tag === "p") {
+ type = "nprofile"
+ data = {pubkey: value, relays}
+ entity = nip19.nprofileEncode(data)
+ } else {
+ type = "nevent"
+ data = {id: value, relays, pubkey: null}
+ entity = nip19.neventEncode(data)
+ }
+
+ return [`nostr:${type}`, mentionMatch[0], {...data, entity}]
+ }
+ }
+ }
+
+ const parseTopic = () => {
+ const topic = first(text.match(/^#\w+/i))
+
+ // Skip numeric topics
+ if (topic && !topic.match(/^#\d+$/)) {
+ return ["topic", topic, topic.slice(1)]
+ }
+ }
+
+ const parseBech32 = () => {
+ const bech32 = first(
+ text.match(/^(web\+)?(nostr:)?\/?\/?n(event|ote|profile|pub|addr)1[\d\w]+/i)
+ )
+
+ if (bech32) {
+ try {
+ const entity = fromNostrURI(bech32)
+ const {type, data} = nip19.decode(entity) as {type: string; data: object}
+
+ let value = data
+ if (type === "note") {
+ value = {id: data}
+ } else if (type === "npub") {
+ value = {pubkey: data}
+ }
+
+ return [`nostr:${type}`, bech32, {...value, entity}]
+ } catch (e) {
+ console.log(e)
+ // pass
+ }
+ }
+ }
+
+ const parseLNUrl = () => {
+ const lnurl = first(text.match(/^ln(bc|url)[\d\w]{50,1000}/i))
+
+ if (lnurl) {
+ return ["lnurl", lnurl, lnurl]
+ }
+ }
+
+ const parseUrl = () => {
+ const raw = first(text.match(/^([a-z\+:]{2,30}:\/\/)?[^\s]+\.[a-z]{2,6}[^\s]*[^\.!?,:\s]/gi))
+
+ // Skip url if it's just the end of a filepath
+ if (raw) {
+ const prev = last(result)
+
+ if (prev?.type === "text" && prev.value.endsWith("/")) {
+ return
+ }
+
+ let url = raw
+
+ // Skip ellipses and very short non-urls
+ if (url.match(/\.\./)) {
+ return
+ }
+
+ if (!url.match("://")) {
+ url = "https://" + url
+ }
+
+ return ["link", raw, url]
+ }
+ }
+
+ while (text) {
+ const part =
+ parseNewline() ||
+ parseMention() ||
+ parseTopic() ||
+ parseBech32() ||
+ parseUrl() ||
+ parseLNUrl()
+
+ if (part) {
+ if (buffer) {
+ result.push({type: "text", value: buffer})
+ buffer = ""
+ }
+
+ const [type, raw, value] = part
+
+ result.push({type, value})
+ text = text.slice(raw.length)
+ } else {
+ // Instead of going character by character and re-running all the above regular expressions
+ // a million times, try to match the next word and add it to the buffer
+ const match = first(text.match(/^[\w\d]+ ?/i)) || text[0]
+
+ buffer += match
+ text = text.slice(match.length)
+ }
+ }
+
+ if (buffer) {
+ result.push({type: "text", value: buffer})
+ }
+
+ return result
+}