Put note kinds in a variable

Jonathan Staab 2023-06-16 15:28:55 -07:00
parent 4f0910e0c6
commit 61f44e340a
10 changed files with 169 additions and 159 deletions
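For context, this commit consolidates the hard-coded note kind arrays (e.g. [1, 1985]) into a single noteKinds constant exported from src/util/nostr, and moves parseContent into a new src/util/notes module. A minimal sketch of the resulting pattern; the consumer code below is illustrative rather than lifted verbatim from the tree:

```typescript
// Shared list of "note" event kinds (lives in src/util/nostr.ts in the real tree).
export const noteKinds = [1, 1985, 30023, 30018, 10001, 1063, 9802]

// Consumers key off the shared list instead of repeating [1, 1985] at each
// call site. These are simplified versions of call sites touched by the commit.
export const canDisplay = (e: {kind: number}) => noteKinds.includes(e.kind)

export const notesFilter = (pubkey: string) => ({kinds: noteKinds, authors: [pubkey]})
```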

View File

@@ -1,8 +1,7 @@
# Current
- [ ] Feeds load forever if a modal is open
- [ ] Support other list types
- [ ] Use vida to stream development
- [ ] Support other list types than 30001
- [ ] Fix connection management stuff. Have GPT help
- [ ] Add preview proxy thing
- [ ] White-labeled

View File

@@ -1,7 +1,8 @@
import {last, pick, uniqBy} from "ramda"
import {get} from "svelte/store"
import {doPipe} from "hurdak/lib/hurdak"
import {parseContent, Tags, roomAttrs, displayPerson, findRoot, findReply} from "src/util/nostr"
import {Tags, roomAttrs, displayPerson, findRoot, findReply} from "src/util/nostr"
import {parseContent} from "src/util/notes"
import {getRelayForPersonHint, getRelayForEventHint} from "src/agent/relays"
import {getPersonWithFallback} from "src/agent/db"
import pool from "src/agent/pool"

View File

@@ -6,7 +6,7 @@
import {fly} from "svelte/transition"
import {quantify} from "hurdak/lib/hurdak"
import {fuzzy, createScroller, now, timedelta} from "src/util/misc"
import {asDisplayEvent} from "src/util/nostr"
import {asDisplayEvent, noteKinds} from "src/util/nostr"
import Spinner from "src/partials/Spinner.svelte"
import Modal from "src/partials/Modal.svelte"
import Content from "src/partials/Content.svelte"
@@ -42,7 +42,7 @@
const maxNotes = 100
const seen = new Set()
const getModal = () => last(document.querySelectorAll(".modal-content"))
const canDisplay = e => [1, 1985].includes(e.kind)
const canDisplay = e => noteKinds.includes(e.kind)
const setFeedRelay = relay => {
  feedRelay = relay

View File

@@ -3,7 +3,8 @@
import {fly} from "svelte/transition"
import {splice, switcher, switcherFn} from "hurdak/lib/hurdak"
import {warn} from "src/util/logger"
import {displayPerson, parseContent, getLabelQuality, displayRelay, Tags} from "src/util/nostr"
import {displayPerson, getLabelQuality, displayRelay, Tags} from "src/util/nostr"
import {parseContent} from "src/util/notes"
import {modal} from "src/partials/state"
import MediaSet from "src/partials/MediaSet.svelte"
import QRCode from "src/partials/QRCode.svelte"
@@ -29,8 +30,6 @@
let content = parseContent(note)
let rating = note.kind === 1985 ? getLabelQuality("review/relay", note) : null
console.log(content)
const links = []
const invoices = []
const ranges = []

View File

@@ -1,6 +1,7 @@
<script lang="ts">
import {ellipsize} from "hurdak/lib/hurdak"
import {displayPerson, parseContent} from "src/util/nostr"
import {displayPerson} from "src/util/nostr"
import {parseContent} from "src/util/notes"
import Anchor from "src/partials/Anchor.svelte"
import {getPersonWithFallback} from "src/agent/db"

View File

@@ -1,12 +1,13 @@
<script lang="ts">
import {timedelta} from "src/util/misc"
import {noteKinds} from "src/util/nostr"
import Feed from "src/app/shared/Feed.svelte"
export let pubkey
export let relays
export let invertColors = false
const filter = {kinds: [1, 1985], authors: [pubkey]}
const filter = {kinds: noteKinds, authors: [pubkey]}
</script>
<Feed {relays} {filter} {invertColors} parentsTimeout={3000} delta={timedelta(3, "days")} />

View File

@@ -10,7 +10,7 @@ import {createMap, doPipe, first} from "hurdak/lib/hurdak"
import {warn} from "src/util/logger"
import {hash, shuffle, sleep, clamp} from "src/util/misc"
import {now, timedelta} from "src/util/misc"
import {Tags, isNotification, userKinds} from "src/util/nostr"
import {Tags, isNotification, userKinds, noteKinds} from "src/util/nostr"
import {findReplyId} from "src/util/nostr"
import {modal, toast} from "src/partials/state"
import {notifications, watch, userEvents, contacts, rooms} from "src/agent/db"
@@ -157,7 +157,11 @@ const processChats = async (pubkey, events) => {
export const listen = async () => {
  const pubkey = user.getPubkey()
  const {roomsJoined} = user.getProfile()
  const kinds = enableZaps ? [1, 4, 7, 1985, 9735] : [1, 4, 7, 1985]
  const kinds = noteKinds.concat([4, 7])

  if (enableZaps) {
    kinds.push(9735)
  }

  // Only grab notifications since we last checked, with some wiggle room
  const since =
@@ -175,7 +179,7 @@ export const listen = async () => {
  ;(listen as any)._listener = await network.listen({
    relays: getUserReadRelays(),
    filter: [
      {kinds: [1, 4], authors: [pubkey], since},
      {kinds: [4], authors: [pubkey], since},
      {kinds, "#p": [pubkey], since},
      {kinds, "#e": eventIds, since},
      {kinds: [42], "#e": roomsJoined, since},
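A sketch of the notification kinds computed by listen() after this change, assuming the noteKinds value added in src/util/nostr.ts; enableZaps stands in for the app's actual setting:

```typescript
// Assumes the noteKinds constant introduced by this commit.
const noteKinds = [1, 1985, 30023, 30018, 10001, 1063, 9802]
const enableZaps = true // illustrative flag, not the app's real setting object

// Notes plus DMs (kind 4) and reactions (kind 7), with zap receipts (9735)
// appended only when zaps are enabled.
const kinds = noteKinds.concat([4, 7])
if (enableZaps) {
  kinds.push(9735)
}

// kinds === [1, 1985, 30023, 30018, 10001, 1063, 9802, 4, 7, 9735]
```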

View File

@@ -2,7 +2,7 @@
import cx from "classnames"
import type {DynamicFilter} from "src/util/types"
import {indexBy, objOf} from "ramda"
import {Tags} from "src/util/nostr"
import {Tags, noteKinds} from "src/util/nostr"
import {modal, theme} from "src/partials/state"
import Anchor from "src/partials/Anchor.svelte"
import Content from "src/partials/Content.svelte"
@@ -16,7 +16,7 @@
let relays = null
let key = Math.random()

let filter = {
  kinds: [1, 1985],
  kinds: noteKinds,
  authors: getUserFollows().length > 0 ? "follows" : "network",
} as DynamicFilter

View File

@@ -5,6 +5,7 @@ import {ensurePlural, ellipsize, first} from "hurdak/lib/hurdak"
import {tryJson, avg} from "src/util/misc"
import {invoiceAmount} from "src/util/lightning"
export const noteKinds = [1, 1985, 30023, 30018, 10001, 1063, 9802]
export const personKinds = [0, 2, 3, 10001, 10002]
export const userKinds = personKinds.concat([10000, 30001, 30078])
export const appDataKeys = [
@@ -191,150 +192,6 @@ export const toHex = (data: string): string | null => {
export const mergeFilter = (filter, extra) =>
  is(Array, filter) ? filter.map(mergeLeft(extra)) : {...filter, ...extra}

export const parseContent = ({content, tags = []}) => {
  const result = []
  let text = content.trim()
  let buffer = ""

  const parseNewline = () => {
    const newline = first(text.match(/^\n+/))

    if (newline) {
      return ["newline", newline, newline]
    }
  }

  const parseMention = () => {
    // Convert legacy mentions to bech32 entities
    const mentionMatch = text.match(/^#\[(\d+)\]/i)

    if (mentionMatch) {
      const i = parseInt(mentionMatch[1])

      if (tags[i]) {
        const [tag, value, url] = tags[i]
        const relays = [url].filter(identity)

        let type, data, entity
        if (tag === "p") {
          type = "nprofile"
          data = {pubkey: value, relays}
          entity = nip19.nprofileEncode(data)
        } else {
          type = "nevent"
          data = {id: value, relays, pubkey: null}
          entity = nip19.neventEncode(data)
        }

        return [`nostr:${type}`, mentionMatch[0], {...data, entity}]
      }
    }
  }

  const parseTopic = () => {
    const topic = first(text.match(/^#\w+/i))

    // Skip numeric topics
    if (topic && !topic.match(/^#\d+$/)) {
      return ["topic", topic, topic.slice(1)]
    }
  }

  const parseBech32 = () => {
    const bech32 = first(
      text.match(/^(web\+)?(nostr:)?\/?\/?n(event|ote|profile|pub|addr)1[\d\w]+/i)
    )

    if (bech32) {
      try {
        const entity = fromNostrURI(bech32)
        const {type, data} = nip19.decode(entity) as {type: string; data: object}

        let value = data
        if (type === "note") {
          value = {id: data}
        } else if (type === "npub") {
          value = {pubkey: data}
        }

        return [`nostr:${type}`, bech32, {...value, entity}]
      } catch (e) {
        console.log(e)
        // pass
      }
    }
  }

  const parseLNUrl = () => {
    const lnurl = first(text.match(/^ln(bc|url)[\d\w]{50,1000}/i))

    if (lnurl) {
      return ["lnurl", lnurl, lnurl]
    }
  }

  const parseUrl = () => {
    const raw = first(text.match(/^([a-z\+:]{2,30}:\/\/)?[^\s]+\.[a-z]{2,6}[^\s]*[^\.!?,:\s]/gi))

    // Skip url if it's just the end of a filepath
    if (raw) {
      const prev = last(result)

      if (prev?.type === "text" && prev.value.endsWith("/")) {
        return
      }

      let url = raw

      // Skip ellipses and very short non-urls
      if (url.match(/\.\./)) {
        return
      }

      if (!url.match("://")) {
        url = "https://" + url
      }

      return ["link", raw, url]
    }
  }

  while (text) {
    const part =
      parseNewline() ||
      parseMention() ||
      parseTopic() ||
      parseBech32() ||
      parseUrl() ||
      parseLNUrl()

    if (part) {
      if (buffer) {
        result.push({type: "text", value: buffer})
        buffer = ""
      }

      const [type, raw, value] = part

      result.push({type, value})
      text = text.slice(raw.length)
    } else {
      // Instead of going character by character and re-running all the above regular expressions
      // a million times, try to match the next word and add it to the buffer
      const match = first(text.match(/^[\w\d]+ ?/i)) || text[0]

      buffer += match
      text = text.slice(match.length)
    }
  }

  if (buffer) {
    result.push({type: "text", value: buffer})
  }

  return result
}

export const processZaps = (zaps, author) =>
  zaps
    .map(zap => {

src/util/notes.ts (new file, 148 additions)
View File

@@ -0,0 +1,148 @@
import {last, identity} from "ramda"
import {nip19} from "nostr-tools"
import {first} from "hurdak/lib/hurdak"
import {fromNostrURI} from "src/util/nostr"

export const parseContent = ({content, tags = []}) => {
  const result = []
  let text = content.trim()
  let buffer = ""

  const parseNewline = () => {
    const newline = first(text.match(/^\n+/))

    if (newline) {
      return ["newline", newline, newline]
    }
  }

  const parseMention = () => {
    // Convert legacy mentions to bech32 entities
    const mentionMatch = text.match(/^#\[(\d+)\]/i)

    if (mentionMatch) {
      const i = parseInt(mentionMatch[1])

      if (tags[i]) {
        const [tag, value, url] = tags[i]
        const relays = [url].filter(identity)

        let type, data, entity
        if (tag === "p") {
          type = "nprofile"
          data = {pubkey: value, relays}
          entity = nip19.nprofileEncode(data)
        } else {
          type = "nevent"
          data = {id: value, relays, pubkey: null}
          entity = nip19.neventEncode(data)
        }

        return [`nostr:${type}`, mentionMatch[0], {...data, entity}]
      }
    }
  }

  const parseTopic = () => {
    const topic = first(text.match(/^#\w+/i))

    // Skip numeric topics
    if (topic && !topic.match(/^#\d+$/)) {
      return ["topic", topic, topic.slice(1)]
    }
  }

  const parseBech32 = () => {
    const bech32 = first(
      text.match(/^(web\+)?(nostr:)?\/?\/?n(event|ote|profile|pub|addr)1[\d\w]+/i)
    )

    if (bech32) {
      try {
        const entity = fromNostrURI(bech32)
        const {type, data} = nip19.decode(entity) as {type: string; data: object}

        let value = data
        if (type === "note") {
          value = {id: data}
        } else if (type === "npub") {
          value = {pubkey: data}
        }

        return [`nostr:${type}`, bech32, {...value, entity}]
      } catch (e) {
        console.log(e)
        // pass
      }
    }
  }

  const parseLNUrl = () => {
    const lnurl = first(text.match(/^ln(bc|url)[\d\w]{50,1000}/i))

    if (lnurl) {
      return ["lnurl", lnurl, lnurl]
    }
  }

  const parseUrl = () => {
    const raw = first(text.match(/^([a-z\+:]{2,30}:\/\/)?[^\s]+\.[a-z]{2,6}[^\s]*[^\.!?,:\s]/gi))

    // Skip url if it's just the end of a filepath
    if (raw) {
      const prev = last(result)

      if (prev?.type === "text" && prev.value.endsWith("/")) {
        return
      }

      let url = raw

      // Skip ellipses and very short non-urls
      if (url.match(/\.\./)) {
        return
      }

      if (!url.match("://")) {
        url = "https://" + url
      }

      return ["link", raw, url]
    }
  }

  while (text) {
    const part =
      parseNewline() ||
      parseMention() ||
      parseTopic() ||
      parseBech32() ||
      parseUrl() ||
      parseLNUrl()

    if (part) {
      if (buffer) {
        result.push({type: "text", value: buffer})
        buffer = ""
      }

      const [type, raw, value] = part

      result.push({type, value})
      text = text.slice(raw.length)
    } else {
      // Instead of going character by character and re-running all the above regular expressions
      // a million times, try to match the next word and add it to the buffer
      const match = first(text.match(/^[\w\d]+ ?/i)) || text[0]

      buffer += match
      text = text.slice(match.length)
    }
  }

  if (buffer) {
    result.push({type: "text", value: buffer})
  }

  return result
}
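A hypothetical usage sketch of the extracted parseContent helper; the sample content and the described output are illustrative, not taken from the codebase:

```typescript
import {parseContent} from "src/util/notes"

const parts = parseContent({
  content: "gm #nostr\ncheck out https://example.com",
  tags: [],
})

// Yields, in order: a text part ("gm "), a topic part ("nostr"), a newline part,
// another text part ("check out "), and a link part ("https://example.com").
console.log(parts)
```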