Add repository to indexeddb

Jon Staab 2024-05-08 17:38:15 -07:00
parent 7896c9e5a0
commit c6b3d74163
7 changed files with 75 additions and 46 deletions

View File

@@ -28,7 +28,8 @@
   let element
   let filters: Filter[] = [{ids: []}]
   let limit = 0
-  let opts = {
+  const {notes, start, load, feedLoader, opts} = new FeedLoader({
     feed,
     anchor,
     onEvent,
@@ -40,9 +41,7 @@
     shouldDefer: !eager,
     shouldLoadParents: true,
     shouldHideReplies: Storage.getJson("hideReplies"),
-  }
-  const {notes, start, load, feedLoader} = new FeedLoader(opts)
+  })
   const loadMore = async () => {
     limit += 5
@@ -52,14 +51,14 @@
     }
   }
-  const onChange = async opts => {
+  const onChange = async newOpts => {
     limit = 0
-    feed = opts.feed
-    Storage.setJson("hideReplies", opts.shouldHideReplies)
-    start(opts)
+    feed = newOpts.feed
+    Storage.setJson("hideReplies", newOpts.shouldHideReplies)
+    start(newOpts)
-    if (feedLoader.compiler.canCompile(opts.feed)) {
-      const requests = await feedLoader.compiler.compile(opts.feed)
+    if (feedLoader.compiler.canCompile(newOpts.feed)) {
+      const requests = await feedLoader.compiler.compile(newOpts.feed)
       filters = requests.flatMap(r => r.filters || [])
     } else {
@@ -67,7 +66,7 @@
     }
   }
-  onChange(opts)
+  start({})
   onMount(() => {
     const scroller = createScroller(loadMore, {element})
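
The hunks above fold the standalone opts object into a single new FeedLoader({...}) call that hands the normalized opts back to the component, and rename the handler argument to newOpts so it no longer shadows that binding. A minimal sketch of the pattern in isolation (this FeedLoader stand-in is invented for illustration; the real class lives in the engine and takes many more options):

// Sketch only: a loader that owns its options and hands them back to the caller,
// so there is no separate `opts` binding for callbacks to shadow.
type Opts = {feed: string; shouldHideReplies: boolean}

class FeedLoader {
  opts: Opts
  constructor(opts: Opts) {
    this.opts = opts
  }
  // Re-reads options, e.g. when the user toggles "hide replies"
  start = (overrides: Partial<Opts> = {}) => {
    this.opts = {...this.opts, ...overrides}
  }
}

// Before: `let opts = {...}` followed by `new FeedLoader(opts)`.
// After: a single expression, with `opts` coming back from the loader itself.
const {opts, start} = new FeedLoader({feed: "follows", shouldHideReplies: true})

// The handler takes `newOpts`, so it never shadows the destructured `opts`.
const onChange = (newOpts: Partial<Opts>) => start(newOpts)
onChange({...opts, shouldHideReplies: false})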

View File

@@ -1,10 +1,8 @@
import Bugsnag from "@bugsnag/js"
import {uniq} from "ramda"
import {hash} from "hurdak"
import {writable} from "@welshman/lib"
import {ConnectionStatus, NetworkContext} from "@welshman/net"
import type {Feed} from "@welshman/feeds"
import {warn} from "src/util/logger"
import {userKinds} from "src/util/nostr"
import {router} from "src/app/util/router"
import {
@@ -21,7 +19,6 @@ import {
   getUserRelayUrls,
   listenForNotifications,
   getSetting,
-  dufflepud,
 } from "src/engine"
 // Global state
@@ -64,7 +61,7 @@ setTimeout(() => {
 export const logUsage = async (path: string) => {
   if (getSetting("report_analytics")) {
-    const {location, plausible} = window
+    const {location, plausible} = window as any
     const pathname = path.replace(/(npub|nprofile|note|nevent|naddr)1[^\/]+/g, (_, m) => `<${m}>`)
     plausible("pageview", {u: location.origin + pathname})
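
The only change to logUsage is the cast: plausible is attached to window by the analytics script and has no ambient type, so destructuring it off window needs either a cast or a global declaration. Both options below; the plausible signature is inferred from the call above rather than from an official type definition:

// Option 1, as in the diff: cast and destructure.
const {location, plausible} = window as any
plausible?.("pageview", {u: location.origin + "/notes"})

// Option 2: declare the global once, then use window.plausible without casts.
declare global {
  interface Window {
    plausible?: (event: string, opts?: {u: string}) => void
  }
}
window.plausible?.("pageview", {u: location.origin + "/notes"})

export {} // keeps this snippet a module so `declare global` is allowed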

View File

@@ -60,7 +60,7 @@
     <strong>Max relays per request</strong>
     <div>{settings.relay_limit} relays</div>
   </div>
-  <Input type="range" class="!bg-transparent" bind:value={settings.relay_limit} min={1} max={30} parse={parseInt} />
+  <Input type="range" bind:value={settings.relay_limit} min={1} max={30} parse={parseInt} />
   <p slot="info">
     This controls how many relays to max out at when loading feeds and event context. More is
     faster, but will require more bandwidth and processing power.

View File

@@ -2,7 +2,7 @@ import {prop, identity, pluck, splitAt, nth, sortBy} from "ramda"
 import {sleep, defer, chunk, randomInt, throttle} from "hurdak"
 import {Storage as LocalStorage} from "hurdak"
 import {writable} from "@welshman/lib"
-import type {Writable, Collection} from "@welshman/lib"
+import type {IWritable} from "@welshman/lib"
 import logger from "src/util/logger"
 import {sessions} from "src/engine/session/state"
 import {people} from "src/engine/people/state"
@@ -143,7 +143,7 @@ export type LocalStorageAdapterOpts = {
 export class LocalStorageAdapter {
   constructor(
     readonly key: string,
-    readonly store: Writable<any>,
+    readonly store: IWritable<any>,
     readonly opts?: LocalStorageAdapterOpts,
   ) {}
@@ -162,7 +162,8 @@ export class LocalStorageAdapter {
 export class IndexedDBAdapter {
   constructor(
     readonly key: string,
-    readonly store: Collection<any>,
+    readonly keyPath: string,
+    readonly store: IWritable<any>,
     readonly max: number,
     readonly sort?: (xs: any[]) => any[],
     readonly filter?: (x: any) => boolean,
@@ -173,13 +174,13 @@
     return {
       name: this.key,
       opts: {
-        keyPath: this.store.pk,
+        keyPath: this.keyPath,
       },
     }
   }
   async initialize(storage: Storage) {
-    const {key, store} = this
+    const {key, keyPath, store} = this
     const data = await storage.db.getAll(key)
     const filter = this.filter || identity
     const migrate = this.migrate || identity
@@ -193,7 +194,7 @@
     }
     // Do it in small steps to avoid clogging stuff up
-    for (const records of chunk(100, (rows as any[]).filter(prop(store.pk)))) {
+    for (const records of chunk(100, (rows as any[]).filter(prop(keyPath)))) {
       await storage.db.bulkPut(key, records)
       await sleep(50)
@@ -206,7 +207,7 @@
   }
   prune(storage) {
-    const {store, key, max, sort} = this
+    const {store, key, keyPath, max, sort} = this
     const data = store.get()
     if (data.length < max * 1.1 || storage.dead.get()) {
@@ -217,7 +218,10 @@
     store.set(keep)
-    storage.db.bulkDelete(key, pluck(store.pk, discard))
+    storage.db.bulkDelete(
+      key,
+      discard.map(x => x[keyPath]),
+    )
   }
 }
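
Together these hunks decouple IndexedDBAdapter from the Collection type: the primary key becomes an explicit keyPath constructor argument and the store only has to satisfy IWritable, which is what lets a non-Collection store (the repository wrapper registered below) be persisted. A trimmed sketch of the resulting shape; the method names and bodies here are simplified for illustration, and only the pieces visible in the diff are taken from the codebase:

// Simplified sketch of IndexedDBAdapter after this commit: an explicit keyPath
// replaces the Collection's `pk`, so any get/set store can be persisted.
interface IWritable<T> {
  get(): T
  set(value: T): void
}

class IndexedDBAdapter<T extends Record<string, any>> {
  constructor(
    readonly key: string,
    readonly keyPath: string, // e.g. "id", "pubkey", "url", "address"
    readonly store: IWritable<T[]>,
    readonly max: number,
    readonly sort?: (xs: T[]) => T[],
  ) {}

  // Object store definition: the key path is no longer read off the store.
  getObjectStoreConfig() {
    return {name: this.key, opts: {keyPath: this.keyPath}}
  }

  // Rows missing the key path are skipped before writing, as in initialize().
  getRowsToSave(rows: T[]) {
    return rows.filter(row => row[this.keyPath] !== undefined)
  }

  // Keys for bulkDelete are read via the key path instead of pluck(store.pk, ...).
  getKeysToDelete(discard: T[]) {
    return discard.map(row => row[this.keyPath])
  }
}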

View File

@@ -13,7 +13,7 @@ import {topics} from "./topics"
 import {deletes, seen, _events, isDeleted, publishes} from "./events"
 import {pubkey, sessions} from "./session"
 import {channels} from "./channels"
-import {onAuth, getExecutor, tracker} from "./network"
+import {onAuth, getExecutor, tracker, repository} from "./network"
 export * from "./core"
 export * from "./auth"
@@ -48,6 +48,12 @@ const setAdapter = {
   load: a => new Set(a || []),
 }
+const repositoryStore = {
+  get: () => repository.get(),
+  set: data => repository.load(data),
+  subscribe: () => repository.derived(r => r.dump()),
+}
 // Nip 04 channels weren't getting members set
 const migrateChannels = channels => {
   return channels.map(c => {
@@ -67,35 +73,56 @@ const sessionsAdapter = {
   dump: identity,
 }
-export const storage = new Storage(11, [
+export const storage = new Storage(12, [
   new LocalStorageAdapter("pubkey", pubkey),
   new LocalStorageAdapter("sessions", sessions, sessionsAdapter),
   new LocalStorageAdapter("deletes2", deletes, setAdapter),
-  new IndexedDBAdapter("seen3", seen, 10000, sortBy(prop("created_at"))),
-  new IndexedDBAdapter("events", _events, 10000, sortByPubkeyWhitelist(prop("created_at"))),
-  new IndexedDBAdapter("publishes", publishes, 100, sortByPubkeyWhitelist(prop("created_at"))),
-  new IndexedDBAdapter("labels", _labels, 1000, sortBy(prop("created_at"))),
-  new IndexedDBAdapter("topics", topics, 1000, sortBy(prop("last_seen"))),
+  new IndexedDBAdapter("seen3", "id", seen, 10000, sortBy(prop("created_at"))),
+  new IndexedDBAdapter("events", "id", _events, 10000, sortByPubkeyWhitelist(prop("created_at"))),
+  new IndexedDBAdapter(
+    "publishes",
+    "id",
+    publishes,
+    100,
+    sortByPubkeyWhitelist(prop("created_at")),
+  ),
+  new IndexedDBAdapter("labels", "id", _labels, 1000, sortBy(prop("created_at"))),
+  new IndexedDBAdapter("topics", "name", topics, 1000, sortBy(prop("last_seen"))),
   new IndexedDBAdapter(
     "lists",
+    "naddr",
     _lists,
     1000,
     sortByPubkeyWhitelist(prop("created_at")),
     l => l.address,
   ),
-  new IndexedDBAdapter("people", people, 5000, sortByPubkeyWhitelist(prop("last_fetched"))),
-  new IndexedDBAdapter("relays", relays, 1000, sortBy(prop("count"))),
+  new IndexedDBAdapter(
+    "people",
+    "pubkey",
+    people,
+    5000,
+    sortByPubkeyWhitelist(prop("last_fetched")),
+  ),
+  new IndexedDBAdapter("relays", "url", relays, 1000, sortBy(prop("count"))),
   new IndexedDBAdapter(
     "channels",
+    "id",
     channels,
     1000,
     sortBy(prop("last_checked")),
     null,
     migrateChannels,
   ),
-  new IndexedDBAdapter("groups", groups, 1000, sortBy(prop("count"))),
-  new IndexedDBAdapter("groupAlerts", groupAlerts, 30, sortBy(prop("created_at"))),
-  new IndexedDBAdapter("groupRequests", groupRequests, 100, sortBy(prop("created_at"))),
-  new IndexedDBAdapter("groupSharedKeys", groupSharedKeys, 1000, sortBy(prop("created_at"))),
-  new IndexedDBAdapter("groupAdminKeys", groupAdminKeys, 1000),
+  new IndexedDBAdapter("groups", "address", groups, 1000, sortBy(prop("count"))),
+  new IndexedDBAdapter("groupAlerts", "id", groupAlerts, 30, sortBy(prop("created_at"))),
+  new IndexedDBAdapter("groupRequests", "id", groupRequests, 100, sortBy(prop("created_at"))),
+  new IndexedDBAdapter(
+    "groupSharedKeys",
+    "pubkey",
+    groupSharedKeys,
+    1000,
+    sortBy(prop("created_at")),
+  ),
+  new IndexedDBAdapter("groupAdminKeys", "pubkey", groupAdminKeys, 1000),
+  new IndexedDBAdapter("repository", "id", repositoryStore as any, 10000),
 ])
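
This is the change named in the commit title: the Welshman event repository is not a Collection, so the repositoryStore wrapper defined above adapts it to the get/set surface the adapter now accepts, and it is registered as one more object store keyed by "id" while the storage version bumps from 11 to 12. A rough sketch of the round trip, using a stub repository in place of the real one imported from ./network:

// Rough sketch of persisting an event repository through the adapter. The
// get/load method names come from the diff; the stub repository, the event
// shape, and the helper functions are illustrative only.
type TrustedEvent = {id: string; kind: number; created_at: number; content: string}

const repository = {
  events: [] as TrustedEvent[],
  get: () => repository.events, // events currently held in memory
  load: (events: TrustedEvent[]) => {
    repository.events = events // hydrate from persisted rows on startup
  },
}

// Store-shaped wrapper, mirroring the repositoryStore added in this commit.
const repositoryStore = {
  get: () => repository.get(),
  set: (data: TrustedEvent[]) => repository.load(data),
}

// On boot the adapter reads every row from the "repository" object store and
// calls set(); when saving it calls get() and writes rows keyed by "id".
const hydrate = (rows: TrustedEvent[]) => repositoryStore.set(rows)
const snapshot = (): TrustedEvent[] => repositoryStore.get()

Persisting the repository this way should let locally cached events survive a reload, so feeds can start rendering from IndexedDB before any relay responds.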

View File

@@ -217,7 +217,7 @@ export const createAndPublish = async ({
   const template = createEvent(kind, {content, tags})
   const event = await sign(template, {anonymous, sk})
-  return publish({event, relays, verb, signal: AbortSignal.timeout(timeout)})
+  return publish({event, relays, verb, timeout})
 }
 setInterval(() => {
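
The call site now passes the raw timeout and leaves cancellation to publish rather than building an AbortSignal itself. A minimal sketch of that division of labor; the publish signature here is an assumption for illustration, not the engine's actual API:

// Sketch: publish receives a millisecond timeout and builds the abort signal
// itself, so call sites like createAndPublish above stay declarative.
type PublishOpts = {event: object; relays: string[]; verb?: string; timeout: number}

const publish = async (opts: PublishOpts) => {
  const signal = AbortSignal.timeout(opts.timeout)
  for (const relay of opts.relays) {
    if (signal.aborted) break
    // ...send opts.event to `relay` here...
  }
}

// Call site after this commit: no AbortSignal.timeout at the call site.
publish({event: {}, relays: ["wss://relay.example.com"], timeout: 10_000})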

View File

@@ -22,16 +22,17 @@
 </script>
 <div
-  class={cx($$props.class, "shadow-inset relative rounded h-7 overflow-hidden", {
+  class={cx($$props.class, "shadow-inset relative h-7 overflow-hidden rounded", {
+    "!bg-transparent": $$props.type === "range",
     "bg-neutral-900 text-neutral-100": dark,
     "bg-white dark:text-neutral-900": !dark,
   })}>
   <input
     {...$$props}
-    class={cx(
-      "outline-none px-3 w-full placeholder:text-neutral-400 h-7 bg-transparent pb-px",
-      {"pl-10": showBefore, "pr-10": showAfter},
-    )}
+    class={cx("h-7 w-full bg-transparent px-3 pb-px outline-none placeholder:text-neutral-400", {
+      "pl-10": showBefore,
+      "pr-10": showAfter,
+    })}
     value={inputValue}
     bind:this={element}
     on:input={onInput}
@@ -41,14 +42,15 @@
     on:input
     on:keydown />
   {#if showBefore}
-    <div class="absolute left-0 top-0 flex items-center gap-2 px-3 opacity-75 h-7">
+    <div class="absolute left-0 top-0 flex h-7 items-center gap-2 px-3 opacity-75">
       <div>
         <slot name="before" />
       </div>
     </div>
   {/if}
   {#if showAfter}
-    <div class="absolute right-0 top-0 m-px flex items-center gap-2 rounded-full px-3 opacity-75 h-7">
+    <div
+      class="absolute right-0 top-0 m-px flex h-7 items-center gap-2 rounded-full px-3 opacity-75">
       <div>
         <slot name="after" />
       </div>