Manual load more
parent b1f93c9fd8 · commit 63454728f8
@@ -8,13 +8,11 @@ import Icon from "Icons/Icon";
 import { dedupeByPubkey, findTag, tagFilterOfTextRepost } from "SnortUtils";
 import ProfileImage from "Element/ProfileImage";
 import useTimelineFeed, { TimelineFeed, TimelineSubject } from "Feed/TimelineFeed";
-import LoadMore from "Element/LoadMore";
 import Zap from "Element/Zap";
 import Note from "Element/Note";
 import NoteReaction from "Element/NoteReaction";
 import useModeration from "Hooks/useModeration";
 import ProfilePreview from "Element/ProfilePreview";
-import Skeleton from "Element/Skeleton";
 import { UserCache } from "Cache";

 export interface TimelineProps {
@@ -142,11 +140,11 @@ const Timeline = (props: TimelineProps) => {
       )}
       {mainFeed.map(eventElement)}
       {(props.loadMore === undefined || props.loadMore === true) && (
-        <LoadMore onLoadMore={() => feed.loadMore()} shouldLoadMore={!feed.loading}>
-          <Skeleton width="100%" height="120px" margin="0 0 16px 0" />
-          <Skeleton width="100%" height="120px" margin="0 0 16px 0" />
-          <Skeleton width="100%" height="120px" margin="0 0 16px 0" />
-        </LoadMore>
+        <div className="flex f-center">
+          <button type="button" onClick={() => feed.loadMore()}>
+            <FormattedMessage defaultMessage="Load more" />
+          </button>
+        </div>
       )}
     </>
   );
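With the auto-triggering LoadMore/Skeleton block gone, older notes are only fetched when the user presses the button. The idea behind `feed.loadMore()` is a timestamp cursor that advances only on click; the sketch below is a generic illustration of that pattern, not the actual useTimelineFeed implementation (the hook name and window size are assumptions):

```ts
import { useCallback, useMemo, useState } from "react";

// Illustrative only: a manual "load more" cursor. The real useTimelineFeed
// hook manages relay subscriptions; only the windowing idea is shown here.
const WINDOW = 60 * 60 * 6; // assumed page size: 6 hours of notes
const unixNow = () => Math.floor(Date.now() / 1000);

export function useManualWindow() {
  const [until, setUntil] = useState(unixNow);

  // Advance the cursor only when the user asks for more; nothing is
  // fetched automatically on scroll.
  const loadMore = useCallback(() => setUntil(u => u - WINDOW), []);

  const filter = useMemo(() => ({ since: until - WINDOW, until }), [until]);
  return { filter, loadMore };
}
```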
@@ -1,5 +1,5 @@
 import { useCallback, useEffect, useMemo } from "react";
-import { EventKind, u256, FlatNoteStore, RequestBuilder } from "@snort/system";
+import { EventKind, FlatNoteStore, RequestBuilder } from "@snort/system";
 import { useRequestBuilder } from "@snort/system-react";

 import { unixNow, unwrap, tagFilterOfTextRepost } from "SnortUtils";
@@ -9,7 +9,7 @@
   "license": "GPL-3.0-or-later",
   "scripts": {
     "build": "rm -rf dist && tsc",
-    "test": "jest"
+    "test": "jest --runInBand"
   },
   "files": [
     "src",
@@ -61,63 +61,26 @@ export function splitByWriteRelays(cache: RelayCache, filter: ReqFilter): Array<
     ];
   }

-  const allRelays = unwrap(authors).map(a => {
-    return {
-      key: a,
-      relays: cache
-        .getFromCache(a)
-        ?.relays?.filter(a => a.settings.write)
-        .sort(() => (Math.random() < 0.5 ? 1 : -1)),
-    };
-  });
+  const topRelays = pickTopRelays(cache, unwrap(authors), PickNRelays);
+  const pickedRelays = dedupe(topRelays.flatMap(a => a.relays));

-  const missing = allRelays.filter(a => a.relays === undefined || a.relays.length === 0);
-  const hasRelays = allRelays.filter(a => a.relays !== undefined && a.relays.length > 0);
-  const relayUserMap = hasRelays.reduce((acc, v) => {
-    for (const r of unwrap(v.relays)) {
-      if (!acc.has(r.url)) {
-        acc.set(r.url, new Set([v.key]));
-      } else {
-        unwrap(acc.get(r.url)).add(v.key);
-      }
-    }
-    return acc;
-  }, new Map<string, Set<string>>());
-
-  // selection algo will just pick relays with the most users
-  const topRelays = [...relayUserMap.entries()].sort(([, v], [, v1]) => v1.size - v.size);
-
-  // <relay, key[]> - count keys per relay
-  // <key, relay[]> - pick n top relays
-  // <relay, key[]> - map keys per relay (for subscription filter)
-
-  const userPickedRelays = unwrap(authors).map(k => {
-    // pick top 3 relays for this key
-    const relaysForKey = topRelays
-      .filter(([, v]) => v.has(k))
-      .slice(0, PickNRelays)
-      .map(([k]) => k);
-    return { k, relaysForKey };
-  });
-
-  const pickedRelays = new Set(userPickedRelays.map(a => a.relaysForKey).flat());
-
-  const picked = [...pickedRelays].map(a => {
-    const keysOnPickedRelay = new Set(userPickedRelays.filter(b => b.relaysForKey.includes(a)).map(b => b.k));
+  const picked = pickedRelays.map(a => {
+    const keysOnPickedRelay = dedupe(topRelays.filter(b => b.relays.includes(a)).map(b => b.key));
     return {
       relay: a,
       filter: {
         ...filter,
-        authors: [...keysOnPickedRelay],
+        authors: keysOnPickedRelay,
       },
     } as RelayTaggedFilter;
   });
-  if (missing.length > 0) {
+  const noRelays = dedupe(topRelays.filter(a => a.relays.length === 0).map(a => a.key));
+  if (noRelays.length > 0) {
     picked.push({
       relay: "",
       filter: {
         ...filter,
-        authors: missing.map(a => a.key),
+        authors: noRelays,
       },
     });
   }
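The relay-ranking logic removed above now sits behind a `pickTopRelays` helper that this hunk calls but does not show. A minimal sketch of what such a helper plausibly does, reconstructed from the deleted inline code; the interface names and exact signature here are assumptions, not the actual module:

```ts
// Sketch only: reconstructed from the removed inline logic, not the real helper.
interface RelaySettings { read: boolean; write: boolean }
interface FullRelaySettings { url: string; settings: RelaySettings }
interface UsersRelays { relays: Array<FullRelaySettings> }
interface RelayCache { getFromCache(pubkey?: string): UsersRelays | undefined }

export function pickTopRelays(cache: RelayCache, authors: Array<string>, n: number) {
  // Count how many of the requested authors write to each relay.
  const relayUsers = new Map<string, Set<string>>();
  for (const a of authors) {
    for (const r of cache.getFromCache(a)?.relays?.filter(x => x.settings.write) ?? []) {
      const users = relayUsers.get(r.url) ?? new Set<string>();
      users.add(a);
      relayUsers.set(r.url, users);
    }
  }
  // Most-shared relays first, so a popular relay covers many authors with one REQ.
  const ranked = [...relayUsers.entries()].sort(([, a], [, b]) => b.size - a.size);

  // For each author keep only their top n relays; authors with none get an empty list.
  return authors.map(key => ({
    key,
    relays: ranked.filter(([, users]) => users.has(key)).slice(0, n).map(([url]) => url),
  }));
}
```

Because shared relays sort first, the PickNRelays cap trades a small number of subscriptions against per-author coverage.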
@@ -142,24 +105,20 @@ export function splitFlatByWriteRelays(cache: RelayCache, input: Array<FlatReqFi
   const pickedRelays = dedupe(topRelays.flatMap(a => a.relays));

   const picked = pickedRelays.map(a => {
-    const keysOnPickedRelay = new Set(userPickedRelays.filter(b => b.relaysForKey.includes(a)).map(b => b.k));
+    const authorsOnRelay = new Set(topRelays.filter(v => v.relays.includes(a)).map(v => v.key));
     return {
       relay: a,
-      filter: {
-        ...filter,
-        authors: [...keysOnPickedRelay],
-      },
-    } as RelayTaggedFilter;
+      filters: input.filter(v => v.authors && authorsOnRelay.has(v.authors)),
+    } as RelayTaggedFlatFilters;
   });
-  if (missing.length > 0) {
+  const noRelays = new Set(topRelays.filter(v => v.relays.length === 0).map(v => v.key));
+  if (noRelays.size > 0) {
     picked.push({
       relay: "",
-      filter: {
-        ...filter,
-        authors: missing.map(a => a.key),
-      },
-    });
+      filters: input.filter(v => !v.authors || noRelays.has(v.authors)),
+    } as RelayTaggedFlatFilters);
   }

   debug("GOSSIP")("Picked %o", picked);
   return picked;
 }
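For a sense of the output shape of the flat variant, here is a hedged usage example: the cache stub follows the RelayCache sketch above, the relay URLs are made up, and the grouping in the trailing comment is the expected result rather than verified output.

```ts
// Hypothetical usage; import path and FlatReqFilter shape are assumptions.
// import { splitFlatByWriteRelays } from "./gossip-model";

const cache = {
  getFromCache: (pubkey?: string) =>
    pubkey === "alice"
      ? { relays: [{ url: "wss://relay-a.example/", settings: { read: true, write: true } }] }
      : { relays: [{ url: "wss://relay-b.example/", settings: { read: true, write: true } }] },
};

const flatFilters = [
  { kinds: 1, authors: "alice" },
  { kinds: 1, authors: "bob" },
];

// Expected grouping: one RelayTaggedFlatFilters entry per picked relay, each
// carrying only the flat filters whose author writes there, plus a relay: ""
// entry for any author with no known write relays.
// [
//   { relay: "wss://relay-a.example/", filters: [{ kinds: 1, authors: "alice" }] },
//   { relay: "wss://relay-b.example/", filters: [{ kinds: 1, authors: "bob" }] },
// ]
```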
@@ -126,7 +126,7 @@ export class ProfileLoaderService {
     const empty = couldNotFetch.map(a =>
       this.#cache.update({
         pubkey: a,
-        loaded: unixNowMs() - ProfileCacheExpire + 5_000, // expire in 5s
+        loaded: unixNowMs() - ProfileCacheExpire + 30_000, // expire in 30s
         created: 69,
       } as MetadataCache)
     );
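The back-dated `loaded` value is a small negative-cache trick: the entry is presumably treated as stale once `unixNowMs() - loaded > ProfileCacheExpire`, so writing `loaded = now - ProfileCacheExpire + 30_000` makes a failed profile lookup retryable after roughly 30 seconds instead of 5. A quick worked check, with an assumed value for the constant:

```ts
// Assumed constant for illustration; the real ProfileCacheExpire may differ.
const ProfileCacheExpire = 1_000 * 60 * 60; // 1 hour in ms
const unixNowMs = () => Date.now();

const now = unixNowMs();
const loaded = now - ProfileCacheExpire + 30_000;

// The entry's age right now is 30s short of the expiry threshold...
console.log(now - loaded === ProfileCacheExpire - 30_000); // true
// ...so it crosses the threshold, and becomes eligible for re-fetch, about 30s later.
```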
@@ -250,7 +250,7 @@ export class Query implements QueryBase {
   #onProgress() {
     const isFinished = this.progress === 1;
     if (this.feed.loading !== isFinished) {
-      this.#log("%s loading=%s, progress=%d", this.id, this.feed.loading, this.progress);
+      this.#log("%s loading=%s, progress=%d, traces=%O", this.id, this.feed.loading, this.progress, this.#tracing);
       this.feed.loading = isFinished;
     }
   }
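For readers tracing the loading flag: `progress === 1` is what flips `feed.loading` off, and the extra `traces=%O` argument dumps the per-relay traces that drive it. A minimal sketch of how such a progress value could be derived; the field names are assumptions, not the actual Query internals:

```ts
// Sketch: progress as the fraction of per-relay subscription traces that
// have reached EOSE or been closed.
interface QueryTraceLike {
  eose: boolean;
  closed: boolean;
}

function progressOf(traces: Array<QueryTraceLike>): number {
  if (traces.length === 0) return 1; // nothing outstanding, treat as done
  const finished = traces.filter(t => t.eose || t.closed).length;
  return finished / traces.length;
}
// progress === 1 => every relay has answered, so feed.loading can be cleared.
```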
@@ -4,8 +4,8 @@ import { appendDedupe, sanitizeRelayUrl, unixNowMs } from "@snort/shared";

 import { ReqFilter, u256, HexKey, EventKind } from ".";
 import { diffFilters } from "./request-splitter";
-import { RelayCache, splitAllByWriteRelays, splitByWriteRelays } from "./gossip-model";
-import { mergeSimilar } from "./request-merger";
+import { RelayCache, splitByWriteRelays, splitFlatByWriteRelays } from "./gossip-model";
+import { flatMerge, mergeSimilar } from "./request-merger";
 import { FlatReqFilter, expandFilter } from "./request-expander";

 /**
@@ -111,10 +111,10 @@ export class RequestBuilder {
     const ts = unixNowMs() - start;
     this.#log("buildDiff %s %d ms", this.id, ts);
     if (diff.changed) {
-      return splitAllByWriteRelays(relays, diff.added).map(a => {
+      return splitFlatByWriteRelays(relays, diff.added).map(a => {
         return {
           strategy: RequestStrategy.AuthorsRelays,
-          filters: a.filters,
+          filters: flatMerge(a.filters),
           relay: a.relay,
         };
       });
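The diff path now works end to end on flat filters: ReqFilters are expanded into single-value filters, diffed against the previous build, split per write relay, and only then merged back into normal ReqFilters with `flatMerge`. A hedged illustration of that round trip, using the flat shape the tests below assert on; the real expandFilter/flatMerge in request-expander and request-merger handle more fields than this:

```ts
// Illustration only; not the library implementation.
type ReqFilter = { kinds?: number[]; authors?: string[]; since?: number };
type FlatReqFilter = { kinds?: number; authors?: string; since?: number };

// Expand: one flat filter per (kind, author) combination.
function expand(f: ReqFilter): FlatReqFilter[] {
  const out: FlatReqFilter[] = [];
  for (const k of f.kinds ?? [undefined]) {
    for (const a of f.authors ?? [undefined]) {
      const flat: FlatReqFilter = {};
      if (k !== undefined) flat.kinds = k;
      if (a !== undefined) flat.authors = a;
      if (f.since !== undefined) flat.since = f.since;
      out.push(flat);
    }
  }
  return out;
}

// Merge: group flat filters that differ only by author back into one ReqFilter.
function mergeFlat(flat: FlatReqFilter[]): ReqFilter[] {
  const grouped = new Map<string, ReqFilter>();
  for (const f of flat) {
    const key = `${f.kinds}:${f.since}`;
    const existing = grouped.get(key) ?? {
      ...(f.kinds !== undefined && { kinds: [f.kinds] }),
      ...(f.since !== undefined && { since: f.since }),
      authors: [],
    };
    if (f.authors) existing.authors?.push(f.authors);
    grouped.set(key, existing);
  }
  return [...grouped.values()];
}

// expand({ kinds: [0], authors: ["a", "b"] })
//   => [{ kinds: 0, authors: "a" }, { kinds: 0, authors: "b" }]
// mergeFlat of that list => [{ kinds: [0], authors: ["a", "b"] }]
```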
@@ -1,13 +1,10 @@
 import { flatFilterEq } from "./utils";
 import { FlatReqFilter } from "./request-expander";
-import { flatMerge } from "./request-merger";

 export function diffFilters(prev: Array<FlatReqFilter>, next: Array<FlatReqFilter>, calcRemoved?: boolean) {
   const added = [];
   const removed = [];

-  prev = [...prev];
-  next = [...next];
   for (const n of next) {
     const px = prev.findIndex(a => flatFilterEq(a, n));
     if (px !== -1) {
@@ -1,9 +1,9 @@
 import { Connection } from "../src";
 import { describe, expect } from "@jest/globals";
-import { Query } from "../src/Query";
+import { Query } from "../src/query";
 import { getRandomValues } from "crypto";
-import { FlatNoteStore } from "../src/NoteCollection";
-import { RequestStrategy } from "../src/RequestBuilder";
+import { FlatNoteStore } from "../src/note-collection";
+import { RequestStrategy } from "../src/request-builder";

 window.crypto = {} as any;
 window.crypto.getRandomValues = getRandomValues as any;
@@ -1,4 +1,4 @@
-import { EventExt } from "../src/EventExt";
+import { EventExt } from "../src/event-ext";

 describe("NIP-10", () => {
   it("should extract thread", () => {
@@ -1,4 +1,4 @@
-import { splitAllByWriteRelays } from "../src/GossipModel";
+import { splitAllByWriteRelays } from "../src/gossip-model";

 describe("GossipModel", () => {
   it("should not output empty", () => {
@@ -1,6 +1,6 @@
-import { TaggedRawEvent } from "../src/Nostr";
+import { TaggedRawEvent } from "../src/nostr";
 import { describe, expect } from "@jest/globals";
-import { FlatNoteStore, ReplaceableNoteStore } from "../src/NoteCollection";
+import { FlatNoteStore, ReplaceableNoteStore } from "../src/note-collection";

 describe("NoteStore", () => {
   describe("flat", () => {
@@ -1,7 +1,7 @@
-import { RelayCache } from "../src/GossipModel";
-import { RequestBuilder, RequestStrategy } from "../src/RequestBuilder";
+import { RelayCache } from "../src/gossip-model";
+import { RequestBuilder, RequestStrategy } from "../src/request-builder";
 import { describe, expect } from "@jest/globals";
-import { expandFilter } from "../src/RequestExpander";
+import { expandFilter } from "../src/request-expander";
 import { bytesToHex } from "@noble/curves/abstract/utils";
 import { unixNow, unixNowMs } from "@snort/shared";
@@ -1,4 +1,4 @@
-import { expandFilter } from "../src/RequestExpander";
+import { expandFilter } from "../src/request-expander";

 describe("RequestExpander", () => {
   test("expand filter", () => {
@@ -1,4 +1,4 @@
-import { eventMatchesFilter } from "../src/RequestMatcher";
+import { eventMatchesFilter } from "../src/request-matcher";

 describe("RequestMatcher", () => {
   it("should match simple filter", () => {
@@ -1,6 +1,6 @@
 import { ReqFilter } from "../src";
-import { canMergeFilters, filterIncludes, flatMerge, mergeSimilar, simpleMerge } from "../src/RequestMerger";
-import { FlatReqFilter, expandFilter } from "../src/RequestExpander";
+import { canMergeFilters, filterIncludes, flatMerge, mergeSimilar, simpleMerge } from "../src/request-merger";
+import { FlatReqFilter, expandFilter } from "../src/request-expander";

 describe("RequestMerger", () => {
   it("should simple merge authors", () => {
@@ -1,15 +1,15 @@
 import { ReqFilter } from "../src";
 import { describe, expect } from "@jest/globals";
-import { diffFilters } from "../src/RequestSplitter";
-import { expandFilter } from "../src/RequestExpander";
+import { diffFilters } from "../src/request-splitter";
+import { expandFilter } from "../src/request-expander";

 describe("RequestSplitter", () => {
   test("single filter add value", () => {
     const a: Array<ReqFilter> = [{ kinds: [0], authors: ["a"] }];
     const b: Array<ReqFilter> = [{ kinds: [0], authors: ["a", "b"] }];
     const diff = diffFilters(a.flatMap(expandFilter), b.flatMap(expandFilter), true);
-    expect(diff).toEqual({
-      added: [{ kinds: [0], authors: ["b"] }],
+    expect(diff).toMatchObject({
+      added: [{ kinds: 0, authors: "b" }],
       removed: [],
       changed: true,
     });
@@ -18,9 +18,9 @@ describe("RequestSplitter", () => {
     const a: Array<ReqFilter> = [{ kinds: [0], authors: ["a"] }];
     const b: Array<ReqFilter> = [{ kinds: [0], authors: ["b"] }];
     const diff = diffFilters(a.flatMap(expandFilter), b.flatMap(expandFilter), true);
-    expect(diff).toEqual({
-      added: [{ kinds: [0], authors: ["b"] }],
-      removed: [{ kinds: [0], authors: ["a"] }],
+    expect(diff).toMatchObject({
+      added: [{ kinds: 0, authors: "b" }],
+      removed: [{ kinds: 0, authors: "a" }],
       changed: true,
     });
   });
@@ -28,9 +28,12 @@ describe("RequestSplitter", () => {
     const a: Array<ReqFilter> = [{ kinds: [0], authors: ["a"], since: 100 }];
     const b: Array<ReqFilter> = [{ kinds: [0], authors: ["a", "b"], since: 101 }];
     const diff = diffFilters(a.flatMap(expandFilter), b.flatMap(expandFilter), true);
-    expect(diff).toEqual({
-      added: [{ kinds: [0], authors: ["a", "b"], since: 101 }],
-      removed: [{ kinds: [0], authors: ["a"], since: 100 }],
+    expect(diff).toMatchObject({
+      added: [
+        { kinds: 0, authors: "a", since: 101 },
+        { kinds: 0, authors: "b", since: 101 },
+      ],
+      removed: [{ kinds: 0, authors: "a", since: 100 }],
       changed: true,
     });
   });
@@ -44,10 +47,10 @@ describe("RequestSplitter", () => {
       { kinds: [69], authors: ["a", "c"] },
     ];
     const diff = diffFilters(a.flatMap(expandFilter), b.flatMap(expandFilter), true);
-    expect(diff).toEqual({
+    expect(diff).toMatchObject({
       added: [
-        { kinds: [0], authors: ["b"] },
-        { kinds: [69], authors: ["c"] },
+        { kinds: 0, authors: "b" },
+        { kinds: 69, authors: "c" },
       ],
       removed: [],
       changed: true,
@@ -63,12 +66,15 @@ describe("RequestSplitter", () => {
       { kinds: [69], authors: ["c"] },
     ];
     const diff = diffFilters(a.flatMap(expandFilter), b.flatMap(expandFilter), true);
-    expect(diff).toEqual({
+    expect(diff).toMatchObject({
       added: [
-        { kinds: [0], authors: ["b"] },
-        { kinds: [69], authors: ["c"] },
+        { kinds: 0, authors: "b" },
+        { kinds: 69, authors: "c" },
       ],
-      removed: [{ kinds: [0, 69], authors: ["a"] }],
+      removed: [
+        { kinds: 0, authors: "a" },
+        { kinds: 69, authors: "a" },
+      ],
       changed: true,
     });
   });
@@ -79,8 +85,8 @@ describe("RequestSplitter", () => {
       { kinds: [69], authors: ["c"] },
     ];
     const diff = diffFilters(a.flatMap(expandFilter), b.flatMap(expandFilter), true);
-    expect(diff).toEqual({
-      added: [{ kinds: [69], authors: ["c"] }],
+    expect(diff).toMatchObject({
+      added: [{ kinds: 69, authors: "c" }],
       removed: [],
       changed: true,
     });
@@ -1,5 +1,5 @@
-import { NostrPrefix } from "../src/Links";
-import { parseNostrLink, tryParseNostrLink } from "../src/NostrLink";
+import { NostrPrefix } from "../src/links";
+import { parseNostrLink, tryParseNostrLink } from "../src/nostr-link";

 describe("tryParseNostrLink", () => {
   it("is a valid nostr link", () => {