client.service.ts raw
1 import { ExtendedKind } from '@/constants'
2 import { Pubkey } from '@/domain'
3 import {
4 compareEvents,
5 getReplaceableCoordinate,
6 getReplaceableCoordinateFromEvent,
7 isReplaceableEvent
8 } from '@/lib/event'
9 import { getProfileFromEvent, getRelayListFromEvent } from '@/lib/event-metadata'
10 import { getPubkeysFromPTags, getServersFromServerTags, tagNameEquals } from '@/lib/tag'
11 import { isLocalNetworkUrl, isWebsocketUrl, normalizeUrl } from '@/lib/url'
12 import { isSafari } from '@/lib/utils'
13 import { ISigner, TProfile, TPublishOptions, TRelayList, TSubRequestFilter } from '@/types'
14 import { sha256 } from '@noble/hashes/sha2'
15 import DataLoader from 'dataloader'
16 import dayjs from 'dayjs'
17 import FlexSearch from 'flexsearch'
18 import { LRUCache } from 'lru-cache'
19 import {
20 EventTemplate,
21 Filter,
22 kinds,
23 matchFilters,
24 Event as NEvent,
25 nip19,
26 SimplePool,
27 VerifiedEvent
28 } from 'nostr-tools'
29 import { AbstractRelay } from 'nostr-tools/abstract-relay'
30 import indexedDb from './indexed-db.service'
31 import storage from './local-storage.service'
32 import managedOutboxService from './managed-outbox.service'
33 import nrcCacheRelayService from './nrc/nrc-cache-relay.service'
34 import relayDiscoveryService from './relay-discovery.service'
35 import relayListCacheService from './relay-list-cache.service'
36 import relayStatsService from './relay-stats.service'
37
/**
 * Bootstrap relays used when no user relays are available yet.
 * Essential for initial login when we need to fetch the user's relay list.
 * Only consulted as a last resort, when relay discovery has no cached
 * results (see getFallbackRelays below).
 */
const BOOTSTRAP_RELAYS = [
  'wss://relay.orly.dev/',
  'wss://relay.damus.io/',
  'wss://relay.nostr.band/',
  'wss://nos.lol/',
  'wss://nostr.wine/'
]
49
/** A cached timeline entry: [event id, created_at timestamp]. */
type TTimelineRef = [string, number]
51
class ClientService extends EventTarget {
  static instance: ClientService

  // Set after login; used for NIP-42 auth and event signing
  signer?: ISigner
  pubkey?: string
  // Relay set currently in use for the logged-in user
  currentRelays: string[] = []
  /** After publishEvent resolves, this promise resolves to URLs of relays that failed */
  lastPublishFailedRelays: Promise<string[]> = Promise.resolve([])
  private pool: SimplePool

  // Timeline cache keyed by a hash of (urls, filter):
  // - object form: a single timeline with its event refs, filter and relay urls
  // - string[] form: a composite timeline listing sub-timeline keys
  private timelines: Record<
    string,
    | {
        refs: TTimelineRef[]
        filter: TSubRequestFilter
        urls: string[]
      }
    | string[]
    | undefined
  > = {}
  // Latest known replaceable event per coordinate
  private replaceableEventCacheMap = new Map<string, NEvent>()
  // Backing cache for eventDataLoader: event id -> promise of event
  private eventCacheMap = new Map<string, Promise<NEvent | undefined>>()
  private eventDataLoader = new DataLoader<string, NEvent | undefined>(
    (ids) => Promise.all(ids.map((id) => this._fetchEvent(id))),
    { cacheMap: this.eventCacheMap }
  )
  // Batches id lookups against big relays; 50ms batching window, no caching
  private fetchEventFromBigRelaysDataloader = new DataLoader<string, NEvent | undefined>(
    this.fetchEventsFromBigRelays.bind(this),
    { cache: false, batchScheduleFn: (callback) => setTimeout(callback, 50) }
  )

  // Full-text index over usernames (populated from cached profile events)
  private userIndex = new FlexSearch.Index({
    tokenize: 'forward'
  })
86
constructor() {
  super()
  this.pool = new SimplePool()
  // Track which relays each event was seen on (needed for relay hints)
  this.pool.trackRelays = true
}
92
/** Lazily create and return the shared singleton instance. */
public static getInstance(): ClientService {
  if (ClientService.instance) {
    return ClientService.instance
  }
  const instance = new ClientService()
  ClientService.instance = instance
  // Fire-and-forget: init loads relay stats and the username index
  instance.init()
  return instance
}
100
101 async init() {
102 await relayStatsService.init()
103 await indexedDb.iterateProfileEvents((profileEvent) => this.addUsernameToIndex(profileEvent))
104 }
105
106 /**
107 * Get fallback relays: discovered relays (if cached) or hardcoded bootstrap relays.
108 * This ensures we always have relays available even before discovery has run.
109 */
110 private getFallbackRelays(): string[] {
111 const count = storage.getFallbackRelayCount()
112 const discovered = relayDiscoveryService.getTopRelays(count)
113 return discovered.length > 0 ? discovered : BOOTSTRAP_RELAYS
114 }
115
/**
 * Decide which relays an event should be published to.
 *
 * Precedence:
 * 1. Reports go to the relays the reported event was seen on.
 * 2. Config kinds publish nowhere when NRC-only config sync is enabled.
 * 3. `specifiedRelayUrls`, when given, are used as-is.
 * 4. Otherwise: additional urls + author's write relays + mentioned users'
 *    write relays (+ relay hints for comments on external content).
 * Falls back to the user's current relays when nothing was found, then
 * gates everything through the managed outbox service (own relays bypass
 * gating).
 */
async determineTargetRelays(
  event: NEvent,
  { specifiedRelayUrls, additionalRelayUrls }: TPublishOptions = {}
) {
  if (event.kind === kinds.Report) {
    const targetEventId = event.tags.find(tagNameEquals('e'))?.[1]
    if (targetEventId) {
      return this.getSeenEventRelayUrls(targetEventId)
    }
  }

  // NRC-only config sync: don't publish config events to relays, only sync via NRC
  const CONFIG_KINDS = [
    kinds.Contacts, // 3
    kinds.Mutelist, // 10000
    kinds.RelayList, // 10002
    30002, // Relay sets
    ExtendedKind.FAVORITE_RELAYS, // 10012
    30078 // Application data (settings sync)
  ]
  if (storage.getNrcOnlyConfigSync() && CONFIG_KINDS.includes(event.kind)) {
    return [] // No relays - NRC will sync this event to paired devices
  }

  const relaySet = new Set<string>()
  if (specifiedRelayUrls?.length) {
    specifiedRelayUrls.forEach((url) => relaySet.add(url))
  } else {
    additionalRelayUrls?.forEach((url) => relaySet.add(url))

    // Get user's relay list
    const userRelayList = await this.fetchRelayList(event.pubkey)

    // Add user's write relays
    userRelayList.write.forEach((url) => relaySet.add(url))

    // For events that mention others, add recipients' write relays
    if (![kinds.Contacts, kinds.Mutelist, ExtendedKind.PINNED_USERS].includes(event.kind)) {
      const mentions: string[] = []
      event.tags.forEach(([tagName, tagValue]) => {
        if (
          ['p', 'P'].includes(tagName) &&
          !!tagValue &&
          Pubkey.isValidHex(tagValue) &&
          !mentions.includes(tagValue)
        ) {
          mentions.push(tagValue)
        }
      })
      if (mentions.length > 0) {
        // Use cached relay lists for recipients (capped at 10 to bound fan-out)
        const recipientRelays = await relayListCacheService.getWriteRelaysForRecipients(mentions)
        recipientRelays.slice(0, 10).forEach((url) => relaySet.add(url))
      }
    }

    // For comments on external content, use relay hints from tags
    if (event.kind === ExtendedKind.COMMENT) {
      const rootITag = event.tags.find(tagNameEquals('I'))
      if (rootITag) {
        // Extract relay hints from e-tags or a-tags in the event
        // NOTE(review): only 'wss://' hints are honored; 'ws://' hints are skipped — confirm intended
        event.tags.forEach((tag) => {
          if ((tag[0] === 'e' || tag[0] === 'a' || tag[0] === 'E' || tag[0] === 'A') && tag[2]?.startsWith('wss://')) {
            relaySet.add(tag[2])
          }
        })
      }
    }
  }

  // If no relays found, fall back to user's current relays (not hardcoded big relays)
  if (!relaySet.size && this.currentRelays.length > 0) {
    this.currentRelays.forEach((url) => relaySet.add(url))
  }

  // Gate through managed outbox service — user's own write relays bypass gating
  const ownRelays = new Set(this.currentRelays)
  return managedOutboxService.filterRelayUrls(Array.from(relaySet), 'outbox', ownRelays)
}
195
196 async determineRelaysByFilter(filter: Filter) {
197 const ownRelays = new Set(this.currentRelays)
198 if (filter.search) {
199 return storage.getSearchRelays()
200 } else if (filter.authors?.length) {
201 const relayLists = await this.fetchRelayLists(filter.authors)
202 const relays = Array.from(new Set(relayLists.flatMap((list) => list.write.slice(0, 5))))
203 const filtered = managedOutboxService.filterRelayUrls(relays, 'inbox', ownRelays)
204 return filtered.length > 0 ? filtered : this.currentRelays
205 } else if (filter['#p']?.length) {
206 const relayLists = await this.fetchRelayLists(filter['#p'])
207 const relays = Array.from(new Set(relayLists.flatMap((list) => list.read.slice(0, 5))))
208 const filtered = managedOutboxService.filterRelayUrls(relays, 'inbox', ownRelays)
209 return filtered.length > 0 ? filtered : this.currentRelays
210 }
211 // Use current relays, falling back to discovered/bootstrap relays
212 return this.currentRelays.length > 0 ? this.currentRelays : this.getFallbackRelays()
213 }
214
/**
 * Publish `event` to the given relays.
 *
 * Resolves as soon as ONE relay acknowledges the event (also emitting it
 * locally via emitNewEvent); rejects when no relay ACKs within 3s, or when
 * every relay has failed. Handles NIP-42 auth-required by authenticating
 * once per relay and retrying the publish. Per-relay outcomes are recorded
 * in relayStatsService, and `lastPublishFailedRelays` is set to a
 * background promise resolving to the urls of relays that ultimately
 * failed.
 */
async publishEvent(
  relayUrls: string[],
  event: NEvent
) {
  const uniqueRelayUrls = Array.from(new Set(relayUrls))
  const TIMEOUT_MS = 3_000

  // Track per-relay outcomes: 'success' | Error
  const results = new Map<string, 'pending' | 'success' | Error>(
    uniqueRelayUrls.map((url) => [url, 'pending'])
  )

  // Resolves as soon as ONE relay ACKs, or rejects on timeout / all-failed
  let allSettledPromise: Promise<unknown>

  await new Promise<void>((resolve, reject) => {
    let resolved = false

    const timer = setTimeout(() => {
      if (!resolved) {
        resolved = true
        reject(new Error('Publish timed out: no relay acknowledged within 3 seconds'))
      }
    }, TIMEOUT_MS)

    const onSuccess = (url: string) => {
      results.set(url, 'success')
      relayStatsService.recordPublishSuccess(url)
      if (!resolved) {
        resolved = true
        clearTimeout(timer)
        // Let local subscriptions see the event immediately
        this.emitNewEvent(event)
        resolve()
      }
    }

    const onError = (url: string, error: Error) => {
      results.set(url, error)
      relayStatsService.recordPublishFailure(url)
      // If all relays have finished and none succeeded, reject
      const pending = [...results.values()].filter((v) => v === 'pending')
      if (pending.length === 0 && !resolved) {
        resolved = true
        clearTimeout(timer)
        const failed = [...results.entries()].filter(([, v]) => v !== 'success')
        reject(
          new AggregateError(
            failed.map(
              ([u, err]) =>
                new Error(`${u}: ${err instanceof Error ? err.message : String(err)}`)
            )
          )
        )
      }
    }

    // Assigned synchronously inside the executor, so it is always set
    // before the outer await returns (used below for lastPublishFailedRelays)
    allSettledPromise = Promise.allSettled(
      uniqueRelayUrls.map(async (url) => {
        // eslint-disable-next-line @typescript-eslint/no-this-alias
        const that = this
        const relay = await this.pool
          .ensureRelay(url, { connectionTimeout: TIMEOUT_MS })
          .catch((err) => {
            console.debug(`[publishEvent] Failed to connect to ${url}:`, err?.message || err)
            return undefined
          })
        if (!relay) {
          onError(url, new Error('Cannot connect to relay'))
          return
        }

        relay.publishTimeout = TIMEOUT_MS
        // Only attempt NIP-42 auth once per relay to avoid retry loops
        let hasAuthed = false

        const attemptPublish = async (): Promise<void> => {
          try {
            await relay.publish(event)
            that.trackEventSeenOn(event.id, relay)
            console.debug(`[publishEvent] Success on ${url}`)
            onSuccess(url)
          } catch (error) {
            if (
              !hasAuthed &&
              error instanceof Error &&
              error.message.startsWith('auth-required') &&
              !!that.signer
            ) {
              console.debug(`[publishEvent] Auth required on ${url}, authenticating...`)
              try {
                await relay.auth((authEvt: EventTemplate) => that.signer!.signEvent(authEvt))
                hasAuthed = true
                // Retry exactly once after successful auth
                return await attemptPublish()
              } catch (authError) {
                console.debug(`[publishEvent] Auth failed on ${url}:`, authError)
                onError(url, authError instanceof Error ? authError : new Error(String(authError)))
              }
            } else {
              console.debug(
                `[publishEvent] Failed on ${url}:`,
                error instanceof Error ? error.message : error
              )
              onError(url, error instanceof Error ? error : new Error(String(error)))
            }
          }
        }

        return attemptPublish()
      })
    )
  })

  // Store background promise: resolves to failed relay URLs once all relays finish
  this.lastPublishFailedRelays = allSettledPromise!.then(() => {
    return [...results.entries()]
      .filter(([, v]) => v !== 'success')
      .map(([url]) => url)
  })
}
333
/** Broadcast an event to in-process 'newEvent' listeners. */
emitNewEvent(event: NEvent) {
  const detail = event
  this.dispatchEvent(new CustomEvent('newEvent', { detail }))
}
337
338 async signHttpAuth(url: string, method: string, description = '') {
339 if (!this.signer) {
340 throw new Error('Please login first to sign the event')
341 }
342 const event = await this.signer?.signEvent({
343 content: description,
344 kind: kinds.HTTPAuth,
345 created_at: dayjs().unix(),
346 tags: [
347 ['u', url],
348 ['method', method]
349 ]
350 })
351 return 'Nostr ' + btoa(JSON.stringify(event))
352 }
353
354 /** =========== Timeline =========== */
355
356 private generateTimelineKey(urls: string[], filter: Filter) {
357 const stableFilter: any = {}
358 Object.entries(filter)
359 .sort()
360 .forEach(([key, value]) => {
361 if (key === 'limit') return
362 if (Array.isArray(value)) {
363 stableFilter[key] = [...value].sort()
364 }
365 stableFilter[key] = value
366 })
367 const paramsStr = JSON.stringify({
368 urls: [...urls].sort(),
369 filter: stableFilter
370 })
371 const encoder = new TextEncoder()
372 const data = encoder.encode(paramsStr)
373 const hashBuffer = sha256(data)
374 const hashArray = Array.from(new Uint8Array(hashBuffer))
375 return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('')
376 }
377
378 private generateMultipleTimelinesKey(subRequests: { urls: string[]; filter: Filter }[]) {
379 const keys = subRequests.map(({ urls, filter }) => this.generateTimelineKey(urls, filter))
380 const encoder = new TextEncoder()
381 const data = encoder.encode(JSON.stringify(keys.sort()))
382 const hashBuffer = sha256(data)
383 const hashArray = Array.from(new Uint8Array(hashBuffer))
384 return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('')
385 }
386
/**
 * Subscribe to several timeline sub-requests and merge them into a single
 * stream.
 *
 * `onEvents` starts firing once at least half of the sub-requests have
 * EOSEd (eosed=true only after all of them have); `onNew` is de-duplicated
 * by event id across sub-requests. Registers a composite timeline key
 * (list of sub-timeline keys) so loadMoreTimeline can page all
 * sub-timelines together.
 */
async subscribeTimeline(
  subRequests: { urls: string[]; filter: TSubRequestFilter }[],
  {
    onEvents,
    onNew,
    onClose
  }: {
    onEvents: (events: NEvent[], eosed: boolean) => void
    onNew: (evt: NEvent) => void
    onClose?: (url: string, reason: string) => void
  },
  {
    startLogin,
    needSort = true
  }: {
    startLogin?: () => void
    needSort?: boolean
  } = {}
) {
  const newEventIdSet = new Set<string>()
  const requestCount = subRequests.length
  // only surface results once half the sub-requests have EOSEd
  const threshold = Math.floor(requestCount / 2)
  let events: NEvent[] = []
  let eosedCount = 0

  const subs = await Promise.all(
    subRequests.map(({ urls, filter }) => {
      return this._subscribeTimeline(
        urls,
        filter,
        {
          onEvents: (_events, _eosed) => {
            if (_eosed) {
              eosedCount++
            }

            // merge this sub-timeline's batch into the combined list
            events = this.mergeTimelines(events, _events)

            if (eosedCount >= threshold) {
              onEvents(events, eosedCount >= requestCount)
            }
          },
          onNew: (evt) => {
            // de-duplicate new events arriving from multiple sub-requests
            if (newEventIdSet.has(evt.id)) return
            newEventIdSet.add(evt.id)
            onNew(evt)
          },
          onClose
        },
        { startLogin, needSort }
      )
    })
  )

  const key = this.generateMultipleTimelinesKey(subRequests)
  this.timelines[key] = subs.map((sub) => sub.timelineKey)

  return {
    closer: () => {
      // neutralize callbacks so late results are ignored after close
      onEvents = () => {}
      onNew = () => {}
      subs.forEach((sub) => {
        sub.closer()
      })
    },
    timelineKey: key
  }
}
455
456 private mergeTimelines(a: NEvent[], b: NEvent[]): NEvent[] {
457 if (a.length === 0) return [...b]
458 if (b.length === 0) return [...a]
459
460 const result: NEvent[] = []
461 let i = 0
462 let j = 0
463 while (i < a.length && j < b.length) {
464 const cmp = compareEvents(a[i], b[j])
465 if (cmp > 0) {
466 result.push(a[i])
467 i++
468 } else if (cmp < 0) {
469 result.push(b[j])
470 j++
471 } else {
472 result.push(a[i])
473 i++
474 j++
475 }
476 }
477
478 return result
479 }
480
481 async loadMoreTimeline(key: string, until: number, limit: number) {
482 const timeline = this.timelines[key]
483 if (!timeline) return []
484
485 if (!Array.isArray(timeline)) {
486 return this._loadMoreTimeline(key, until, limit)
487 }
488 const timelines = await Promise.all(
489 timeline.map((key) => this._loadMoreTimeline(key, until, limit))
490 )
491
492 const eventIdSet = new Set<string>()
493 const events: NEvent[] = []
494 timelines.forEach((timeline) => {
495 timeline.forEach((evt) => {
496 if (eventIdSet.has(evt.id)) return
497 eventIdSet.add(evt.id)
498 events.push(evt)
499 })
500 })
501 return events.sort((a, b) => b.created_at - a.created_at).slice(0, limit)
502 }
503
/**
 * Low-level subscription across multiple relays with shared de-duplication.
 *
 * - `oneose(eosed)` fires per relay; `eosed` becomes true once every
 *   started relay has EOSEd (unreachable relays count as EOSEd).
 * - NIP-42 'auth-required' closes trigger a one-time auth + resubscribe
 *   when a signer exists; otherwise `startLogin` is invoked.
 * - Locally published events (via emitNewEvent) that match the filters are
 *   also delivered to `onevent`.
 * Returns an object whose close() tears down the internal listener and all
 * relay subscriptions.
 */
subscribe(
  urls: string[],
  filter: Filter | Filter[],
  {
    onevent,
    oneose,
    onclose,
    startLogin,
    onAllClose
  }: {
    onevent?: (evt: NEvent) => void
    oneose?: (eosed: boolean) => void
    onclose?: (url: string, reason: string) => void
    startLogin?: () => void
    onAllClose?: (reasons: string[]) => void
  }
) {
  const relays = Array.from(new Set(urls))
  const filters = Array.isArray(filter) ? filter : [filter]

  // eslint-disable-next-line @typescript-eslint/no-this-alias
  const that = this
  // Shared across all relays so each event id is delivered only once
  const _knownIds = new Set<string>()
  let startedCount = relays.length
  let eosedCount = 0
  let eosed = false
  let closedCount = 0
  const closeReasons: string[] = []
  const subPromises: Promise<{ close: () => void }>[] = []
  relays.forEach((url) => {
    let hasAuthed = false

    subPromises.push(startSub())

    async function startSub() {
      const relay = await that.pool.ensureRelay(url, { connectionTimeout: 5_000 }).catch(() => {
        return undefined
      })
      // cannot connect to relay
      if (!relay) {
        relayStatsService.recordFetchFailure(url)
        // count the unreachable relay as EOSEd so accounting completes
        if (!eosed) {
          eosedCount++
          eosed = eosedCount >= startedCount
          oneose?.(eosed)
        }
        return {
          close: () => {}
        }
      }

      return relay.subscribe(filters, {
        receivedEvent: (relay, id) => {
          // remember which relay delivered each event (for relay hints)
          that.trackEventSeenOn(id, relay)
        },
        alreadyHaveEvent: (id: string) => {
          const have = _knownIds.has(id)
          if (have) {
            return true
          }
          _knownIds.add(id)
          return false
        },
        onevent: (evt: NEvent) => {
          onevent?.(evt)
        },
        oneose: () => {
          relayStatsService.recordFetchSuccess(url)
          // make sure eosed is not called multiple times
          if (eosed) return

          eosedCount++
          eosed = eosedCount >= startedCount
          oneose?.(eosed)
        },
        onclose: (reason: string) => {
          // auth-required
          if (reason.startsWith('auth-required') && !hasAuthed) {
            // already logged in
            if (that.signer) {
              relay
                .auth(async (authEvt: EventTemplate) => {
                  const evt = await that.signer!.signEvent(authEvt)
                  if (!evt) {
                    throw new Error('sign event failed')
                  }
                  return evt as VerifiedEvent
                })
                .then(() => {
                  hasAuthed = true
                  // resubscribe after successful auth; counts as a new start
                  if (!eosed) {
                    startedCount++
                    subPromises.push(startSub())
                  }
                })
                .catch(() => {
                  // ignore
                })
              return
            }

            // open login dialog
            if (startLogin) {
              startLogin()
              return
            }
          }

          // close the subscription — record as fetch failure
          relayStatsService.recordFetchFailure(url)
          closedCount++
          closeReasons.push(reason)
          onclose?.(url, reason)
          if (closedCount >= startedCount) {
            onAllClose?.(closeReasons)
          }
          return
        },
        eoseTimeout: 10_000 // 10s
      })
    }
  })

  // Deliver locally published events that match this subscription's filters
  const handleNewEventFromInternal = (data: Event) => {
    const customEvent = data as CustomEvent<NEvent>
    const evt = customEvent.detail
    if (!matchFilters(filters, evt)) return

    const id = evt.id
    const have = _knownIds.has(id)
    if (have) return

    _knownIds.add(id)
    onevent?.(evt)
  }

  this.addEventListener('newEvent', handleNewEventFromInternal)

  return {
    close: () => {
      this.removeEventListener('newEvent', handleNewEventFromInternal)
      subPromises.forEach((subPromise) => {
        subPromise
          .then((sub) => {
            sub.close()
          })
          .catch((err) => {
            console.error(err)
          })
      })
    }
  }
}
657
/**
 * Subscribe to a single timeline (one relay set + one filter).
 *
 * Serves cached events immediately when a cached timeline exists (then
 * only asks relays for newer events via `since`), progressively flushes
 * accumulated events to `onEvents` every 150ms before EOSE, and after
 * EOSE maintains the timeline's ref list so post-EOSE events are inserted
 * in order and loadMoreTimeline can page backwards.
 */
private async _subscribeTimeline(
  urls: string[],
  filter: TSubRequestFilter, // filter with limit,
  {
    onEvents,
    onNew,
    onClose
  }: {
    onEvents: (events: NEvent[], eosed: boolean) => void
    onNew: (evt: NEvent) => void
    onClose?: (url: string, reason: string) => void
  },
  {
    startLogin,
    needSort = true
  }: {
    startLogin?: () => void
    needSort?: boolean
  } = {}
) {
  const relays = Array.from(new Set(urls))
  const key = this.generateTimelineKey(relays, filter)
  const timeline = this.timelines[key]
  let cachedEvents: NEvent[] = []
  let since: number | undefined
  if (timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
    cachedEvents = (await this.eventDataLoader.loadMany(timeline.refs.map(([id]) => id))).filter(
      (evt) => !!evt && !(evt instanceof Error)
    ) as NEvent[]
    if (cachedEvents.length) {
      onEvents([...cachedEvents], false)
      // only request events newer than the newest cached one
      since = cachedEvents[0].created_at + 1
    }
  }

  // eslint-disable-next-line @typescript-eslint/no-this-alias
  const that = this
  let events: NEvent[] = []
  let eosedAt: number | null = null
  let lastFlushedCount = 0
  let flushTimer: ReturnType<typeof setTimeout> | null = null

  // Progressive flush: emit accumulated events to onEvents before EOSE
  // so the UI can start rendering content as it arrives from the relay
  const scheduleFlush = () => {
    if (flushTimer !== null) return
    flushTimer = setTimeout(() => {
      flushTimer = null
      if (eosedAt || events.length === lastFlushedCount) return
      lastFlushedCount = events.length
      const sorted = needSort
        ? [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
        : [...events]
      const merged = cachedEvents.length > 0
        ? sorted.concat(cachedEvents).slice(0, filter.limit)
        : sorted
      onEvents(merged, false)
    }, 150)
  }

  const subCloser = this.subscribe(relays, since ? { ...filter, since } : filter, {
    startLogin,
    onevent: (evt: NEvent) => {
      that.addEventToCache(evt)
      // not eosed yet, push to events and schedule progressive render
      if (!eosedAt) {
        events.push(evt)
        scheduleFlush()
        return
      }
      // new event
      if (evt.created_at > eosedAt) {
        onNew(evt)
      }

      const timeline = that.timelines[key]
      if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
        return
      }

      // find the right position to insert
      let idx = 0
      for (const ref of timeline.refs) {
        if (evt.created_at > ref[1] || (evt.created_at === ref[1] && evt.id < ref[0])) {
          break
        }
        // the event is already in the cache
        if (evt.created_at === ref[1] && evt.id === ref[0]) {
          return
        }
        idx++
      }
      // the event is too old, ignore it
      if (idx >= timeline.refs.length) return

      // insert the event to the right position
      timeline.refs.splice(idx, 0, [evt.id, evt.created_at])
    },
    oneose: (eosed) => {
      // Cancel any pending progressive flush — the final sorted result replaces it
      if (flushTimer !== null) {
        clearTimeout(flushTimer)
        flushTimer = null
      }
      if (eosed && !eosedAt) {
        eosedAt = dayjs().unix()
      }
      // (algo feeds) no need to sort and cache
      if (!needSort) {
        return onEvents([...events], !!eosedAt)
      }
      if (!eosed) {
        events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
        return onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], false)
      }

      events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
      const timeline = that.timelines[key]
      // no cache yet
      if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
        that.timelines[key] = {
          refs: events.map((evt) => [evt.id, evt.created_at]),
          filter,
          urls
        }
        return onEvents([...events], true)
      }

      // Prevent concurrent requests from duplicating the same event
      const firstRefCreatedAt = timeline.refs[0][1]
      const newRefs = events
        .filter((evt) => evt.created_at > firstRefCreatedAt)
        .map((evt) => [evt.id, evt.created_at] as TTimelineRef)

      if (events.length >= filter.limit) {
        // if new refs are more than limit, means old refs are too old, replace them
        timeline.refs = newRefs
        onEvents([...events], true)
      } else {
        // merge new refs with old refs
        timeline.refs = newRefs.concat(timeline.refs)
        onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], true)
      }
    },
    onclose: onClose
  })

  return {
    timelineKey: key,
    closer: () => {
      if (flushTimer !== null) {
        clearTimeout(flushTimer)
        flushTimer = null
      }
      // neutralize callbacks so late events are ignored after close
      onEvents = () => {}
      onNew = () => {}
      subCloser.close()
    }
  }
}
818
/**
 * Page an existing timeline backwards: serve up to `limit` events with
 * created_at <= `until` from the cached refs, and fetch any remainder from
 * the timeline's relays, appending the newly found older refs.
 */
private async _loadMoreTimeline(key: string, until: number, limit: number) {
  const timeline = this.timelines[key]
  if (!timeline || Array.isArray(timeline)) return []

  const { filter, urls, refs } = timeline
  // first ref old enough for this page
  const startIdx = refs.findIndex(([, createdAt]) => createdAt <= until)
  const cachedEvents =
    startIdx >= 0
      ? ((
          await this.eventDataLoader.loadMany(
            refs.slice(startIdx, startIdx + limit).map(([id]) => id)
          )
        ).filter((evt) => !!evt && !(evt instanceof Error)) as NEvent[])
      : []
  if (cachedEvents.length >= limit) {
    return cachedEvents
  }

  // Cache could not satisfy the page — fetch the remainder from relays,
  // continuing just below the oldest cached event served above
  until = cachedEvents.length ? cachedEvents[cachedEvents.length - 1].created_at - 1 : until
  limit = limit - cachedEvents.length
  let events = await this.query(urls, { ...filter, until, limit })
  events.forEach((evt) => {
    this.addEventToCache(evt)
  })
  events = events.sort((a, b) => b.created_at - a.created_at).slice(0, limit)

  // Prevent concurrent requests from duplicating the same event
  const lastRefCreatedAt = refs.length > 0 ? refs[refs.length - 1][1] : dayjs().unix()
  timeline.refs.push(
    ...events
      .filter((evt) => evt.created_at < lastRefCreatedAt)
      .map((evt) => [evt.id, evt.created_at] as TTimelineRef)
  )
  return [...cachedEvents, ...events]
}
854
855 /** =========== Event =========== */
856
/** Relays this event has been observed on (from the pool's seen-on map). */
getSeenEventRelays(eventId: string) {
  const seen = this.pool.seenOn.get(eventId)
  return seen ? Array.from(seen.values()) : []
}
860
/** URLs of the relays this event has been observed on. */
getSeenEventRelayUrls(eventId: string) {
  return this.getSeenEventRelays(eventId).map(({ url }) => url)
}
864
/** Shareable relay hints for an event (local-network urls excluded). */
getEventHints(eventId: string) {
  const urls = this.getSeenEventRelayUrls(eventId)
  return urls.filter((url) => !isLocalNetworkUrl(url))
}
868
/** First shareable relay hint for an event, or '' when none exists. */
getEventHint(eventId: string) {
  for (const url of this.getSeenEventRelayUrls(eventId)) {
    if (!isLocalNetworkUrl(url)) return url
  }
  return ''
}
872
/** Record that `eventId` was seen on `relay` in the pool's seen-on map. */
trackEventSeenOn(eventId: string, relay: AbstractRelay) {
  const existing = this.pool.seenOn.get(eventId)
  if (existing) {
    existing.add(relay)
    return
  }
  this.pool.seenOn.set(eventId, new Set([relay]))
}
881
/**
 * One-shot fetch: subscribe with the filter, collect events until every
 * relay EOSEs (or all subscriptions close), then resolve with the events.
 */
private async query(urls: string[], filter: Filter | Filter[], onevent?: (evt: NEvent) => void) {
  return new Promise<NEvent[]>((resolve) => {
    const collected: NEvent[] = []
    const sub = this.subscribe(urls, filter, {
      onevent: (evt) => {
        onevent?.(evt)
        collected.push(evt)
      },
      oneose: (eosed) => {
        if (!eosed) return
        sub.close()
        resolve(collected)
      },
      onAllClose: () => {
        resolve(collected)
      }
    })
  })
}
902
903 /**
904 * Query with NRC cache relay integration
905 *
906 * Query flow:
907 * 1. Check IndexedDB cache first
908 * 2. Query NRC cache relays with 400ms timeout
909 * 3. Query regular relays for remaining events
910 * 4. Push loaded events to cache relays in background
911 */
912 async queryWithCacheRelays(
913 urls: string[],
914 filter: Filter | Filter[],
915 {
916 onevent,
917 skipCache = false
918 }: {
919 onevent?: (evt: NEvent) => void
920 skipCache?: boolean
921 } = {}
922 ): Promise<NEvent[]> {
923 const filters = Array.isArray(filter) ? filter : [filter]
924 const seenIds = new Set<string>()
925 const allEvents: NEvent[] = []
926
927 const addEvent = (evt: NEvent) => {
928 if (!seenIds.has(evt.id)) {
929 seenIds.add(evt.id)
930 allEvents.push(evt)
931 onevent?.(evt)
932 }
933 }
934
935 // Step 1: Check IndexedDB cache
936 if (!skipCache) {
937 try {
938 const cachedEvents = await indexedDb.queryCachedEvents(filters)
939 for (const evt of cachedEvents) {
940 addEvent(evt as NEvent)
941 }
942 if (cachedEvents.length > 0) {
943 console.log(`[ClientService] Found ${cachedEvents.length} events in IndexedDB cache`)
944 }
945 } catch (err) {
946 console.warn('[ClientService] IndexedDB cache query failed:', err)
947 }
948 }
949
950 // Step 2: Query NRC cache relays (400ms timeout)
951 const cacheRelayResult = await nrcCacheRelayService.queryWithTimeout(filters, 400)
952 if (cacheRelayResult.fromCache && cacheRelayResult.events.length > 0) {
953 for (const evt of cacheRelayResult.events) {
954 addEvent(evt as NEvent)
955 }
956 console.log(
957 `[ClientService] Got ${cacheRelayResult.events.length} events from NRC cache relay`
958 )
959 }
960
961 // Step 3: Query regular relays for remaining events
962 const regularEvents = await this.query(urls, filter)
963 const newEvents: NEvent[] = []
964 for (const evt of regularEvents) {
965 if (!seenIds.has(evt.id)) {
966 addEvent(evt)
967 newEvents.push(evt)
968 }
969 }
970
971 // Step 4: Cache new events and push to NRC cache relays in background
972 if (newEvents.length > 0) {
973 // Cache in IndexedDB
974 indexedDb.putCachedEvents(newEvents).catch((err) => {
975 console.warn('[ClientService] Failed to cache events:', err)
976 })
977
978 // Push to NRC cache relays
979 nrcCacheRelayService.queueEventsForPush(newEvents)
980 }
981
982 return allEvents
983 }
984
985 async fetchEvents(
986 urls: string[],
987 filter: Filter | Filter[],
988 {
989 onevent,
990 cache = false,
991 useCacheRelay = false
992 }: {
993 onevent?: (evt: NEvent) => void
994 cache?: boolean
995 useCacheRelay?: boolean
996 } = {}
997 ) {
998 const relays = Array.from(new Set(urls))
999 // Use provided relays, or fall back to current user's relays, or bootstrap relays
1000 let targetRelays = relays.length > 0 ? relays : this.currentRelays
1001
1002 // Fall back to discovered/bootstrap relays if no relays available (essential for initial login)
1003 if (targetRelays.length === 0) {
1004 targetRelays = this.getFallbackRelays()
1005 }
1006
1007 // Use cache relay integration if enabled
1008 if (useCacheRelay) {
1009 const events = await this.queryWithCacheRelays(targetRelays, filter, { onevent })
1010 if (cache) {
1011 events.forEach((evt) => {
1012 this.addEventToCache(evt)
1013 })
1014 }
1015 return events
1016 }
1017
1018 // Standard query path
1019 const events = await this.query(targetRelays, filter, onevent)
1020 if (cache) {
1021 events.forEach((evt) => {
1022 this.addEventToCache(evt)
1023 })
1024 }
1025 return events
1026 }
1027
1028 async fetchEvent(id: string): Promise<NEvent | undefined> {
1029 if (!/^[0-9a-f]{64}$/.test(id)) {
1030 let eventId: string | undefined
1031 let coordinate: string | undefined
1032 const { type, data } = nip19.decode(id)
1033 switch (type) {
1034 case 'note':
1035 eventId = data
1036 break
1037 case 'nevent':
1038 eventId = data.id
1039 break
1040 case 'naddr':
1041 coordinate = getReplaceableCoordinate(data.kind, data.pubkey, data.identifier)
1042 break
1043 }
1044 if (coordinate) {
1045 const cache = this.replaceableEventCacheMap.get(coordinate)
1046 if (cache) {
1047 return cache
1048 }
1049 const indexedDbCache = await indexedDb.getReplaceableEventByCoordinate(coordinate)
1050 if (indexedDbCache) {
1051 this.replaceableEventCacheMap.set(coordinate, indexedDbCache)
1052 return indexedDbCache
1053 }
1054 } else if (eventId) {
1055 const cache = this.eventCacheMap.get(eventId)
1056 if (cache) {
1057 return cache
1058 }
1059 }
1060 }
1061 return this.eventDataLoader.load(id)
1062 }
1063
/**
 * Prime the event caches with an event; for replaceable kinds, keep only
 * the newest event per coordinate.
 */
addEventToCache(event: NEvent) {
  this.eventDataLoader.prime(event.id, Promise.resolve(event))
  if (!isReplaceableEvent(event.kind)) return
  const coordinate = getReplaceableCoordinateFromEvent(event)
  const existing = this.replaceableEventCacheMap.get(coordinate)
  if (!existing || compareEvents(event, existing) > 0) {
    this.replaceableEventCacheMap.set(coordinate, event)
  }
}
1074
// NOTE(review): method name has a typo ("Replaeable") but is public API —
// renaming would break callers; consider adding a correctly spelled alias.
/** Return the cached replaceable event for a coordinate, if any. */
getReplaeableEventFromCache(coordinate: string): NEvent | undefined {
  return this.replaceableEventCacheMap.get(coordinate)
}
1078
1079 private async fetchEventById(relayUrls: string[], id: string): Promise<NEvent | undefined> {
1080 const event = await this.fetchEventFromBigRelaysDataloader.load(id)
1081 if (event) {
1082 return event
1083 }
1084
1085 return this.fetchEventFromRelays(relayUrls, { ids: [id], limit: 1 })
1086 }
1087
  /**
   * Resolve an event by hex id or nip19 identifier (note / nevent / naddr),
   * querying progressively wider relay sets:
   *   1. big relays (batched dataloader) plus relay hints for id lookups,
   *   2. the author's write relays,
   *   3. batches of discovered relays with a 5s cap per batch.
   * @throws Error when `id` is neither a hex id nor a supported nip19 type.
   */
  private async _fetchEvent(id: string): Promise<NEvent | undefined> {
    let filter: Filter | undefined
    let relays: string[] = []
    let author: string | undefined
    if (/^[0-9a-f]{64}$/.test(id)) {
      filter = { ids: [id] }
    } else {
      const { type, data } = nip19.decode(id)
      switch (type) {
        case 'note':
          filter = { ids: [data] }
          break
        case 'nevent':
          filter = { ids: [data.id] }
          if (data.relays) relays = data.relays
          if (data.author) author = data.author
          break
        case 'naddr':
          // Addressable event: match by author + kind (+ optional 'd' tag).
          filter = {
            authors: [data.pubkey],
            kinds: [data.kind],
            limit: 1
          }
          author = data.pubkey
          if (data.identifier) {
            filter['#d'] = [data.identifier]
          }
          if (data.relays) relays = data.relays
      }
    }
    if (!filter) {
      // Unsupported nip19 types (npub, nprofile, ...) end up here.
      throw new Error('Invalid id')
    }

    let event: NEvent | undefined
    if (filter.ids?.length) {
      event = await this.fetchEventById(relays, filter.ids[0])
    }

    // Try the author's write relays (also covers naddr lookups).
    if (!event && author) {
      const relayList = await this.fetchRelayList(author)
      event = await this.fetchEventFromRelays(relayList.write.slice(0, 5), filter)
    }

    // Phase 4: Progressive querying through discovered relays
    if (!event) {
      const alreadyTried = new Set<string>([
        ...this.currentRelays,
        ...this.getFallbackRelays(),
        ...relays
      ])

      const batches = relayDiscoveryService.getRelayBatches(10, 0, 50, alreadyTried)

      for (const batch of batches) {
        if (batch.length === 0) continue

        // Cap each batch at 5s; a timed-out query resolves to no events.
        const batchEvents = await Promise.race([
          this.query(batch, filter),
          new Promise<NEvent[]>((resolve) => setTimeout(() => resolve([]), 5000))
        ])

        if (batchEvents.length > 0) {
          // Prefer the newest matching event.
          event = batchEvents.sort((a, b) => b.created_at - a.created_at)[0]
          if (event) {
            this.addEventToCache(event)
            break
          }
        }
      }
    }

    if (event && event.id !== id) {
      // `id` was a nip19 string, so cache the event under its real id too.
      this.addEventToCache(event)
    }

    // Don't cache failed lookups - allow retry on next request
    if (!event) {
      this.eventCacheMap.delete(id)
    }

    return event
  }
1171
1172 private async fetchEventFromRelays(relayUrls: string[], filter: Filter) {
1173 if (!relayUrls.length) return
1174
1175 const events = await this.query(relayUrls, filter)
1176 return events.sort((a, b) => b.created_at - a.created_at)[0]
1177 }
1178
1179 private async fetchEventsFromBigRelays(ids: readonly string[]) {
1180 // Use current relays, falling back to discovered/bootstrap relays
1181 const relays = this.currentRelays.length > 0 ? this.currentRelays : this.getFallbackRelays()
1182
1183 const events = await this.query(relays, {
1184 ids: Array.from(new Set(ids)),
1185 limit: ids.length
1186 })
1187 const eventsMap = new Map<string, NEvent>()
1188 for (const event of events) {
1189 eventsMap.set(event.id, event)
1190 }
1191
1192 return ids.map((id) => eventsMap.get(id))
1193 }
1194
1195 /** =========== Following favorite relays =========== */
1196
  // Per-pubkey LRU over the (async) favorite-relay aggregation; on a cache
  // miss the LRU's fetchMethod delegates to _fetchFollowingFavoriteRelays.
  private followingFavoriteRelaysCache = new LRUCache<string, Promise<[string, string[]][]>>({
    max: 10,
    fetchMethod: this._fetchFollowingFavoriteRelays.bind(this)
  })
1201
  /** Cached wrapper around _fetchFollowingFavoriteRelays (per-pubkey LRU). */
  async fetchFollowingFavoriteRelays(pubkey: string) {
    return this.followingFavoriteRelaysCache.fetch(pubkey)
  }
1205
1206 private async _fetchFollowingFavoriteRelays(pubkey: string) {
1207 const fetchNewData = async () => {
1208 const followings = await this.fetchFollowings(pubkey)
1209 // Fetch from authors' relays instead of hardcoded big relays
1210 const events = await this.fetchEvents([], {
1211 authors: followings,
1212 kinds: [ExtendedKind.FAVORITE_RELAYS, kinds.Relaysets],
1213 limit: 1000
1214 })
1215 const alreadyExistsFavoriteRelaysPubkeySet = new Set<string>()
1216 const alreadyExistsRelaySetsPubkeySet = new Set<string>()
1217 const uniqueEvents: NEvent[] = []
1218 events
1219 .sort((a, b) => b.created_at - a.created_at)
1220 .forEach((event) => {
1221 if (event.kind === ExtendedKind.FAVORITE_RELAYS) {
1222 if (alreadyExistsFavoriteRelaysPubkeySet.has(event.pubkey)) return
1223 alreadyExistsFavoriteRelaysPubkeySet.add(event.pubkey)
1224 } else if (event.kind === kinds.Relaysets) {
1225 if (alreadyExistsRelaySetsPubkeySet.has(event.pubkey)) return
1226 alreadyExistsRelaySetsPubkeySet.add(event.pubkey)
1227 } else {
1228 return
1229 }
1230 uniqueEvents.push(event)
1231 })
1232
1233 const relayMap = new Map<string, Set<string>>()
1234 uniqueEvents.forEach((event) => {
1235 event.tags.forEach(([tagName, tagValue]) => {
1236 if (tagName === 'relay' && tagValue && isWebsocketUrl(tagValue)) {
1237 const url = normalizeUrl(tagValue)
1238 relayMap.set(url, (relayMap.get(url) || new Set()).add(event.pubkey))
1239 }
1240 })
1241 })
1242 const relayMapEntries = Array.from(relayMap.entries())
1243 .sort((a, b) => b[1].size - a[1].size)
1244 .map(([url, pubkeys]) => [url, Array.from(pubkeys)]) as [string, string[]][]
1245
1246 indexedDb.putFollowingFavoriteRelays(pubkey, relayMapEntries)
1247 return relayMapEntries
1248 }
1249
1250 const cached = await indexedDb.getFollowingFavoriteRelays(pubkey)
1251 if (cached) {
1252 fetchNewData()
1253 return cached
1254 }
1255 return fetchNewData()
1256 }
1257
1258 /** =========== Followings =========== */
1259
1260 async initUserIndexFromFollowings(pubkey: string, signal: AbortSignal) {
1261 const followings = await this.fetchFollowings(pubkey, false)
1262 for (let i = 0; i * 20 < followings.length; i++) {
1263 if (signal.aborted) return
1264 await Promise.all(
1265 followings
1266 .slice(i * 20, (i + 1) * 20)
1267 .map((pubkey) => this.fetchProfile(pubkey, false, false))
1268 )
1269 await new Promise((resolve) => setTimeout(resolve, 1000))
1270 }
1271 }
1272
1273 /** =========== Profile =========== */
1274
1275 async searchProfiles(relayUrls: string[], filter: Filter): Promise<TProfile[]> {
1276 const events = await this.query(relayUrls, {
1277 ...filter,
1278 kinds: [kinds.Metadata]
1279 })
1280
1281 const profileEvents = events.sort((a, b) => b.created_at - a.created_at)
1282 await Promise.allSettled(profileEvents.map((profile) => this.addUsernameToIndex(profile)))
1283 profileEvents.forEach((profile) => this.updateProfileEventCache(profile))
1284 return profileEvents.map((profileEvent) => getProfileFromEvent(profileEvent))
1285 }
1286
1287 async searchNpubsFromLocal(query: string, limit: number = 100) {
1288 const result = await this.userIndex.searchAsync(query, { limit })
1289 return result
1290 .map((pubkey) => Pubkey.tryFromString(pubkey as string)?.npub)
1291 .filter(Boolean) as string[]
1292 }
1293
1294 async searchProfilesFromLocal(query: string, limit: number = 100) {
1295 const npubs = await this.searchNpubsFromLocal(query, limit)
1296 const profiles = await Promise.all(npubs.map((npub) => this.fetchProfile(npub)))
1297 return profiles.filter((profile) => !!profile) as TProfile[]
1298 }
1299
1300 private async addUsernameToIndex(profileEvent: NEvent) {
1301 try {
1302 const profileObj = JSON.parse(profileEvent.content)
1303 const text = [
1304 profileObj.display_name?.trim() ?? '',
1305 profileObj.name?.trim() ?? '',
1306 profileObj.nip05
1307 ?.split('@')
1308 .map((s: string) => s.trim())
1309 .join(' ') ?? ''
1310 ].join(' ')
1311 if (!text) return
1312
1313 await this.userIndex.addAsync(profileEvent.pubkey, text)
1314 } catch {
1315 return
1316 }
1317 }
1318
1319 private async _fetchProfileEvent(id: string): Promise<NEvent | undefined> {
1320 let pubkey: string | undefined
1321 let hintRelays: string[] = []
1322 if (/^[0-9a-f]{64}$/.test(id)) {
1323 pubkey = id
1324 } else {
1325 const { data, type } = nip19.decode(id)
1326 switch (type) {
1327 case 'npub':
1328 pubkey = data
1329 break
1330 case 'nprofile':
1331 pubkey = data.pubkey
1332 if (data.relays) hintRelays = data.relays
1333 break
1334 }
1335 }
1336
1337 if (!pubkey) {
1338 throw new Error('Invalid id')
1339 }
1340
1341 // Phase 3: Improved relay selection for profile fetching
1342 // 1. Try relay hints from nprofile first (most specific)
1343 if (hintRelays.length > 0) {
1344 const profileEvent = await this.fetchEventFromRelays(hintRelays.slice(0, 3), {
1345 authors: [pubkey],
1346 kinds: [kinds.Metadata],
1347 limit: 1
1348 })
1349 if (profileEvent) {
1350 this.addUsernameToIndex(profileEvent)
1351 indexedDb.putReplaceableEvent(profileEvent)
1352 return profileEvent
1353 }
1354 }
1355
1356 // 2. Try author's relays from cache/network
1357 const authorRelayList = await relayListCacheService.getRelayList(pubkey)
1358 if (authorRelayList && authorRelayList.write.length > 0) {
1359 const profileEvent = await this.fetchEventFromRelays(authorRelayList.write.slice(0, 5), {
1360 authors: [pubkey],
1361 kinds: [kinds.Metadata],
1362 limit: 1
1363 })
1364 if (profileEvent) {
1365 this.addUsernameToIndex(profileEvent)
1366 indexedDb.putReplaceableEvent(profileEvent)
1367 return profileEvent
1368 }
1369 }
1370
1371 // 3. Fall back to current relays (batched for efficiency)
1372 const profileFromCurrentRelays = await this.replaceableEventFromBigRelaysDataloader.load({
1373 pubkey,
1374 kind: kinds.Metadata
1375 })
1376 if (profileFromCurrentRelays) {
1377 this.addUsernameToIndex(profileFromCurrentRelays)
1378 return profileFromCurrentRelays
1379 }
1380
1381 return undefined
1382 }
1383
  // Batches and caches profile lookups. Failed lookups resolve to null so a
  // single bad id doesn't reject the whole batch.
  private profileDataloader = new DataLoader<string, TProfile | null, string>(async (ids) => {
    const results = await Promise.allSettled(ids.map((id) => this._fetchProfile(id)))
    return results.map((res) => (res.status === 'fulfilled' ? res.value : null))
  })
1388
1389 async fetchProfile(
1390 id: string,
1391 skipCache = false,
1392 updateCacheInBackground = true
1393 ): Promise<TProfile | null> {
1394 if (skipCache) {
1395 return this._fetchProfile(id)
1396 }
1397
1398 const pk = Pubkey.tryFromString(id)
1399 if (!pk) throw new Error('Invalid id')
1400 const localProfileEvent = await indexedDb.getReplaceableEvent(pk.hex, kinds.Metadata)
1401 if (localProfileEvent) {
1402 if (updateCacheInBackground) {
1403 this.profileDataloader.load(id) // update cache in background
1404 }
1405 const localProfile = getProfileFromEvent(localProfileEvent)
1406 return localProfile
1407 }
1408 return await this.profileDataloader.load(id)
1409 }
1410
1411 private async _fetchProfile(id: string): Promise<TProfile | null> {
1412 const profileEvent = await this._fetchProfileEvent(id)
1413 if (profileEvent) {
1414 return getProfileFromEvent(profileEvent)
1415 }
1416
1417 const pk = Pubkey.tryFromString(id)
1418 if (!pk) return null
1419 return { pubkey: pk.hex, npub: pk.npub, username: pk.formatNpub(12) }
1420 }
1421
  /** Persist a newly seen profile event and prime the big-relay cache. */
  async updateProfileEventCache(event: NEvent) {
    await this.updateReplaceableEventFromBigRelaysCache(event)
  }
1425
1426 /** =========== Relay list =========== */
1427
1428 async fetchRelayList(pubkey: string): Promise<TRelayList> {
1429 const [relayList] = await this.fetchRelayLists([pubkey])
1430 return relayList
1431 }
1432
1433 async fetchRelayLists(pubkeys: string[]): Promise<TRelayList[]> {
1434 const relayEvents = await this.fetchReplaceableEventsFromBigRelays(pubkeys, kinds.RelayList)
1435
1436 return relayEvents.map((event) => {
1437 if (event) {
1438 // Cache the relay list for future use
1439 relayListCacheService.setRelayList(event)
1440 return getRelayListFromEvent(event, storage.getFilterOutOnionRelays())
1441 }
1442 // No relay list found - use discovered/bootstrap relays as fallback
1443 const fallback = this.getFallbackRelays()
1444 return {
1445 write: fallback,
1446 read: fallback,
1447 originalRelays: []
1448 }
1449 })
1450 }
1451
  /** Bypass the dataloader cache and re-fetch `pubkey`'s relay list now. */
  async forceUpdateRelayListEvent(pubkey: string) {
    await this.replaceableEventFromBigRelaysBatchLoadFn([{ pubkey, kind: kinds.RelayList }])
  }
1455
  /** Persist a newly seen relay-list event and prime the big-relay cache. */
  async updateRelayListCache(event: NEvent) {
    return await this.updateReplaceableEventFromBigRelaysCache(event)
  }
1459
1460 /** =========== Replaceable event from big relays dataloader =========== */
1461
  // Batches (pubkey, kind) lookups against the current/fallback relays.
  // Requests issued within 50ms are coalesced, up to 500 per batch; the
  // cache key does not include a 'd' identifier.
  private replaceableEventFromBigRelaysDataloader = new DataLoader<
    { pubkey: string; kind: number },
    NEvent | null,
    string
  >(this.replaceableEventFromBigRelaysBatchLoadFn.bind(this), {
    batchScheduleFn: (callback) => setTimeout(callback, 50),
    maxBatchSize: 500,
    cacheKeyFn: ({ pubkey, kind }) => `${pubkey}:${kind}`
  })
1471
1472 private async replaceableEventFromBigRelaysBatchLoadFn(
1473 params: readonly { pubkey: string; kind: number }[]
1474 ) {
1475 const groups = new Map<number, string[]>()
1476 params.forEach(({ pubkey, kind }) => {
1477 if (!groups.has(kind)) {
1478 groups.set(kind, [])
1479 }
1480 groups.get(kind)!.push(pubkey)
1481 })
1482
1483 const eventsMap = new Map<string, NEvent>()
1484 // Use current relays, falling back to discovered/bootstrap relays
1485 const relays = this.currentRelays.length > 0 ? this.currentRelays : this.getFallbackRelays()
1486
1487 await Promise.allSettled(
1488 Array.from(groups.entries()).map(async ([kind, pubkeys]) => {
1489 const events = await this.query(relays, {
1490 authors: pubkeys,
1491 kinds: [kind]
1492 })
1493
1494 for (const event of events) {
1495 const key = `${event.pubkey}:${event.kind}`
1496 const existing = eventsMap.get(key)
1497 if (!existing || existing.created_at < event.created_at) {
1498 eventsMap.set(key, event)
1499 }
1500 }
1501 })
1502 )
1503
1504 return params.map(({ pubkey, kind }) => {
1505 const key = `${pubkey}:${kind}`
1506 const event = eventsMap.get(key)
1507 if (event) {
1508 indexedDb.putReplaceableEvent(event)
1509 return event
1510 } else {
1511 indexedDb.putNullReplaceableEvent(pubkey, kind)
1512 return null
1513 }
1514 })
1515 }
1516
1517 private async fetchReplaceableEventsFromBigRelays(pubkeys: string[], kind: number) {
1518 const events = await indexedDb.getManyReplaceableEvents(pubkeys, kind)
1519 const nonExistingPubkeyIndexMap = new Map<string, number>()
1520 const existingPubkeys: string[] = []
1521 pubkeys.forEach((pubkey, i) => {
1522 if (events[i] === undefined) {
1523 nonExistingPubkeyIndexMap.set(pubkey, i)
1524 } else {
1525 existingPubkeys.push(pubkey)
1526 }
1527 })
1528 const newEvents = await this.replaceableEventFromBigRelaysDataloader.loadMany(
1529 Array.from(nonExistingPubkeyIndexMap.keys()).map((pubkey) => ({ pubkey, kind }))
1530 )
1531 newEvents.forEach((event) => {
1532 if (event && !(event instanceof Error)) {
1533 const index = nonExistingPubkeyIndexMap.get(event.pubkey)
1534 if (index !== undefined) {
1535 events[index] = event
1536 }
1537 }
1538 })
1539
1540 this.replaceableEventFromBigRelaysDataloader.loadMany(
1541 existingPubkeys.map((pubkey) => ({ pubkey, kind }))
1542 ) // update cache in background
1543
1544 return events
1545 }
1546
1547 private async updateReplaceableEventFromBigRelaysCache(event: NEvent) {
1548 const newEvent = await indexedDb.putReplaceableEvent(event)
1549 if (newEvent.id !== event.id) {
1550 return newEvent
1551 }
1552
1553 this.replaceableEventFromBigRelaysDataloader.clear({ pubkey: event.pubkey, kind: event.kind })
1554 this.replaceableEventFromBigRelaysDataloader.prime(
1555 { pubkey: event.pubkey, kind: event.kind },
1556 Promise.resolve(event)
1557 )
1558 return newEvent
1559 }
1560
1561 /** =========== Replaceable event dataloader =========== */
1562
  // Batches (pubkey, kind, d) lookups against each author's own relays.
  // Unlike the big-relay loader, the cache key includes the 'd' identifier,
  // so addressable events are cached per identifier.
  private replaceableEventDataLoader = new DataLoader<
    { pubkey: string; kind: number; d?: string },
    NEvent | null,
    string
  >(this.replaceableEventBatchLoadFn.bind(this), {
    cacheKeyFn: ({ pubkey, kind, d }) => `${kind}:${pubkey}:${d ?? ''}`
  })
1570
  /**
   * Batch-load replaceable events grouped by author: for each author, builds
   * one filter per kind (collecting the requested 'd' identifiers), queries
   * the author's relays, and keeps only the newest event per coordinate.
   */
  private async replaceableEventBatchLoadFn(
    params: readonly { pubkey: string; kind: number; d?: string }[]
  ) {
    // Group requests by author so each author's relays are queried once.
    const groups = new Map<string, { kind: number; d?: string }[]>()
    params.forEach(({ pubkey, kind, d }) => {
      if (!groups.has(pubkey)) {
        groups.set(pubkey, [])
      }
      groups.get(pubkey)!.push({ kind: kind, d })
    })

    const eventMap = new Map<string, NEvent | null>()
    await Promise.allSettled(
      Array.from(groups.entries()).map(async ([pubkey, _params]) => {
        // One filter per kind; '#d' lists every requested identifier of that kind.
        const groupByKind = new Map<number, string[]>()
        _params.forEach(({ kind, d }) => {
          if (!groupByKind.has(kind)) {
            groupByKind.set(kind, [])
          }
          if (d) {
            groupByKind.get(kind)!.push(d)
          }
        })
        const filters = Array.from(groupByKind.entries()).map(
          ([kind, dList]) =>
            (dList.length > 0
              ? {
                  authors: [pubkey],
                  kinds: [kind],
                  '#d': dList
                }
              : { authors: [pubkey], kinds: [kind] }) as Filter
        )
        const relayList = await this.fetchRelayList(pubkey)
        // Use author's relays, falling back to current relays, then discovered/bootstrap relays
        let authorRelays = relayList.write.length > 0 ? relayList.write : this.currentRelays
        if (authorRelays.length === 0) {
          authorRelays = this.getFallbackRelays()
        }
        const relays = authorRelays.slice(0, 5)
        const events = await this.query(relays, filters)

        // Keep the newest event per coordinate. NOTE(review): assumes
        // getReplaceableCoordinateFromEvent produces `kind:pubkey:d`, matching
        // the key format used in the lookup below — verify for kinds without
        // a 'd' tag.
        for (const event of events) {
          const key = getReplaceableCoordinateFromEvent(event)
          const existing = eventMap.get(key)
          if (!existing || existing.created_at < event.created_at) {
            eventMap.set(key, event)
          }
        }
      })
    )

    return params.map(({ pubkey, kind, d }) => {
      const key = `${kind}:${pubkey}:${d ?? ''}`
      const event = eventMap.get(key)
      // Pin lists are intentionally never persisted or negative-cached here.
      if (kind === kinds.Pinlist) return event ?? null

      if (event) {
        indexedDb.putReplaceableEvent(event)
        return event
      } else {
        // Record the miss so subsequent lookups resolve locally.
        indexedDb.putNullReplaceableEvent(pubkey, kind, d)
        return null
      }
    })
  }
1637
1638 private async fetchReplaceableEvent(
1639 pubkey: string,
1640 kind: number,
1641 d?: string,
1642 updateCache = true
1643 ) {
1644 const storedEvent = await indexedDb.getReplaceableEvent(pubkey, kind, d)
1645 if (storedEvent !== undefined) {
1646 if (updateCache) {
1647 this.replaceableEventDataLoader.load({ pubkey, kind, d }) // update cache in background
1648 }
1649 return storedEvent
1650 }
1651
1652 return await this.replaceableEventDataLoader.load({ pubkey, kind, d })
1653 }
1654
1655 private async updateReplaceableEventCache(event: NEvent) {
1656 const newEvent = await indexedDb.putReplaceableEvent(event)
1657 if (newEvent.id !== event.id) {
1658 return
1659 }
1660
1661 this.replaceableEventDataLoader.clear({ pubkey: event.pubkey, kind: event.kind })
1662 this.replaceableEventDataLoader.prime(
1663 { pubkey: event.pubkey, kind: event.kind },
1664 Promise.resolve(event)
1665 )
1666 }
1667
1668 /** =========== Replaceable event =========== */
1669
  /** Fetch the user's kind-3 contact-list event. */
  async fetchFollowListEvent(pubkey: string, updateCache = true) {
    return await this.fetchReplaceableEvent(pubkey, kinds.Contacts, undefined, updateCache)
  }
1673
1674 async fetchFollowings(pubkey: string, updateCache = true) {
1675 const followListEvent = await this.fetchFollowListEvent(pubkey, updateCache)
1676 return followListEvent ? getPubkeysFromPTags(followListEvent.tags) : []
1677 }
1678
  /** Push a newly seen contact-list event into the replaceable-event cache. */
  async updateFollowListCache(evt: NEvent) {
    await this.updateReplaceableEventCache(evt)
  }
1682
  /** Fetch the user's mute-list event. */
  async fetchMuteListEvent(pubkey: string) {
    return await this.fetchReplaceableEvent(pubkey, kinds.Mutelist)
  }
1686
  /** Fetch the user's bookmark-list event. */
  async fetchBookmarkListEvent(pubkey: string) {
    return this.fetchReplaceableEvent(pubkey, kinds.BookmarkList)
  }
1690
  /** Fetch the user's Blossom server-list event. */
  async fetchBlossomServerListEvent(pubkey: string) {
    return await this.fetchReplaceableEvent(pubkey, ExtendedKind.BLOSSOM_SERVER_LIST)
  }
1694
1695 async fetchBlossomServerList(pubkey: string) {
1696 const evt = await this.fetchBlossomServerListEvent(pubkey)
1697 return evt ? getServersFromServerTags(evt.tags) : []
1698 }
1699
  /** Fetch the user's pin-list event. */
  async fetchPinListEvent(pubkey: string) {
    return this.fetchReplaceableEvent(pubkey, kinds.Pinlist)
  }
1703
  /** Fetch the raw NIP-65 relay-list event (not the parsed relay list). */
  async fetchRelayListEvent(pubkey: string) {
    return this.fetchReplaceableEvent(pubkey, kinds.RelayList)
  }
1707
  /** Fetch the user's favorite-relays event. */
  async fetchFavoriteRelaysEvent(pubkey: string) {
    return this.fetchReplaceableEvent(pubkey, ExtendedKind.FAVORITE_RELAYS)
  }
1711
  /** Fetch the user's emoji-list event. */
  async fetchUserEmojiListEvent(pubkey: string) {
    return this.fetchReplaceableEvent(pubkey, kinds.UserEmojiList)
  }
1715
  /** Fetch the user's pinned-users event. */
  async fetchPinnedUsersList(pubkey: string) {
    return this.fetchReplaceableEvent(pubkey, ExtendedKind.PINNED_USERS)
  }
1719
  /** Push a newly seen Blossom server-list event into the cache. */
  async updateBlossomServerListEventCache(evt: NEvent) {
    await this.updateReplaceableEventCache(evt)
  }
1723
1724 async fetchEmojiSetEvents(pointers: string[], updateCacheInBackground = true) {
1725 const params = pointers
1726 .map((pointer) => {
1727 const [kindStr, pubkey, d = ''] = pointer.split(':')
1728 if (!pubkey || !kindStr) return null
1729
1730 const kind = parseInt(kindStr, 10)
1731 if (kind !== kinds.Emojisets) return null
1732
1733 return { pubkey, kind, d }
1734 })
1735 .filter(Boolean) as { pubkey: string; kind: number; d: string }[]
1736 return await Promise.all(
1737 params.map(({ pubkey, kind, d }) =>
1738 this.fetchReplaceableEvent(pubkey, kind, d, updateCacheInBackground)
1739 )
1740 )
1741 }
1742
1743 // ================= Utils =================
1744
1745 async generateSubRequestsForPubkeys(pubkeys: string[], myPubkey?: string | null) {
1746 // If many websocket connections are initiated simultaneously, it will be
1747 // very slow on Safari (for unknown reason)
1748 if (isSafari()) {
1749 let urls = this.currentRelays
1750 if (myPubkey) {
1751 const relayList = await this.fetchRelayList(myPubkey)
1752 urls = relayList.read.length > 0 ? relayList.read.slice(0, 5) : this.currentRelays
1753 }
1754 return [{ urls, filter: { authors: pubkeys } }]
1755 }
1756
1757 const relayLists = await this.fetchRelayLists(pubkeys)
1758 const group: Record<string, Set<string>> = {}
1759 relayLists.forEach((relayList, index) => {
1760 relayList.write.slice(0, 4).forEach((url) => {
1761 if (!group[url]) {
1762 group[url] = new Set()
1763 }
1764 group[url].add(pubkeys[index])
1765 })
1766 })
1767
1768 const relayCount = Object.keys(group).length
1769 const coveredCount = new Map<string, number>()
1770 Object.entries(group)
1771 .sort(([, a], [, b]) => b.size - a.size)
1772 .forEach(([url, pubkeys]) => {
1773 if (
1774 relayCount > 10 &&
1775 pubkeys.size < 10 &&
1776 Array.from(pubkeys).every((pubkey) => (coveredCount.get(pubkey) ?? 0) >= 2)
1777 ) {
1778 delete group[url]
1779 } else {
1780 pubkeys.forEach((pubkey) => {
1781 coveredCount.set(pubkey, (coveredCount.get(pubkey) ?? 0) + 1)
1782 })
1783 }
1784 })
1785
1786 return Object.entries(group).map(([url, authors]) => ({
1787 urls: [url],
1788 filter: { authors: Array.from(authors) }
1789 }))
1790 }
1791 }
1792
// Module-level singleton; the whole app shares this one client instance.
const instance = ClientService.getInstance()
export default instance
1795