crawler.mx
// Package crawler discovers Nostr relays by extracting relay URLs
// from kind 10002 (relay list metadata) events stored locally.
package crawler

import (
	"smesh.lol/pkg/nostr/filter"
	"smesh.lol/pkg/nostr/kind"
	"smesh.lol/pkg/store"
)

// Crawler discovers relay URLs from the local store.
type Crawler struct {
	store *store.Engine
}

// New creates a crawler.
func New(s *store.Engine) *Crawler { return &Crawler{store: s} }

// Relay is a discovered relay with its usage hints.
type Relay struct {
	URL   string
	Read  bool
	Write bool
}

// Discover scans the store for kind 10002 events and extracts relay URLs.
// Returns deduplicated relays.
func (c *Crawler) Discover() []Relay {
	f := &filter.F{
		Kinds: kind.NewS(kind.RelayListMetadata),
	}
	events, err := c.store.QueryEvents(f)
	if err != nil {
		return nil
	}

	seen := map[string]*Relay{}
	for _, ev := range events {
		if ev.Tags == nil {
			continue
		}
		rTags := ev.Tags.GetAll([]byte("r"))
		for _, rt := range rTags {
			if rt.Len() < 2 {
				continue
			}
			url := string(rt.Value())
			if url == "" {
				continue
			}
			r, ok := seen[url]
			if !ok {
				r = &Relay{URL: url}
				seen[url] = r
			}
			// Third element is optional: "read", "write", or absent (both).
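			// Example tag shapes (NIP-65):
			//   ["r", "wss://relay.example.com"]          -> read and write
			//   ["r", "wss://relay.example.com", "read"]  -> read only
			//   ["r", "wss://relay.example.com", "write"] -> write only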
			hint := rt.Relay()
			if len(hint) == 0 {
				r.Read = true
				r.Write = true
			} else if string(hint) == "read" {
				r.Read = true
			} else if string(hint) == "write" {
				r.Write = true
			}
		}
	}

	// Flatten the dedup map into a slice.
	out := make([]Relay, 0, len(seen))
	for _, r := range seen {
		out = append(out, *r)
	}
	return out
}

// DiscoverURLs returns just the relay URL strings.
func (c *Crawler) DiscoverURLs() []string {
	relays := c.Discover()
	urls := make([]string, len(relays))
	for i, r := range relays {
		urls[i] = r.URL
	}
	return urls
}
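// Usage sketch (illustrative only; "st" stands in for an already-opened
// *store.Engine, obtained however the store package provides one):
//
//	c := crawler.New(st)
//	for _, r := range c.Discover() {
//		fmt.Printf("%s read=%v write=%v\n", r.URL, r.Read, r.Write)
//	}
//	urls := c.DiscoverURLs() // URL strings only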