package main

import (
	"fmt"
	"mime"
	"os"
	"time"

	"smesh.lol/pkg/blossom"
	"smesh.lol/pkg/nostr/envelope"
	"smesh.lol/pkg/nostr/filter"
	"smesh.lol/pkg/nostr/ws"
	"smesh.lol/pkg/relay/server"
	"smesh.lol/pkg/relay/worker"
)

var version = "0.1.23"

// main dispatches to a subcommand. Invoked with no arguments, or with a
// leading flag instead of a command word, it defaults to running the relay.
func main() {
	if len(os.Args) < 2 {
		runRelay(os.Args[1:])
		return
	}
	switch os.Args[1] {
	case "relay":
		runRelay(os.Args[2:])
	case "sync":
		runSync(os.Args[2:])
	case "crawl":
		runCrawl(os.Args[2:])
	case "version", "-v", "--version":
		fmt.Printf("smesh %s\n", version)
	case "help", "-h", "--help":
		printHelp()
	default:
		if len(os.Args[1]) > 0 && os.Args[1][0] == '-' {
			// First arg is a flag, not a command: treat it as "relay" options.
			runRelay(os.Args[1:])
		} else {
			fmt.Fprintf(os.Stderr, "unknown command: %s\n", os.Args[1])
			printHelp()
			os.Exit(1)
		}
	}
}

// runRelay opens the event store, wires up the Blossom file server and the
// static-file fallback, spawns the background crawl/sync workers once the
// listener is ready, and then serves until the process exits.
//
// Configuration comes from the SMESH_* environment variables (see printHelp).
func runRelay(_ []string) {
	// .mjs is missing from some platforms' default MIME tables; register it
	// so module scripts are served with a JS content type.
	mime.AddExtensionType(".mjs", "application/javascript")

	listenAddr := envOr("SMESH_LISTEN", "0.0.0.0:3334")
	dataDir := envOr("SMESH_DATA_DIR", "data")
	// Fix: these defaults used `|` (bitwise OR) where string concatenation
	// was intended; `+` restores the "<dir>/suffix" paths.
	blossomDir := envOr("SMESH_BLOSSOM_DIR", dataDir+"/blossom")
	staticDir := envOr("SMESH_STATIC_DIR", "web/static")

	// Open the store.
	store, err := worker.Open(dataDir)
	if err != nil {
		fmt.Fprintf(os.Stderr, "store: %v\n", err)
		os.Exit(1)
	}
	defer store.Close()
	srv := server.New(store)

	// Blossom file server.
	bsrv, err := blossom.New(blossomDir)
	if err != nil {
		fmt.Fprintf(os.Stderr, "blossom: %v\n", err)
		os.Exit(1)
	}

	// Fallback handles every HTTP path the relay core does not claim.
	srv.Fallback = func(path string, headers map[string]string) (int, map[string]string, []byte) {
		switch {
		case hasPrefix(path, "/blossom/"):
			// Strip only "/blossom" so the handler sees a leading "/".
			return bsrv.HandleRaw(path[len("/blossom"):], headers)
		case path == "/__version":
			// Combine the human version with the mtime of the main JS bundle so
			// the SW's version poller invalidates its cache on every rebuild.
			stamp := int64(0)
			if info, err := os.Stat(staticDir + "/__web_app_.mjs"); err == nil {
				stamp = info.ModTime().UnixNano()
			}
			return 200, map[string]string{
				"Content-Type":                "application/json",
				"Access-Control-Allow-Origin": "*",
				"Cache-Control":               "no-store, no-cache, must-revalidate",
				"Pragma":                      "no-cache",
				"Expires":                     "0",
			}, []byte(fmt.Sprintf(`{"v":"%s+%d"}`, version, stamp))
		case path == "/.well-known/nostr.json":
			// NIP-05 identity document (static).
			return 200, map[string]string{
				"Content-Type":                "application/json",
				"Access-Control-Allow-Origin": "*",
			}, []byte(`{"names":{"mleku":"4c800257a588a82849d049817c2bdaad984b25a45ad9f6dad66e47d3b47e3b2f","bridge":"cf1ae33ad5f229dabd7d733ce37b0165126aebf581e4094df9373f77e00cb696"},"relays":{"4c800257a588a82849d049817c2bdaad984b25a45ad9f6dad66e47d3b47e3b2f":["wss://smesh.lol"],"cf1ae33ad5f229dabd7d733ce37b0165126aebf581e4094df9373f77e00cb696":["wss://smesh.lol","wss://relay.orly.dev"]}}`)
		default:
			return serveStatic(staticDir, path)
		}
	}
	srv.OnReady = func() {
		fmt.Fprintf(os.Stderr, "smesh %s listening on %s\n", version, listenAddr)
		spawnCrawler(listenAddr)
		spawnSync(listenAddr, "wss://relay.orly.dev")
	}
	if err := srv.ListenAndServe(listenAddr); err != nil {
		fmt.Fprintf(os.Stderr, "listen: %v\n", err)
		os.Exit(1)
	}
}

// serveStatic reads a file from dir and returns (status, headers, body).
// Unmatched paths fall back to index.html so client-side routing works.
func serveStatic(dir, path string) (int, map[string]string, []byte) {
	if path == "" || path == "/" {
		path = "/index.html"
	}
	// Fix: `dir | path` was a corrupted concatenation; paths here are
	// URL-style (always "/"-separated), so plain `+` is correct.
	data, err := os.ReadFile(dir + path)
	if err != nil {
		// SPA fallback: serve index.html for unmatched paths (client-side routing).
		data, err = os.ReadFile(dir + "/index.html")
		if err != nil {
			return 404, map[string]string{"Content-Type": "text/plain"}, []byte("404 not found\n")
		}
		return 200, map[string]string{"Content-Type": "text/html; charset=utf-8", "Cache-Control": "no-cache"}, data
	}
	// Content type by extension; unknown extensions get octet-stream.
	ct := "application/octet-stream"
	switch {
	case hasSuffix(path, ".html"):
		ct = "text/html; charset=utf-8"
	case hasSuffix(path, ".js"), hasSuffix(path, ".mjs"):
		ct = "application/javascript"
	case hasSuffix(path, ".css"):
		ct = "text/css"
	case hasSuffix(path, ".json"):
		ct = "application/json"
	case hasSuffix(path, ".svg"):
		ct = "image/svg+xml"
	case hasSuffix(path, ".png"):
		ct = "image/png"
	case hasSuffix(path, ".ico"):
		ct = "image/x-icon"
	case hasSuffix(path, ".wasm"):
		ct = "application/wasm"
	case hasSuffix(path, ".webp"):
		ct = "image/webp"
	case hasSuffix(path, ".woff2"):
		ct = "font/woff2"
	}
	h := map[string]string{"Content-Type": ct}
	if path == "/$sw/$entry.mjs" {
		// Let the service worker claim scope "/" despite living under /$sw/.
		h["Service-Worker-Allowed"] = "/"
	}
	// Always revalidate so the SW's CacheFromManifests fetch hits the network
	// instead of being short-circuited by the browser HTTP cache. Without this
	// the SW updates its Cache API entries from a stale browser cache and the
	// hot-reload chain silently breaks.
	h["Cache-Control"] = "no-cache"
	return 200, h, data
}

// --- sync command ---

// envOr returns the environment variable's value, or fallback when it is
// unset or empty.
func envOr(key, fallback string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return fallback
}

// runSync forwards events from a remote relay to a local one forever,
// reconnecting 30s after any disconnect.
//
// Usage: smesh sync <remote-relay-url> [local-relay-url]
func runSync(args []string) {
	if len(args) < 1 {
		fmt.Fprintln(os.Stderr, "usage: smesh sync [local-relay-url]")
		os.Exit(1)
	}
	remoteURL := args[0]
	localURL := "ws://127.0.0.1:3334"
	if len(args) >= 2 {
		localURL = args[1]
	}
	for {
		syncOnce(remoteURL, localURL)
		fmt.Fprintln(os.Stderr, "sync: disconnected, reconnecting in 30s...")
		time.Sleep(30 * time.Second)
	}
}

// syncOnce opens one remote and one local websocket, subscribes to everything
// on the remote (empty filter), and republishes each received event locally.
// Returns when either connection fails or the remote closes.
func syncOnce(remoteURL, localURL string) {
	fmt.Fprintf(os.Stderr, "sync: connecting to remote %s\n", remoteURL)
	remote, err := ws.Dial(remoteURL)
	if err != nil {
		fmt.Fprintf(os.Stderr, "sync: remote connect error: %v\n", err)
		return
	}
	defer remote.Close()
	fmt.Fprintf(os.Stderr, "sync: connecting to local %s\n", localURL)
	local, err := ws.Dial(localURL)
	if err != nil {
		fmt.Fprintf(os.Stderr, "sync: local connect error: %v\n", err)
		return
	}
	defer local.Close()

	// An empty filter matches all events the remote will give us.
	f := &filter.F{}
	req := &envelope.Req{
		Subscription: []byte("sync"),
		Filters:      &filter.S{f},
	}
	if err := remote.WriteText(req.Marshal(nil)); err != nil {
		fmt.Fprintf(os.Stderr, "sync: subscribe error: %v\n", err)
		return
	}

	var forwarded int64
	eosed := false
	for {
		op, payload, err := remote.ReadMessage()
		if err != nil {
			fmt.Fprintf(os.Stderr, "sync: read error (%d forwarded): %v\n", forwarded, err)
			return
		}
		if op == ws.OpClose {
			fmt.Fprintf(os.Stderr, "sync: remote closed (%d forwarded)\n", forwarded)
			return
		}
		if op != ws.OpText {
			continue
		}
		label, rem, _ := envelope.Identify(payload)
		switch label {
		case envelope.EventLabel:
			var es envelope.EventSubmission
			if _, err := es.Unmarshal(rem); err != nil {
				continue // skip malformed events rather than aborting the stream
			}
			// Re-wrap as a fresh EVENT submission for the local relay.
			fwd := &envelope.EventSubmission{E: es.E}
			if err := local.WriteText(fwd.Marshal(nil)); err != nil {
				fmt.Fprintf(os.Stderr, "sync: local publish error: %v\n", err)
				return
			}
			forwarded++
			if forwarded%1000 == 0 {
				fmt.Fprintf(os.Stderr, "sync: %d events forwarded\n", forwarded)
			}
		case envelope.EOSELabel:
			if !eosed {
				eosed = true
				fmt.Fprintf(os.Stderr, "sync: EOSE — historical sync complete (%d forwarded). streaming live...\n", forwarded)
			}
		}
	}
}

// hasPrefix reports whether s begins with prefix (local strings.HasPrefix
// equivalent; the file deliberately avoids importing strings).
func hasPrefix(s, prefix string) bool {
	return len(s) >= len(prefix) && s[:len(prefix)] == prefix
}

// hasSuffix reports whether s ends with suffix.
func hasSuffix(s, suffix string) bool {
	return len(s) >= len(suffix) && s[len(s)-len(suffix):] == suffix
}

// spawnCrawler re-executes this binary as "crawl" against the local relay,
// via /bin/sh. The child is intentionally not waited on.
func spawnCrawler(listenAddr string) {
	host := listenAddr
	if hasPrefix(host, "0.0.0.0:") {
		// A wildcard listen address is not dialable; connect via loopback.
		host = "127.0.0.1:" + host[len("0.0.0.0:"):]
	}
	// NOTE(review): the command line is built by string concatenation and run
	// through the shell; os.Args[0] is trusted here, but verify no
	// user-controlled data can ever reach these spawn helpers.
	cmd := os.Args[0] + " crawl ws://" + host
	argv := []string{"/bin/sh", "-c", cmd}
	attr := &os.ProcAttr{}
	_, err := os.StartProcess("/bin/sh", argv, attr)
	if err != nil {
		fmt.Fprintf(os.Stderr, "crawl: spawn failed: %v\n", err)
	}
}

// spawnSync re-executes this binary as "sync <remote> ws://<local>".
func spawnSync(listenAddr, remoteURL string) {
	host := listenAddr
	if hasPrefix(host, "0.0.0.0:") {
		// Fix: these concatenations were corrupted to `|` (bitwise OR);
		// restored to `+` to match spawnCrawler above.
		host = "127.0.0.1:" + host[len("0.0.0.0:"):]
	}
	cmd := os.Args[0] + " sync " + remoteURL + " ws://" + host
	argv := []string{"/bin/sh", "-c", cmd}
	attr := &os.ProcAttr{}
	_, err := os.StartProcess("/bin/sh", argv, attr)
	if err != nil {
		fmt.Fprintf(os.Stderr, "sync: spawn failed: %v\n", err)
	}
}

// --- crawl command ---

var crawlSeeds = []string{
	"wss://relay.damus.io",
	"wss://nos.lol",
	"wss://relay.nostr.band",
}

// Directory event kinds to fetch.
const crawlKindsFilter = `[0,3,5,1984,10000,10002,10050]`

var crawlLog *os.File

// clog writes a timestamped line to the crawl log file (or stderr fallback).
func clog(format string, args ...any) {
	ts := time.Now().Format("15:04:05")
	fmt.Fprintf(crawlLog, ts+" "+format+"\n", args...)
}

// relayDB tracks known relays with frequency scores.
// Higher score = seen more often in kind 10002/10050 events = higher priority.
type relayDB struct {
	score map[string]int // URL → frequency count
	order []string       // URLs sorted by descending score
}

// newRelayDB returns an empty database ready for add().
func newRelayDB() *relayDB { return &relayDB{score: map[string]int{}} }

// add bumps url's score by weight, inserting it if unknown.
func (db *relayDB) add(url string, weight int) { db.score[url] += weight }

// sorted returns relay URLs ordered by descending frequency.
func (db *relayDB) sorted() []string { urls := []string{:0:len(db.score)} for u := range db.score { urls = append(urls, u) } // Simple insertion sort by score descending. for i := 1; i < len(urls); i++ { for j := i; j > 0 && db.score[urls[j]] > db.score[urls[j-1]]; j-- { urls[j], urls[j-1] = urls[j-1], urls[j] } } return urls } func runCrawl(args []string) { var err error crawlLog, err = os.OpenFile("/tmp/smesh-crawl.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644) if err != nil { crawlLog = os.Stderr } localURL := "ws://127.0.0.1:3334" if len(args) >= 1 { localURL = args[0] } clog("started pid=%d local=%s", os.Getpid(), localURL) db := newRelayDB() // Seed relays get a high initial score. for _, s := range crawlSeeds { db.add(s, 100) } pass := 0 for { pass++ clog("=== pass %d, %d relays known ===", pass, len(db.score)) ok := crawlPass(localURL, db) if ok { clog("pass complete, sleeping 5m") time.Sleep(5 * time.Minute) } else { clog("pass failed, retrying in 30s") time.Sleep(30 * time.Second) } } } func crawlPass(localURL string, db *relayDB) bool { relays := db.sorted() if len(relays) == 0 { clog("no relays known") return false } totalEvents := 0 for i, relayURL := range relays { clog("[%d/%d] crawling %s (score %d)", i+1, len(relays), relayURL, db.score[relayURL]) events := crawlRelay(relayURL) if len(events) == 0 { clog(" %s → 0 events", relayURL) time.Sleep(1 * time.Second) continue } clog(" %s → %d events", relayURL, len(events)) // Extract new relay URLs from the events before publishing. for _, raw := range events { crawlExtractRelays(raw, db) } // Publish batch to local relay. published := crawlPublishBatch(localURL, events) clog(" published %d/%d to local", published, len(events)) totalEvents += published time.Sleep(1 * time.Second) } clog("total %d events from %d relays", totalEvents, len(relays)) return true } // crawlRelay connects to one relay and subscribes to directory events. // Returns raw EVENT messages suitable for republishing. 
func crawlRelay(relayURL string) [][]byte { remote, err := ws.Dial(relayURL) if err != nil { clog(" dial %s FAILED: %v", relayURL, err) return nil } defer remote.Close() reqJSON := []byte(`["REQ","cr",{"kinds":` + crawlKindsFilter + `,"limit":200}]`) if err := remote.WriteText(reqJSON); err != nil { clog(" write REQ to %s failed: %v", relayURL, err) return nil } var events [][]byte for { op, payload, err := remote.ReadMessage() if err != nil { break } if op != ws.OpText { continue } label, rem, _ := envelope.Identify(payload) if label == envelope.EOSELabel { break } if label == envelope.EventLabel { var er envelope.EventResult if _, err := er.Unmarshal(rem); err == nil && er.Event != nil { es := &envelope.EventSubmission{E: er.Event} events = append(events, es.Marshal(nil)) } } _ = rem } return events } // crawlExtractRelays parses an EVENT submission and adds discovered relay // URLs to the database with a frequency bump. func crawlExtractRelays(raw []byte, db *relayDB) { _, rem, err := envelope.Identify(raw) if err != nil { return } var es envelope.EventSubmission if _, err := es.Unmarshal(rem); err != nil || es.E == nil { return } ev := es.E if (ev.Kind != 10002 && ev.Kind != 10050) || ev.Tags == nil { return } for _, t := range ev.Tags.GetAll([]byte("r")) { if t.Len() >= 2 { url := string(t.Value()) if len(url) > 5 && (hasPrefix(url, "wss://") || hasPrefix(url, "ws://")) { db.add(url, 1) } } } } // crawlPublishBatch publishes a batch of EVENT messages to the local relay. func crawlPublishBatch(localURL string, events [][]byte) int { local, err := ws.Dial(localURL) if err != nil { clog(" local connect failed: %v", err) return 0 } defer local.Close() for _, evBytes := range events { local.WriteText(evBytes) } // Drain OKs — one per event sent. 
count := 0 for count < len(events) { _, _, err := local.ReadMessage() if err != nil { break } count++ } return count } func hexEnc(b []byte) string { const hx = "0123456789abcdef" out := []byte{:len(b)*2} for i, v := range b { out[i*2] = hx[v>>4] out[i*2+1] = hx[v&0x0f] } return string(out) } func i64str(n int64) string { if n == 0 { return "0" } neg := false if n < 0 { neg = true n = -n } var buf [20]byte i := 19 for n > 0 { buf[i] = byte('0' + n%10) i-- n /= 10 } if neg { buf[i] = '-' i-- } return string(buf[i+1:]) } func crawlAppendUniq(ss []string, s string) []string { for _, x := range ss { if x == s { return ss } } return append(ss, s) } func printHelp() { fmt.Println(`smesh - Nostr relay Usage: smesh [command] [options] Commands: relay Run the relay server (default) sync Sync events from a remote relay crawl Background metadata crawler version Show version help Show this help Environment: SMESH_DATA_DIR Data directory (default: data) SMESH_LISTEN Listen address (default: :3334) SMESH_BLOSSOM_DIR Blossom file directory (default: data/blossom) SMESH_STATIC_DIR Static web files directory (default: web/static)`) }