author    tjpcc <tjp@ctrl-c.club>    2023-08-30 10:29:47 -0600
committer tjpcc <tjp@ctrl-c.club>    2023-08-30 10:29:47 -0600
commit    77ac83e700415cdbd4635eae91e79f99312ea240 (patch)
tree      65205fbf85c2ba6bf7712f65af49eca564c49365 /iris/backend.go
Initial commit
* iris support copied in from iris-news
* a new slog backend
* "metabackend" wraps and routes between multiple backends based on the groups they support
* better logging than iris-news ever had
Diffstat (limited to 'iris/backend.go')
-rw-r--r--  iris/backend.go  448
1 file changed, 448 insertions, 0 deletions
diff --git a/iris/backend.go b/iris/backend.go
new file mode 100644
index 0000000..787db96
--- /dev/null
+++ b/iris/backend.go
@@ -0,0 +1,448 @@
+package iris
+
+import (
+ "bytes"
+ "crypto/sha1"
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/mail"
+ "net/textproto"
+ "os"
+ "os/exec"
+ "path"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/dustin/go-nntp"
+ nntpserver "github.com/dustin/go-nntp/server"
+ "github.com/go-kit/log"
+ "github.com/go-kit/log/level"
+)
+
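+// group is the single newsgroup exposed for the whole iris board.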
+var group = &nntp.Group{
+ Name: "ctrl-c.iris",
+ Description: "The iris message board",
+ Posting: nntp.PostingPermitted,
+ Low: 1,
+}
+
+const DefaultWaitTime = 30 * time.Second
+const msgfile = ".iris.messages"
+
+// NewBackend builds an iris nntp backend.
+//
+// The provided waitTime may be <= 0, in which case DefaultWaitTime will be used.
+func NewBackend(logger log.Logger, waitTime time.Duration) (nntpserver.Backend, error) {
+ if waitTime <= 0 {
+ waitTime = DefaultWaitTime
+ }
+
+ b := &backend{logger: logger, waitTime: waitTime}
+ if err := b.refresh(); err != nil {
+ return nil, err
+ }
+
+ return b, nil
+}
+
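+// backend serves the ctrl-c.iris group from an in-memory cache of iris messages,
+// refreshed at most once per waitTime.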
+type backend struct {
+ logger log.Logger
+ waitTime time.Duration
+ lastRead time.Time
+ messages []*nntp.Article
+}
+
+func (b backend) debug(keyvals ...any) error { return level.Debug(b.logger).Log(keyvals...) }
+func (b backend) info(keyvals ...any) error { return level.Info(b.logger).Log(keyvals...) }
+func (b backend) warn(keyvals ...any) error { return level.Warn(b.logger).Log(keyvals...) }
+func (b backend) err(keyvals ...any) error { return level.Error(b.logger).Log(keyvals...) }
+
+func (b backend) ListGroups(max int) ([]*nntp.Group, error) {
+ return []*nntp.Group{group}, nil
+}
+
+func (b *backend) GetGroup(name string) (*nntp.Group, error) {
+ if name != group.Name {
+ return nil, nntpserver.ErrNoSuchGroup
+ }
+ if err := b.refresh(); err != nil {
+ return nil, err
+ }
+
+ return group, nil
+}
+
+func (b *backend) GetArticles(_ *nntp.Group, from, to int64) ([]nntpserver.NumberedArticle, error) {
+ if err := b.refresh(); err != nil {
+ return nil, err
+ }
+
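+ // article numbers are 1-based positions in the cached, chronologically ordered message list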
+ numbered := make([]nntpserver.NumberedArticle, 0, len(b.messages))
+ for i, msg := range b.messages {
+ num := int64(i + 1)
+ if num >= from && num <= to {
+ numbered = append(numbered, nntpserver.NumberedArticle{
+ Num: num,
+ Article: copyArticle(msg),
+ })
+ }
+ }
+
+ return numbered, nil
+}
+
+func (b *backend) GetArticle(_ *nntp.Group, messageID string) (*nntp.Article, error) {
+ if err := b.refresh(); err != nil {
+ return nil, err
+ }
+
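+ // the article may be requested either by <message-id> or by article number; try the id first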
+ for _, msg := range b.messages {
+ if msg.Header.Get("Message-Id") == messageID {
+ return copyArticle(msg), nil
+ }
+ }
+
+ num, err := strconv.Atoi(messageID)
+ if err == nil && num >= 1 && num <= len(b.messages) {
+ return copyArticle(b.messages[num-1]), nil
+ }
+
+ return nil, nntpserver.ErrInvalidMessageID
+}
+
+func (b backend) Post(article *nntp.Article) error {
+ // iris replies are all made to a top-level post; there is no grandchild nesting.
+ //
+ // but NNTP supports this, so collapse the provided "References" header up to the OP.
+ parent := b.findOP(article.Header.Get("References"))
+ if parent != nil {
+ article.Header.Set("References", parent.MessageID())
+ }
+
+ msg, err := msgToIris(article)
+ if err != nil {
+ return err
+ }
+ return appendMessage(msg)
+}
+
+func (b backend) Authorized() bool { return true }
+func (b backend) AllowPost() bool { return true }
+func (b backend) Authenticate(_, _ string) (nntpserver.Backend, error) { return nil, nil }
+
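+// findOP resolves a References header to the top-level iris post it ultimately points at.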
+func (b backend) findOP(ref string) *nntp.Article {
+ if ref == "" {
+ return nil
+ }
+ // all references should have the same OP so just take the first
+ msgID := strings.SplitN(ref, " ", 2)[0]
+
+ // traverse backwards, expecting most reply activity to be concentrated late in the history
+ for i := len(b.messages) - 1; i >= 0; i-- {
+ article := b.messages[i]
+ if article.MessageID() != msgID {
+ continue
+ }
+
+ gpID := article.Header.Get("References")
+ if gpID != "" {
+ return b.findOP(gpID)
+ }
+ return article
+ }
+
+ return nil
+}
+
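+// refresh re-reads the message list from iris, at most once per waitTime.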
+func (b *backend) refresh() error {
+ now := time.Now()
+ if b.lastRead.IsZero() || now.Sub(b.lastRead) > b.waitTime {
+ b.lastRead = now
+ } else {
+ return nil
+ }
+
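+ // shell out to the iris binary; -d is expected to dump every message as JSON on stdout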
+ binpath, err := exec.LookPath("iris")
+ if err != nil {
+ return err
+ }
+ cmd := exec.Command(binpath, "-d")
+ buf := &bytes.Buffer{}
+ cmd.Stdout = buf
+
+ if err := cmd.Run(); err != nil {
+ return err
+ }
+
+ msgs := irisDump{}
+ if err := json.NewDecoder(buf).Decode(&msgs); err != nil {
+ return err
+ }
+
+ b.messages, err = msgs.Articles()
+ if err != nil {
+ return err
+ }
+ group.High = int64(len(b.messages))
+ group.Count = int64(len(b.messages))
+ return nil
+}
+
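+// copyArticle returns a shallow copy with a fresh body buffer, so reading one copy
+// does not drain the cached article's body.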
+func copyArticle(article *nntp.Article) *nntp.Article {
+ out := *article
+ out.Body = bytes.NewBuffer(article.Body.(*bytes.Buffer).Bytes())
+ return &out
+}
+
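+// irisMsg mirrors a single entry in the JSON dump produced by iris.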
+type irisMsg struct {
+ Hash string `json:"hash"`
+ EditHash *string `json:"edit_hash"`
+ IsDeleted *bool `json:"is_deleted"`
+ Data struct {
+ Author string `json:"author"`
+ Parent *string `json:"parent"`
+ Timestamp string `json:"timestamp"`
+ Message string `json:"message"`
+ } `json:"data"`
+}
+
+func (m irisMsg) calcHash() (string, error) {
+ /*
+ Careful coding here to match ruby's hash calculation:
+ ```
+ Base64.encode64(Digest::SHA1.digest(m["data"].to_json))
+ ```
+
+ * have to use an encoder rather than json.Marshal so we can
+ turn off the default HTML escaping (ruby doesn't do this)
+ * strip trailing newline from JSON encoding output
+ * add a trailing newline to base64 encoded form
+ */
+
+ b := &bytes.Buffer{}
+ enc := json.NewEncoder(b)
+ enc.SetEscapeHTML(false)
+ if err := enc.Encode(m.Data); err != nil {
+ return "", err
+ }
+
+ arr := sha1.Sum(bytes.TrimSuffix(b.Bytes(), []byte("\n")))
+ s := base64.StdEncoding.EncodeToString(arr[:])
+ // Ruby's Base64.encode64 appends a trailing newline; Go's encoder never does
+ s += "\n"
+ return s, nil
+}
+
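+// msgToIris converts an incoming NNTP post into an iris message and computes its content hash.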
+func msgToIris(article *nntp.Article) (*irisMsg, error) {
+ postTime := time.Now().UTC().Format(time.RFC3339)
+
+ body, err := io.ReadAll(article.Body)
+ if err != nil {
+ return nil, err
+ }
+
+ var msg irisMsg
+ msg.Data.Author = irisAuthor(article.Header.Get("From"))
+ msg.Data.Timestamp = postTime
+ msg.Data.Message = string(body)
+ refs := article.Header.Get("References")
+ if refs != "" {
+ spl := strings.SplitN(refs, " ", 2)
+ ref := fromMsgID(spl[0])
+ msg.Data.Parent = &ref
+ }
+
+ hash, err := msg.calcHash()
+ if err != nil {
+ return nil, err
+ }
+ msg.Hash = hash
+
+ return &msg, nil
+}
+
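+// irisAuthor reduces an NNTP From header to a bare address, falling back to the raw value
+// if it doesn't parse.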
+func irisAuthor(nntpAuthor string) string {
+ addr, err := mail.ParseAddress(nntpAuthor)
+ if err != nil {
+ return nntpAuthor
+ }
+
+ return addr.Address
+}
+
+type irisDump []irisMsg
+
+func (dump irisDump) Articles() ([]*nntp.Article, error) {
+ // calculate the article replacements due to edits
+ //
+ // note: this is only a single "hop", and because there can be edits-of-edits
+ // and edits-of-edits-of-edits, we must actually resolve replacements with a loop.
+ //
+ // we need to keep all the hops though because there could have been replies to
+ // the original or to any intermediate edits.
+ replacements := make(map[string]string)
+ for _, msg := range dump {
+ if msg.EditHash != nil {
+ replacements[*msg.EditHash] = msg.Hash
+ }
+ }
+
+ articles := make([]*nntp.Article, 0, len(dump)-len(replacements))
+
+ // index iris hashes -> nntp Articles for reference lookups
+ idx := make(map[string]*nntp.Article)
+
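+ // sort by timestamp so article numbers are stable and follow posting order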
+ sort.SliceStable(dump, func(i, j int) bool {
+ return dump[i].Data.Timestamp < dump[j].Data.Timestamp
+ })
+
+outer:
+ for _, msg := range dump {
+ if _, ok := replacements[msg.Hash]; ok {
+ continue
+ }
+ if msg.EditHash != nil && *msg.EditHash == msg.Hash {
+ continue
+ }
+
+ msgID := msgIDFor(&msg)
+ ts, err := time.Parse(time.RFC3339, msg.Data.Timestamp)
+ if err != nil {
+ return nil, err
+ }
+
+ article := &nntp.Article{
+ Header: textproto.MIMEHeader{
+ "Message-Id": []string{msgID},
+ "From": []string{msg.Data.Author},
+ "Newsgroups": []string{group.Name},
+ "Date": []string{ts.Format(time.RFC1123Z)},
+ },
+ }
+
+ if msg.IsDeleted != nil && *msg.IsDeleted {
+ article.Header.Set("Subject", "**TOPIC DELETED BY AUTHOR**")
+ article.Body = &bytes.Buffer{}
+ article.Bytes = 0
+ article.Lines = 0
+ } else {
+ article.Body = bytes.NewBufferString(msg.Data.Message)
+ article.Bytes = len(msg.Data.Message)
+ article.Lines = strings.Count(msg.Data.Message, "\n")
+
+ if msg.Data.Parent == nil {
+ article.Header.Set("Subject", strings.SplitN(msg.Data.Message, "\n", 2)[0])
+ } else {
+ parentHash := *msg.Data.Parent
+ for {
+ if p, ok := replacements[parentHash]; ok {
+ if parentHash == p {
+ continue outer
+ }
+ parentHash = p
+ } else {
+ break
+ }
+ }
+ msg.Data.Parent = &parentHash
+ parent, ok := idx[parentHash]
+ if !ok {
+ continue
+ }
+ parentSubj := strings.TrimPrefix(parent.Header.Get("Subject"), "Re: ")
+ article.Header.Set("Subject", "Re: "+parentSubj)
+ }
+ }
+
+ if msg.Data.Parent != nil {
+ parent := idx[*msg.Data.Parent]
+ if parent == nil {
+ continue
+ }
+ parentRefs := parent.Header.Get("References")
+ if parentRefs != "" {
+ article.Header.Set("References", parentRefs)
+ } else {
+ article.Header.Set("References", parent.MessageID())
+ }
+ }
+
+ articles = append(articles, article)
+ idx[msg.Hash] = article
+ }
+
+ return articles, nil
+}
+
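+// MarshalJSON writes the dump as a JSON array with one message per line and HTML escaping disabled.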
+func (dump irisDump) MarshalJSON() ([]byte, error) {
+ buf := &bytes.Buffer{}
+ enc := json.NewEncoder(buf)
+ enc.SetEscapeHTML(false)
+
+ out := bytes.NewBufferString("[\n ")
+
+ for i, msg := range dump {
+ if err := enc.Encode(msg); err != nil {
+ return nil, err
+ }
+
+ if i > 0 {
+ _, _ = out.WriteString(",\n ")
+ }
+ _, _ = out.Write(bytes.TrimSuffix(buf.Bytes(), []byte("\n")))
+ buf.Reset()
+ }
+ _, _ = out.WriteString("\n]")
+
+ return out.Bytes(), nil
+}
+
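+// msgIDFor builds an NNTP Message-Id of the form <hash.author>, dropping the trailing "=\n"
+// from the iris hash.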
+func msgIDFor(msg *irisMsg) string {
+ return fmt.Sprintf("<%s.%s>",
+ strings.TrimSuffix(msg.Hash, "=\n"),
+ msg.Data.Author,
+ )
+}
+
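+// fromMsgID recovers the iris hash (restoring its trailing "=\n") from a Message-Id built by msgIDFor.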
+func fromMsgID(nntpID string) string {
+ hash, _, _ := strings.Cut(strings.TrimSuffix(strings.TrimPrefix(nntpID, "<"), ">"), ".")
+ return hash + "=\n"
+}
+
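+// appendMessage loads the iris message file from the user's home directory, appends the
+// new message, and rewrites the whole file.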
+func appendMessage(msg *irisMsg) error {
+ home, err := os.UserHomeDir()
+ if err != nil {
+ return err
+ }
+
+ msgFile, err := os.Open(path.Join(home, msgfile))
+ if err != nil {
+ return err
+ }
+
+ var msgs irisDump
+ if err := json.NewDecoder(msgFile).Decode(&msgs); err != nil {
+ _ = msgFile.Close()
+ return err
+ }
+ _ = msgFile.Close()
+ msgs = append(msgs, *msg)
+
+ msgFile, err = os.Create(path.Join(home, msgfile))
+ if err != nil {
+ return err
+ }
+ defer func() { _ = msgFile.Close() }()
+
+ out, err := msgs.MarshalJSON()
+ if err != nil {
+ return err
+ }
+ _, err = msgFile.Write(out)
+ return err
+}