zorg/feed.go

package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"regexp"
	"time"

	"github.com/mmcdole/gofeed"
)

// init launches the feed daemon in the background as soon as the program starts.
func init() {
	go feedDaemon()
	log.Println("Feed daemon started.")
}

// forwardLastFeed fetches the feed at url and, when its newest item was
// published within the last Zint, forwards that item to Mastodon.
func forwardLastFeed(url string) {
	fp := gofeed.NewParser()
	feed, err := fp.ParseURL(url)
	if err != nil {
		log.Println(err)
		return
	}
	if len(feed.Items) == 0 {
		return
	}
	b := feed.Items[0]
	log.Printf("%d - Feeds from: %s\n", len(feed.Items), feed.Title)

	// Work out how old the newest item is, preferring the published date.
	var postAge time.Time
	switch {
	case b.PublishedParsed != nil:
		postAge = *b.PublishedParsed
	case b.UpdatedParsed != nil:
		log.Println("Oops, no Published date in the feed. Using the Updated date.")
		postAge = *b.UpdatedParsed
	default:
		log.Println("No Published or Updated date. This feed is crap, giving up.")
		// Push the age beyond the polling interval so the item is never forwarded.
		postAge = time.Now().Add(-(Zint + time.Hour))
	}
	log.Println("Last Post Age: ", postAge)

	if time.Since(postAge) < Zint {
		// Some feeds omit the author entirely; guard against a nil pointer.
		author := ""
		if b.Author != nil {
			author = b.Author.Name
		}
		TheTitle := fmt.Sprintf("[News from %s ]", html2text(feed.Title))
		TheBody := fmt.Sprintf("%s\n\n%s\n\n%s\n\n%s\n", html2text(author), html2text(b.Title), html2text(b.Description), b.Link)
		postOnMastodon(TheBody, TheTitle)
		log.Println("New content from: ", feed.Title, b.Title, feed.Description)
	} else {
		log.Println("No new content from: ", feed.Title, feed.Description)
	}
}

// feedDaemon iterates over every feed listed in feeds.conf: once at startup,
// then again on every Zint tick.
func feedDaemon() {
	ticker := time.NewTicker(Zint)
	defer ticker.Stop()
	scanFeeds(fileByLines("feeds.conf"))
	log.Println("RSS poll done. Next in ", Zint.String())
	for range ticker.C {
		scanFeeds(fileByLines("feeds.conf"))
		log.Println("RSS poll done. Next in ", Zint.String())
	}
}

// scanFeeds forwards the latest item of each feed URL in the list.
func scanFeeds(urls []string) {
	for _, a := range urls {
		forwardLastFeed(a)
	}
}
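
// feeds.conf is expected to hold one feed URL per line, read by fileByLines
// below. A hypothetical example (these URLs are placeholders, not from the
// original project):
//
//	https://example.org/blog/index.xml
//	https://example.net/news/rss.xml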

// fileByLines reads filename and returns its contents as a slice of lines;
// for feeds.conf that means one feed URL per line.
func fileByLines(filename string) (blurls []string) {
	file, err := os.Open(filename)
	if err != nil {
		log.Println(err)
		return nil
	}
	defer file.Close()
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		blurls = append(blurls, scanner.Text())
	}
	if err := scanner.Err(); err != nil {
		log.Println(err)
	}
	return
}

// html2text strips HTML tags from a string with a simple regexp; crude, but
// good enough for short feed titles and descriptions.
func html2text(html string) string {
	re := regexp.MustCompile(`<[^>]*>`)
	return re.ReplaceAllString(html, "")
}
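
// Zint (the polling interval) and postOnMastodon are not defined in this file;
// they live elsewhere in package main. A minimal sketch of what those
// declarations might look like, assuming Zint is a time.Duration and
// postOnMastodon takes the status body and a title string (the value and the
// posting logic below are illustrative, not the project's actual code):
//
//	var Zint = 30 * time.Minute
//
//	func postOnMastodon(body, title string) {
//		// publish the status via the Mastodon client configured elsewhere
//	}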