2020-10-08 15:33:26 -04:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bufio"
|
|
|
|
"fmt"
|
|
|
|
"log"
|
|
|
|
"os"
|
|
|
|
"time"
|
|
|
|
|
2023-08-07 14:20:29 -04:00
|
|
|
"regexp"
|
|
|
|
|
2020-10-08 15:33:26 -04:00
|
|
|
"github.com/mmcdole/gofeed"
|
|
|
|
)
|
|
|
|
|
|
|
|
// init starts the RSS polling daemon in a background goroutine as soon
// as the program starts, then logs that it is running.
//
// NOTE(review): the goroutine has no stop mechanism (no ctx / WaitGroup);
// it runs for the life of the process — presumably intentional for a
// long-running bot, but confirm.
func init() {
	go feedDaemon()
	log.Println("Feed daemon started.")
}
|
|
|
|
|
|
|
|
func forwardLastFeed(url string) {
|
|
|
|
|
|
|
|
fp := gofeed.NewParser()
|
|
|
|
feed, err := fp.ParseURL(url)
|
|
|
|
if err != nil {
|
|
|
|
log.Println(err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
var b *gofeed.Item
|
2023-07-14 18:25:03 -04:00
|
|
|
var postAge time.Time
|
2020-10-08 15:33:26 -04:00
|
|
|
|
|
|
|
if len(feed.Items) > 0 {
|
|
|
|
b = feed.Items[0]
|
|
|
|
log.Printf("%d - %s %s\n", len(feed.Items), "Feeds from: ", feed.Title)
|
|
|
|
} else {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-07-14 18:25:03 -04:00
|
|
|
if b.PublishedParsed != nil {
|
|
|
|
postAge = *b.PublishedParsed
|
2023-07-14 18:37:54 -04:00
|
|
|
} else if b.UpdatedParsed != nil {
|
|
|
|
log.Println("Ops, no Published in the feed. Using Update Date")
|
2023-07-14 18:25:03 -04:00
|
|
|
postAge = *b.UpdatedParsed
|
2023-07-14 18:37:54 -04:00
|
|
|
} else {
|
|
|
|
log.Println("No Published date or update date. This feed is crap, giving up")
|
2023-07-14 19:06:56 -04:00
|
|
|
postAge = time.Now().Add(Zint + (1 * time.Hour))
|
2023-07-14 18:25:03 -04:00
|
|
|
}
|
|
|
|
|
2023-07-14 19:06:56 -04:00
|
|
|
log.Println("Last Post Age: ", postAge)
|
2023-07-14 18:25:03 -04:00
|
|
|
|
|
|
|
if time.Since(postAge) < Zint {
|
2020-10-08 15:33:26 -04:00
|
|
|
|
2023-08-07 14:20:29 -04:00
|
|
|
TheTitle := fmt.Sprintf("[News from %s ]", html2text(feed.Title))
|
|
|
|
TheBody := fmt.Sprintf("%s\n\n %s\n\n %s\n\n%s\n ", html2text(b.Author.Name), html2text(b.Title), html2text(b.Description), b.Link)
|
2023-07-14 20:04:50 -04:00
|
|
|
|
|
|
|
postOnMastodon(TheBody, TheTitle)
|
2023-07-14 19:06:56 -04:00
|
|
|
log.Println("New content from: ", feed.Title, b.Title, feed.Description)
|
2020-10-08 15:33:26 -04:00
|
|
|
} else {
|
2023-07-14 19:06:56 -04:00
|
|
|
log.Println("No new content from: ", feed.Title, feed.Description)
|
2020-10-08 15:33:26 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2023-07-14 18:25:03 -04:00
|
|
|
// feedDaemon iterates over every configured feed on a fixed schedule.
|
2020-10-08 15:33:26 -04:00
|
|
|
func feedDaemon() {
|
|
|
|
|
|
|
|
ticker := time.NewTicker(Zint)
|
|
|
|
|
|
|
|
defer ticker.Stop()
|
|
|
|
scanFeeds(fileByLines("feeds.conf"))
|
|
|
|
log.Println("RSS poll done. Next in ", Zint.String())
|
|
|
|
|
|
|
|
for range ticker.C {
|
|
|
|
scanFeeds(fileByLines("feeds.conf"))
|
|
|
|
log.Println("RSS poll done. Next in ", Zint.String())
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
func scanFeeds(fiids []string) {
|
|
|
|
|
|
|
|
for _, a := range fiids {
|
|
|
|
forwardLastFeed(a)
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
func fileByLines(filename string) (blurls []string) {
|
|
|
|
|
|
|
|
file, err := os.Open(filename)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println(err.Error())
|
|
|
|
}
|
|
|
|
defer file.Close()
|
|
|
|
|
|
|
|
scanner := bufio.NewScanner(file)
|
|
|
|
for scanner.Scan() {
|
|
|
|
d := scanner.Text()
|
|
|
|
blurls = append(blurls, d)
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := scanner.Err(); err != nil {
|
|
|
|
fmt.Println(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
return
|
|
|
|
|
|
|
|
}
|
2023-08-07 14:20:29 -04:00
|
|
|
|
|
|
|
// htmlTagRe matches any <...> tag. Compiled once at package scope —
// the original called regexp.MustCompile on every invocation, which is
// wasted work on a function called several times per feed item.
var htmlTagRe = regexp.MustCompile(`<[^>]*>`)

// html2text strips HTML/XML tag markup from html and returns the
// remaining plain text. Entities (e.g. &amp;) are left untouched; only
// <...> tags are removed.
func html2text(html string) string {
	return htmlTagRe.ReplaceAllString(html, "")
}
|