2024-01-19 03:33:37 +00:00
|
|
|
package rss
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"net/http"
|
|
|
|
"strings"
|
2024-01-19 04:28:30 +00:00
|
|
|
"time"
|
2024-01-19 03:33:37 +00:00
|
|
|
|
|
|
|
"golang.org/x/net/html"
|
|
|
|
)
|
2024-01-21 06:37:00 +00:00
|
|
|
// feedfmt is the fmt template for a complete RSS 2.0 document.
// Verb order: channel title, channel link, channel description, and the
// pre-rendered concatenation of <item> elements (see itemfmt).
const feedfmt = `<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0">
<channel>
<title>%s</title>
<link>%s</link>
<description>%s</description>
%s
</channel>
</rss>
`
|
|
|
|
|
|
|
|
// itemfmt is the fmt template for a single RSS <item>.
// Verb order: link, guid, pubDate (RFC 822-style date string), and the
// raw article HTML, which is wrapped in CDATA rather than entity-encoded.
//
// NOTE(review): the <title> is a fixed placeholder ("Content Title");
// presumably it should come from the fetched page — confirm with callers
// before changing the verb count, since every Sprintf site must match.
const itemfmt = `<item>
<title>Content Title</title>
<link>%s</link>
<guid>%s</guid>
<pubDate>%s</pubDate>
<description><![CDATA[%s]]></description>
</item>`
|
2024-01-19 03:33:37 +00:00
|
|
|
|
|
|
|
// fetchPage performs an HTTP GET on url and returns the response body as
// a string. It returns an error for transport failures, non-2xx HTTP
// statuses, and body-read failures.
//
// NOTE(review): http.DefaultClient has no timeout, so a stalled server
// can block this call indefinitely — consider a client with a Timeout.
func fetchPage(url string) (string, error) {
	resp, err := http.Get(url)
	if err != nil {
		return "", fmt.Errorf("sending GET request: %w", err)
	}
	defer resp.Body.Close()

	// A transport-level success can still be an HTTP-level failure;
	// surface non-2xx statuses so callers skip such pages.
	if resp.StatusCode < 200 || resp.StatusCode > 299 {
		return "", fmt.Errorf("fetching %s: unexpected status %s", url, resp.Status)
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		// The original ignored this error and could return a truncated body.
		return "", fmt.Errorf("reading response body: %w", err)
	}
	return string(body), nil
}
|
|
|
|
|
|
|
|
// timeFormats lists the layouts tried by parseTime, in order; the first
// layout that parses the input wins. Hoisted to package level so the
// slice is not rebuilt on every call.
var timeFormats = []string{
	time.ANSIC,
	time.UnixDate,
	time.RubyDate,
	time.RFC822,
	time.RFC822Z,
	time.RFC850,
	time.RFC1123,
	time.RFC1123Z,
	time.RFC3339,
	time.RFC3339Nano,
	time.DateTime,
	time.DateOnly,
}

// parseTime attempts to parse timestr against each layout in timeFormats
// and returns a pointer to the first successful result. It returns a nil
// time and an error when no known layout matches.
func parseTime(timestr string) (*time.Time, error) {
	for _, layout := range timeFormats {
		if t, err := time.Parse(layout, timestr); err == nil {
			return &t, nil
		}
	}
	// Error string follows Go convention (lowercase, no "Error" prefix)
	// and includes the rejected input for easier debugging.
	return nil, fmt.Errorf("parsing time %q: no known layout matched", timestr)
}
|
|
|
|
|
|
|
|
// parseArticle returns an error if it could not parse the HTML or if it could not parse a time
|
|
|
|
// if a time could not be parsed, the parsed html article will still be returned
|
|
|
|
func parseArticle(content string) (string, *time.Time, error) {
|
|
|
|
doc, err := html.Parse(strings.NewReader(content))
|
|
|
|
if err != nil {
|
|
|
|
return "", nil, fmt.Errorf("Error parsing HTML: %w", err)
|
|
|
|
}
|
|
|
|
var f func(*html.Node, string)
|
|
|
|
var element *html.Node
|
|
|
|
var pagetime *time.Time
|
|
|
|
f = func(n *html.Node, tag string) {
|
|
|
|
if n.Type == html.ElementNode && n.Data == tag {
|
|
|
|
element = n
|
|
|
|
return
|
|
|
|
}
|
|
|
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
|
|
|
f(c, tag)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
f(doc, "article")
|
|
|
|
var builder strings.Builder
|
|
|
|
html.Render(&builder, element)
|
|
|
|
|
|
|
|
f(element, "time")
|
|
|
|
for _, d := range element.Attr {
|
|
|
|
if d.Key == "datetime" {
|
|
|
|
pagetime, err = parseTime(d.Val)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-01-21 06:37:00 +00:00
|
|
|
return builder.String(), pagetime, nil
|
2024-01-19 03:33:37 +00:00
|
|
|
}
|
|
|
|
|
2024-01-19 04:28:30 +00:00
|
|
|
func GenerateRss(siteUrl, siteTitle, siteDesc string, pageUrls ...string) (string, error) {
|
|
|
|
var items strings.Builder
|
|
|
|
var err error
|
|
|
|
|
|
|
|
for _, u := range pageUrls {
|
|
|
|
page, err := fetchPage(u)
|
|
|
|
if err != nil {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
article, atime, err := parseArticle(page)
|
|
|
|
if err != nil && article == "" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if atime != nil {
|
|
|
|
items.WriteString(fmt.Sprintf(itemfmt, u, u, atime.Format("Mon, 2 Jan 2006 15:04:05 MST"), article))
|
|
|
|
} else {
|
|
|
|
items.WriteString(fmt.Sprintf(itemfmt, u, u, time.Now().Format("Mon, 2 Jan 2006 15:04:05 MST"), article))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-01-21 06:37:00 +00:00
|
|
|
return fmt.Sprintf(feedfmt, siteTitle, siteUrl, siteDesc, items.String()), err
|
2024-01-19 03:33:37 +00:00
|
|
|
}
|