package main

import (
	"C"
	"errors"
	"fmt"
	"net/http"
	"time"

	"github.com/PuerkitoBio/goquery"
)

// GetSourceName returns the name of this feed source.
func GetSourceName() string {
	return "xkcd"
}

// GetFeedList returns the feeds provided by this source. Each feed is
// described by a map containing its slug, display name, author, and source
// name.
func GetFeedList() []map[string]string {
	var ret []map[string]string
	feedMap := make(map[string]string)
	feedMap["slug"] = "xkcd"
	feedMap["name"] = "XKCD"
	feedMap["author"] = "Randall Munroe"
	feedMap["source"] = GetSourceName()
	ret = append(ret, feedMap)
	return ret
}
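
// exampleListFeeds is an illustrative, hypothetical sketch and not part of the
// source's exported interface. It shows how a host application might iterate
// over the maps returned by GetFeedList.
func exampleListFeeds() {
	for _, feed := range GetFeedList() {
		// Each map carries the slug, display name, author, and source name.
		fmt.Printf("%s (%s) by %s, via %s\n", feed["name"], feed["slug"], feed["author"], feed["source"])
	}
}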

// getRssItem is currently commented out; it references identifiers
// (getFeedDesc, m, SRC_XKCD, getUrl) that do not appear in this file.
/*
func getRssItem(slug string) (string, error) {
	desc, err := getFeedDesc(time.Now())
	if err != nil {
		return "", err
	}
	feed, err := m.GetFeed(SRC_XKCD, slug)
	if err != nil {
		return "", err
	}
	desc = "<![CDATA[" + desc + "]]>"
	ret := " <item>\n"
	ret += " <title>" + feed.Name + "</title>\n"
	ret += " <pubDate>" + feed.LastUpdate.Format(time.RFC1123Z) + "</pubDate>\n"
	ret += " <guid>xkcd;" + slug + ";" + feed.LastUpdate.Format(time.RFC1123Z) + "</guid>\n"
	ret += " <link>" + getUrl(time.Now()) + "</link>\n"
	ret += " <description>" + desc + "</description>\n"
	ret += " </item>\n"
	return ret, nil
}
*/

// GetFeedUrl returns the xkcd comic URL for the given date. It estimates the
// comic number by counting the Monday/Wednesday/Friday release days between a
// fixed baseline date and dt, and returns an error if dt is not a release day.
func GetFeedUrl(slug string, dt time.Time) (string, error) {
	isComicDay := func(dt time.Time) bool {
		return dt.Weekday() == time.Monday || dt.Weekday() == time.Wednesday || dt.Weekday() == time.Friday
	}
	if !isComicDay(dt) {
		return "", errors.New("no URL for the given day")
	}
	// Count release days from the baseline date up to dt; the count is used as
	// the comic number.
	var num int
	wrkDate := time.Date(2005, time.August, 19, 0, 0, 0, 0, time.UTC)
	for wrkDate.Before(dt) {
		if isComicDay(wrkDate) {
			num++
		}
		wrkDate = wrkDate.Add(time.Hour * 24)
	}
	return fmt.Sprintf("https://xkcd.com/%d", num), nil
}
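
// exampleFeedURL is an illustrative, hypothetical sketch and not part of the
// source's exported interface. It shows how GetFeedUrl maps a Mon/Wed/Fri date
// to a comic URL and returns an error for other weekdays.
func exampleFeedURL() {
	// 2019-01-09 was a Wednesday, so a comic URL is expected.
	if url, err := GetFeedUrl("xkcd", time.Date(2019, time.January, 9, 12, 0, 0, 0, time.UTC)); err == nil {
		fmt.Println(url)
	}

	// 2019-01-10 was a Thursday, so an error is expected.
	if _, err := GetFeedUrl("xkcd", time.Date(2019, time.January, 10, 12, 0, 0, 0, time.UTC)); err != nil {
		fmt.Println("no comic on that day:", err)
	}
}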

// GetFeedDesc fetches the comic page for the given date and returns an HTML
// snippet containing the comic image and its hover (title) text.
func GetFeedDesc(slug string, dt time.Time) (string, error) {
	var url string
	var err error
	if url, err = GetFeedUrl(slug, dt); err != nil {
		return "", err
	}
	res, err := http.Get(url)
	if err != nil {
		return "", err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		return "", fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
	}

	// Load the HTML document
	doc, err := goquery.NewDocumentFromReader(res.Body)
	if err != nil {
		return "", err
	}

	// Find the comic image and its title text
	sel := doc.Find("div#comic>img")
	src, exists := sel.Attr("src")
	if !exists {
		return "", errors.New("couldn't find image source")
	}
	// The src is assumed to be protocol-relative, so prepend the scheme.
	src = "https:" + src
	title, exists := sel.Attr("title")
	if !exists {
		title = ""
	}
	return "<img src=\"" + src + "\" /><p>" + title + "</p>", nil
}
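
// exampleFeedDesc is an illustrative, hypothetical sketch and not part of the
// source's exported interface. It shows how a host might call GetFeedDesc and
// embed the returned HTML fragment in an RSS <description> element.
func exampleFeedDesc() {
	desc, err := GetFeedDesc("xkcd", time.Now())
	if err != nil {
		fmt.Println("could not build a description:", err)
		return
	}
	fmt.Println("<description><![CDATA[" + desc + "]]></description>")
}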