Made Feed.CanUpdate() public. It returns true when all cache timeout values have expired and a fresh remote update can be performed. Fixed a bug where repeated calls to feed.Fetch() created duplicate Channels instead of generating a fresh list. Did some minor code housekeeping, replacing manual slice appends with the builtin append() function.
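For reference, the removed helpers (addChannel, addItem, addLink, addEnclosure in the diffs below) grew each slice by hand with make and copy; the builtin append() does the same job. A minimal sketch of the equivalence, written in current Go syntax with a simplified stand-in Link type rather than the package's own:

package main

import "fmt"

// Simplified stand-in for the package's Link type.
type Link struct{ Href string }

// addLinkManual mirrors the removed helpers: allocate a slice one element
// larger, copy the old contents, and write the new value at the end.
func addLinkManual(links []Link, l Link) []Link {
	c := make([]Link, len(links)+1)
	copy(c, links)
	c[len(c)-1] = l
	return c
}

func main() {
	var manual, builtin []Link

	manual = addLinkManual(manual, Link{Href: "http://example.org/feed"})
	// The builtin append() performs the same grow-and-copy (amortized)
	// and works on a nil slice without prior initialization.
	builtin = append(builtin, Link{Href: "http://example.org/feed"})

	fmt.Println(manual[0] == builtin[0]) // true
}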
parent 24864b01f1
commit eb15b6a3ef
src/atom.go | 17
@@ -29,16 +29,14 @@ func (this *Feed) readAtom(doc *xmlx.Document) (err os.Error) {
 			ch.SubTitle.Text = tn.Value
 		}
 
-		tn = node.SelectNode(ns, "generator")
-		if tn != nil {
+		if tn = node.SelectNode(ns, "generator"); tn != nil {
 			ch.Generator = Generator{}
 			ch.Generator.Uri = tn.GetAttr("", "uri")
 			ch.Generator.Version = tn.GetAttr("", "version")
 			ch.Generator.Text = tn.Value
 		}
 
-		tn = node.SelectNode(ns, "author")
-		if tn != nil {
+		if tn = node.SelectNode(ns, "author"); tn != nil {
 			ch.Author = Author{}
 			ch.Author.Name = tn.GetValue("", "name")
 			ch.Author.Uri = tn.GetValue("", "uri")
@@ -61,14 +59,14 @@ func (this *Feed) readAtom(doc *xmlx.Document) (err os.Error) {
 				enc := Enclosure{}
 				enc.Url = lv.GetAttr("", "href")
 				enc.Type = lv.GetAttr("", "type")
-				item.addEnclosure(enc)
+				item.Enclosures = append(item.Enclosures, enc)
 			} else {
 				lnk := Link{}
 				lnk.Href = lv.GetAttr("", "href")
 				lnk.Rel = lv.GetAttr("", "rel")
 				lnk.Type = lv.GetAttr("", "type")
 				lnk.HrefLang = lv.GetAttr("", "hreflang")
-				item.addLink(lnk)
+				item.Links = append(item.Links, lnk)
 			}
 		}
 
@@ -78,18 +76,17 @@ func (this *Feed) readAtom(doc *xmlx.Document) (err os.Error) {
 				item.Contributors[ci] = cv.GetValue("", "name")
 			}
 
-			tn = v.SelectNode(ns, "content")
-			if tn != nil {
+			if tn = v.SelectNode(ns, "content"); tn != nil {
 				item.Content = Content{}
 				item.Content.Type = tn.GetAttr("", "type")
 				item.Content.Lang = tn.GetValue("xml", "lang")
 				item.Content.Base = tn.GetValue("xml", "base")
 				item.Content.Text = tn.Value
 			}
-			ch.addItem(item)
+			ch.Items = append(ch.Items, item)
 		}
 
-		this.addChannel(ch)
+		this.Channels = append(this.Channels, ch)
 	}
 	return
 }
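Throughout these hunks (and again in src/rss.go below), the separate SelectNode call and nil check are folded into Go's if statement with an init clause, which keeps the assignment next to the test it guards. A minimal standalone sketch of the pattern, using a hypothetical lookup helper in place of xmlx's SelectNode:

package main

import "fmt"

// lookup is a hypothetical stand-in for xmlx's node.SelectNode: it returns
// a pointer when the key exists and nil otherwise.
func lookup(m map[string]*string, key string) *string {
	return m[key]
}

func main() {
	title := "Generator 1.0"
	m := map[string]*string{"generator": &title}
	var tn *string

	// Before: separate assignment and nil check.
	tn = lookup(m, "generator")
	if tn != nil {
		fmt.Println(*tn)
	}

	// After: the init clause performs the assignment as part of the if,
	// mirroring `if tn = node.SelectNode(ns, "generator"); tn != nil`.
	if tn = lookup(m, "generator"); tn != nil {
		fmt.Println(*tn)
	}
}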
src/channel.go | 14

@@ -28,17 +28,3 @@ type Channel struct {
 	Author   Author
 	SubTitle SubTitle
 }
-
-func (this *Channel) addItem(item Item) {
-	c := make([]Item, len(this.Items)+1)
-	copy(c, this.Items)
-	c[len(c)-1] = item
-	this.Items = c
-}
-
-func (this *Channel) addLink(l Link) {
-	c := make([]Link, len(this.Links)+1)
-	copy(c, this.Links)
-	c[len(c)-1] = l
-	this.Links = c
-}
src/feed.go | 46
@@ -26,13 +26,11 @@
 package feeder
 
 import "os"
-import "http"
 import "time"
 import "xmlx"
 import "fmt"
import "strconv"
 import "strings"
-import "io/ioutil"
 
 type Feed struct {
 	// Custom cache timeout in minutes.
@@ -60,46 +58,25 @@ type Feed struct {
 }
 
 func New(cachetimeout int, enforcecachelimit bool) *Feed {
-	return &Feed{
-		CacheTimeout:      cachetimeout,
-		EnforceCacheLimit: enforcecachelimit,
-		Type:              "none",
-		Version:           [2]int{0, 0},
-		Channels:          make([]Channel, 0),
-	}
-}
-
-func (this *Feed) addChannel(ch Channel) {
-	c := make([]Channel, len(this.Channels)+1)
-	copy(c, this.Channels)
-	c[len(c)-1] = ch
-	this.Channels = c
+	v := new(Feed)
+	v.CacheTimeout = cachetimeout
+	v.EnforceCacheLimit = enforcecachelimit
+	v.Type = "none"
+	return v
 }
 
 func (this *Feed) Fetch(uri string) (err os.Error) {
-	if !this.canUpdate() {
-		return
-	}
-
-	// Fetch data from remote location.
-	r, _, err := http.Get(uri)
-	if err != nil {
-		return
-	}
-
-	defer r.Body.Close()
-
-	var b []byte
-	if b, err = ioutil.ReadAll(r.Body); err != nil {
+	if !this.CanUpdate() {
 		return
 	}
 
 	this.Url = uri
+	this.Channels = nil
 
 	// Extract type and version of the feed so we can have the appropriate
 	// function parse it (rss 0.91, rss 0.92, rss 2, atom etc).
 	doc := xmlx.New()
-	if err = doc.LoadString(string(b)); err != nil {
+	if err = doc.LoadUri(uri); err != nil {
 		return
 	}
 	this.Type, this.Version = this.GetVersionInfo(doc)
@@ -120,7 +97,12 @@ func (this *Feed) Fetch(uri string) (err os.Error) {
 	return
 }
 
-func (this *Feed) canUpdate() bool {
+// This function returns true or false, depending on whether the CacheTimeout
+// value has expired or not. Additionally, it will ensure that we adhere to the
+// RSS spec's SkipDays and SkipHours values (if Feed.EnforceCacheLimit is set to
+// true). If this function returns true, you can be sure that a fresh feed
+// update will be performed.
+func (this *Feed) CanUpdate() bool {
 	// Make sure we are not within the specified cache-limit.
 	// This ensures we don't request data too often.
 	utc := time.UTC()
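Taken together, the feed.go changes export the cache check and reset Channels before each parse, so re-fetching after the timeout rebuilds the list rather than appending duplicates. A rough usage sketch against this pre-Go 1 API; the import path and feed URL are placeholders:

package main

import "fmt"
import "feeder" // import path assumed; this is the package patched above

func main() {
	// Cache timeout of 5 minutes; enforce the RSS SkipDays/SkipHours limits.
	feed := feeder.New(5, true)
	uri := "http://example.org/feed.atom" // placeholder

	// CanUpdate() is now exported, so callers can inspect the cache state
	// without attempting a fetch.
	if feed.CanUpdate() {
		if err := feed.Fetch(uri); err != nil {
			fmt.Println("fetch failed:", err)
			return
		}
	}

	// Because Fetch() clears this.Channels before parsing, calling it again
	// after the timeout refreshes the list instead of duplicating entries.
	fmt.Println("channels:", len(feed.Channels))
}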
src/feed_test.go | 1

@@ -1,6 +1,7 @@
 package feeder
 
 import "testing"
+import "os"
 
 func TestFeed(t *testing.T) {
 	urilist := []string{
src/item.go | 14
@@ -19,17 +19,3 @@ type Item struct {
 	Contributors []string
 	Content      Content
 }
-
-func (this *Item) addEnclosure(e Enclosure) {
-	c := make([]Enclosure, len(this.Enclosures)+1)
-	copy(c, this.Enclosures)
-	c[len(c)-1] = e
-	this.Enclosures = c
-}
-
-func (this *Item) addLink(l Link) {
-	c := make([]Link, len(this.Links)+1)
-	copy(c, this.Links)
-	c[len(c)-1] = l
-	this.Links = c
-}
src/rss.go | 18
@@ -52,8 +52,7 @@ func (this *Feed) readRss2(doc *xmlx.Document) (err os.Error) {
 			ch.SkipDays[i] = mapDay(v.Value)
 		}
 
-		n = node.SelectNode("", "image")
-		if n != nil {
+		if n = node.SelectNode("", "image"); n != nil {
 			ch.Image.Title = n.GetValue("", "title")
 			ch.Image.Url = n.GetValue("", "url")
 			ch.Image.Link = n.GetValue("", "link")
@@ -62,8 +61,7 @@ func (this *Feed) readRss2(doc *xmlx.Document) (err os.Error) {
 			ch.Image.Description = n.GetValue("", "description")
 		}
 
-		n = node.SelectNode("", "cloud")
-		if n != nil {
+		if n = node.SelectNode("", "cloud"); n != nil {
 			ch.Cloud = Cloud{}
 			ch.Cloud.Domain = n.GetAttr("", "domain")
 			ch.Cloud.Port = n.GetAttri("", "port")
@@ -72,8 +70,7 @@ func (this *Feed) readRss2(doc *xmlx.Document) (err os.Error) {
 			ch.Cloud.Protocol = n.GetAttr("", "protocol")
 		}
 
-		n = node.SelectNode("", "textInput")
-		if n != nil {
+		if n = node.SelectNode("", "textInput"); n != nil {
 			ch.TextInput = Input{}
 			ch.TextInput.Title = n.GetValue("", "title")
 			ch.TextInput.Description = n.GetValue("", "description")
@@ -92,11 +89,10 @@ func (this *Feed) readRss2(doc *xmlx.Document) (err os.Error) {
 		for _, v := range list {
 			lnk := Link{}
 			lnk.Href = v.Value
-			i.addLink(lnk)
+			i.Links = append(i.Links, lnk)
 		}
 
-		n = item.SelectNode("", "author")
-		if n != nil {
+		if n = item.SelectNode("", "author"); n != nil {
 			i.Author = Author{}
 			i.Author.Name = n.Value
 		}
@@ -127,10 +123,10 @@ func (this *Feed) readRss2(doc *xmlx.Document) (err os.Error) {
 				i.Source.Text = src.Value
 			}
 
-			ch.addItem(i)
+			ch.Items = append(ch.Items, i)
 		}
 
-		this.addChannel(ch)
+		this.Channels = append(this.Channels, ch)
 	}
 	return
 }