Fixed code according to new Go release requirements.

This commit is contained in:
jim teeuwen 2010-05-23 16:21:30 +02:00
parent 62f2d4170f
commit 49ab919bd7
18 changed files with 331 additions and 321 deletions

2
README
View File

@ -10,7 +10,7 @@
- Atom 1.0 - Atom 1.0
The package allows us to maintain cache timeout management. This prevents us
from querying the servers for feed updates too often and risk ip bans. Apart
from setting a cache timeout manually, the package also optionally adheres to
the TTL, SkipDays and SkipHours values specified in the feeds themselves.

View File

@ -4,94 +4,93 @@ import "os"
import "xmlx" import "xmlx"
func (this *Feed) readAtom(doc *xmlx.Document) (err os.Error) { func (this *Feed) readAtom(doc *xmlx.Document) (err os.Error) {
ns := "http://www.w3.org/2005/Atom"; ns := "http://www.w3.org/2005/Atom"
channels := doc.SelectNodes(ns, "feed"); channels := doc.SelectNodes(ns, "feed")
for _, node := range channels { for _, node := range channels {
ch := Channel{}; ch := Channel{}
ch.Title = node.GetValue(ns, "title"); ch.Title = node.GetValue(ns, "title")
ch.LastBuildDate = node.GetValue(ns, "updated"); ch.LastBuildDate = node.GetValue(ns, "updated")
ch.Id = node.GetValue(ns, "id"); ch.Id = node.GetValue(ns, "id")
ch.Rights = node.GetValue(ns, "rights"); ch.Rights = node.GetValue(ns, "rights")
list := node.SelectNodes(ns, "link"); list := node.SelectNodes(ns, "link")
ch.Links = make([]Link, len(list)); ch.Links = make([]Link, len(list))
for i, v := range list { for i, v := range list {
ch.Links[i].Href = v.GetAttr("", "href"); ch.Links[i].Href = v.GetAttr("", "href")
ch.Links[i].Rel = v.GetAttr("", "rel"); ch.Links[i].Rel = v.GetAttr("", "rel")
ch.Links[i].Type = v.GetAttr("", "type"); ch.Links[i].Type = v.GetAttr("", "type")
ch.Links[i].HrefLang = v.GetAttr("", "hreflang"); ch.Links[i].HrefLang = v.GetAttr("", "hreflang")
} }
tn := node.SelectNode(ns, "subtitle"); tn := node.SelectNode(ns, "subtitle")
if tn != nil { if tn != nil {
ch.SubTitle = SubTitle{}; ch.SubTitle = SubTitle{}
ch.SubTitle.Type = tn.GetAttr("", "type"); ch.SubTitle.Type = tn.GetAttr("", "type")
ch.SubTitle.Text = tn.Value; ch.SubTitle.Text = tn.Value
} }
tn = node.SelectNode(ns, "generator"); tn = node.SelectNode(ns, "generator")
if tn != nil { if tn != nil {
ch.Generator = Generator{}; ch.Generator = Generator{}
ch.Generator.Uri = tn.GetAttr("", "uri"); ch.Generator.Uri = tn.GetAttr("", "uri")
ch.Generator.Version = tn.GetAttr("", "version"); ch.Generator.Version = tn.GetAttr("", "version")
ch.Generator.Text = tn.Value; ch.Generator.Text = tn.Value
} }
tn = node.SelectNode(ns, "author"); tn = node.SelectNode(ns, "author")
if tn != nil { if tn != nil {
ch.Author = Author{}; ch.Author = Author{}
ch.Author.Name = tn.GetValue("", "name"); ch.Author.Name = tn.GetValue("", "name")
ch.Author.Uri = tn.GetValue("", "uri"); ch.Author.Uri = tn.GetValue("", "uri")
ch.Author.Email = tn.GetValue("", "email"); ch.Author.Email = tn.GetValue("", "email")
} }
list = node.SelectNodes(ns, "entry"); list = node.SelectNodes(ns, "entry")
ch.Items = make([]Item, len(list)); ch.Items = make([]Item, len(list))
for _, v := range list { for _, v := range list {
item := Item{}; item := Item{}
item.Title = v.GetValue(ns, "title"); item.Title = v.GetValue(ns, "title")
item.Id = v.GetValue(ns, "id"); item.Id = v.GetValue(ns, "id")
item.PubDate = v.GetValue(ns, "updated"); item.PubDate = v.GetValue(ns, "updated")
item.Description = v.GetValue(ns, "summary"); item.Description = v.GetValue(ns, "summary")
list = v.SelectNodes(ns, "link"); list = v.SelectNodes(ns, "link")
item.Links = make([]Link, 0); item.Links = make([]Link, 0)
for _, lv := range list { for _, lv := range list {
if tn.GetAttr(ns, "rel") == "enclosure" { if tn.GetAttr(ns, "rel") == "enclosure" {
enc := Enclosure{}; enc := Enclosure{}
enc.Url = lv.GetAttr("", "href"); enc.Url = lv.GetAttr("", "href")
enc.Type = lv.GetAttr("", "type"); enc.Type = lv.GetAttr("", "type")
item.addEnclosure(enc); item.addEnclosure(enc)
} else { } else {
lnk := Link{}; lnk := Link{}
lnk.Href = lv.GetAttr("", "href"); lnk.Href = lv.GetAttr("", "href")
lnk.Rel = lv.GetAttr("", "rel"); lnk.Rel = lv.GetAttr("", "rel")
lnk.Type = lv.GetAttr("", "type"); lnk.Type = lv.GetAttr("", "type")
lnk.HrefLang = lv.GetAttr("", "hreflang"); lnk.HrefLang = lv.GetAttr("", "hreflang")
item.addLink(lnk); item.addLink(lnk)
} }
} }
list = v.SelectNodes(ns, "contributor"); list = v.SelectNodes(ns, "contributor")
item.Contributors = make([]string, len(list)); item.Contributors = make([]string, len(list))
for ci, cv := range list { for ci, cv := range list {
item.Contributors[ci] = cv.GetValue("", "name"); item.Contributors[ci] = cv.GetValue("", "name")
} }
tn = v.SelectNode(ns, "content"); tn = v.SelectNode(ns, "content")
if tn != nil { if tn != nil {
item.Content = Content{}; item.Content = Content{}
item.Content.Type = tn.GetAttr("", "type"); item.Content.Type = tn.GetAttr("", "type")
item.Content.Lang = tn.GetValue("xml", "lang"); item.Content.Lang = tn.GetValue("xml", "lang")
item.Content.Base = tn.GetValue("xml", "base"); item.Content.Base = tn.GetValue("xml", "base")
item.Content.Text = tn.Value; item.Content.Text = tn.Value
} }
ch.addItem(item); ch.addItem(item)
} }
this.addChannel(ch); this.addChannel(ch)
} }
return return
} }

View File

@ -1,7 +1,7 @@
package feeder package feeder
// Author describes the person behind a feed or entry, as read from an
// Atom <author> element by readAtom.
type Author struct {
	Name  string // Human-readable author name.
	Uri   string // Author URI (home page or profile).
	Email string // Contact e-mail address.
}

View File

@ -1,6 +1,6 @@
package feeder package feeder
// Category is a single <category> element of a channel or item:
// a label plus the domain attribute qualifying it.
type Category struct {
	Domain string // Value of the "domain" attribute.
	Text   string // Category label (element text).
}

View File

@ -1,49 +1,49 @@
package feeder package feeder
type Channel struct { type Channel struct {
Title string; Title string
Links []Link; Links []Link
Description string; Description string
Language string; Language string
Copyright string; Copyright string
ManagingEditor string; ManagingEditor string
WebMaster string; WebMaster string
PubDate string; PubDate string
LastBuildDate string; LastBuildDate string
Docs string; Docs string
Categories []Category; Categories []Category
Generator Generator; Generator Generator
TTL int; TTL int
Rating string; Rating string
SkipHours []int; SkipHours []int
SkipDays []int; SkipDays []int
Image Image; Image Image
Items []Item; Items []Item
Cloud Cloud; Cloud Cloud
TextInput Input; TextInput Input
// Atom fields // Atom fields
Id string; Id string
Rights string; Rights string
Author Author; Author Author
SubTitle SubTitle; SubTitle SubTitle
} }
func (this *Channel) addItem(item Item) { func (this *Channel) addItem(item Item) {
slice := make([]Item, len(this.Items) + 1); slice := make([]Item, len(this.Items)+1)
for i,v := range this.Items { for i, v := range this.Items {
slice[i] = v; slice[i] = v
} }
slice[len(slice) - 1] = item; slice[len(slice)-1] = item
this.Items = slice; this.Items = slice
} }
func (this *Channel) addLink(l Link) { func (this *Channel) addLink(l Link) {
slice := make([]Link, len(this.Links) + 1); slice := make([]Link, len(this.Links)+1)
for i,v := range this.Links { for i, v := range this.Links {
slice[i] = v; slice[i] = v
} }
slice[len(slice) - 1] = l; slice[len(slice)-1] = l
this.Links = slice; this.Links = slice
} }

View File

@ -1,10 +1,9 @@
package feeder package feeder
// Cloud mirrors the RSS <cloud> element, which advertises a
// publish-subscribe endpoint for feed update notifications.
type Cloud struct {
	Domain            string // Host to contact.
	Port              int    // TCP port of the service.
	Path              string // Request path.
	RegisterProcedure string // Procedure name to call when registering.
	Protocol          string // Wire protocol (attribute value as-is).
}

View File

@ -1,8 +1,8 @@
package feeder package feeder
// Content carries the body of an Atom <content> element together with
// its type and the xml:lang / xml:base attributes read by readAtom.
type Content struct {
	Type string // Media/content type attribute.
	Lang string // xml:lang value.
	Base string // xml:base value.
	Text string // Element text.
}

View File

@ -1,8 +1,7 @@
package feeder package feeder
// Enclosure describes a media attachment (RSS <enclosure> or an Atom
// link with rel="enclosure").
type Enclosure struct {
	Url    string // Location of the attachment.
	Length int64  // Size in bytes, when provided by the feed.
	Type   string // Media type attribute.
}

View File

@ -15,7 +15,7 @@
- Atom 1.0 - Atom 1.0
The package allows us to maintain cache timeout management. This prevents us
from querying the servers for feed updates too often and risk ip bans. Apart
from setting a cache timeout manually, the package also optionally adheres to
the TTL, SkipDays and SkipHours values specified in the feeds themselves.
@ -31,100 +31,106 @@ package feeder
import "os" import "os"
import "http" import "http"
import "io"
import "time" import "time"
import "xmlx" import "xmlx"
import "fmt" import "fmt"
import "strconv" import "strconv"
import "strings" import "strings"
import "io/ioutil"
type Feed struct { type Feed struct {
// Custom cache timeout in minutes. // Custom cache timeout in minutes.
CacheTimeout int; CacheTimeout int
// Make sure we adhere to the cache timeout specified in the feed. If // Make sure we adhere to the cache timeout specified in the feed. If
// our CacheTimeout is higher than that, we will use that instead. // our CacheTimeout is higher than that, we will use that instead.
EnforceCacheLimit bool; EnforceCacheLimit bool
// Type of feed. Rss, Atom, etc // Type of feed. Rss, Atom, etc
Type string; Type string
// Version of the feed. Major and Minor. // Version of the feed. Major and Minor.
Version [2]int; Version [2]int
// Channels with content. // Channels with content.
Channels []Channel; Channels []Channel
// Url from which this feed was created. // Url from which this feed was created.
Url string; Url string
// Last time content was fetched. Used in conjunction with CacheTimeout // Last time content was fetched. Used in conjunction with CacheTimeout
// to ensure we don't get content too often. // to ensure we don't get content too often.
lastupdate int64; lastupdate int64
} }
func New(cachetimeout int, enforcecachelimit bool) *Feed { func New(cachetimeout int, enforcecachelimit bool) *Feed {
return &Feed{ return &Feed{
CacheTimeout: cachetimeout, CacheTimeout: cachetimeout,
EnforceCacheLimit: enforcecachelimit, EnforceCacheLimit: enforcecachelimit,
Type: "none", Type: "none",
Version: [2]int{0, 0}, Version: [2]int{0, 0},
Channels: make([]Channel, 0), Channels: make([]Channel, 0),
} }
} }
func (this *Feed) addChannel(ch Channel) { func (this *Feed) addChannel(ch Channel) {
slice := make([]Channel, len(this.Channels) + 1); slice := make([]Channel, len(this.Channels)+1)
for i,v := range this.Channels { for i, v := range this.Channels {
slice[i] = v; slice[i] = v
} }
slice[len(slice) - 1] = ch; slice[len(slice)-1] = ch
this.Channels = slice; this.Channels = slice
} }
func (this *Feed) Fetch(uri string) (err os.Error) { func (this *Feed) Fetch(uri string) (err os.Error) {
if !this.canUpdate() { return } if !this.canUpdate() {
return
}
// Fetch data from remote location. // Fetch data from remote location.
r, _, err := http.Get(uri); r, _, err := http.Get(uri)
if err != nil { return } if err != nil {
return
}
defer r.Body.Close(); defer r.Body.Close()
b, err := io.ReadAll(r.Body); var b []byte
if err != nil { return } if b, err = ioutil.ReadAll(r.Body); err != nil {
content := string(b); return
}
this.Url = uri; this.Url = uri
// Extract type and version of the feed so we can have the appropriate // Extract type and version of the feed so we can have the appropriate
// function parse it (rss 0.91, rss 0.92, rss 2, atom etc). // function parse it (rss 0.91, rss 0.92, rss 2, atom etc).
doc := xmlx.New(); doc := xmlx.New()
err = doc.LoadString(content); if err = doc.LoadString(string(b)); err != nil {
if err != nil { return } return
this.Type, this.Version = this.GetVersionInfo(doc); }
this.Type, this.Version = this.GetVersionInfo(doc)
ok := this.testVersions(); if ok := this.testVersions(); !ok {
if !ok { err = os.NewError(fmt.Sprintf("Unsupported feed: %s, version: %+v", this.Type, this.Version))
err = os.NewError(fmt.Sprintf("Unsupported feed: %s, version: %+v", this.Type, this.Version)); return
return;
} }
err = this.buildFeed(doc); if err = this.buildFeed(doc); err != nil || len(this.Channels) == 0 {
if err != nil || len(this.Channels) == 0 { return } return
}
// reset cache timeout values according to feed specified values (TTL) // reset cache timeout values according to feed specified values (TTL)
if this.EnforceCacheLimit && this.CacheTimeout < this.Channels[0].TTL { if this.EnforceCacheLimit && this.CacheTimeout < this.Channels[0].TTL {
this.CacheTimeout = this.Channels[0].TTL; this.CacheTimeout = this.Channels[0].TTL
} }
return; return
} }
func (this *Feed) canUpdate() bool { func (this *Feed) canUpdate() bool {
// Make sure we are not within the specified cache-limit. // Make sure we are not within the specified cache-limit.
// This ensures we don't request data too often. // This ensures we don't request data too often.
utc := time.UTC(); utc := time.UTC()
if utc.Seconds() - this.lastupdate < int64(this.CacheTimeout * 60) { if utc.Seconds()-this.lastupdate < int64(this.CacheTimeout*60) {
return false return false
} }
@ -132,7 +138,7 @@ func (this *Feed) canUpdate() bool {
// we can update. // we can update.
if len(this.Channels) == 0 && this.Type == "rss" { if len(this.Channels) == 0 && this.Type == "rss" {
if this.EnforceCacheLimit && len(this.Channels[0].SkipDays) > 0 { if this.EnforceCacheLimit && len(this.Channels[0].SkipDays) > 0 {
for _ ,v := range this.Channels[0].SkipDays { for _, v := range this.Channels[0].SkipDays {
if v == utc.Weekday { if v == utc.Weekday {
return false return false
} }
@ -140,7 +146,7 @@ func (this *Feed) canUpdate() bool {
} }
if this.EnforceCacheLimit && len(this.Channels[0].SkipHours) > 0 { if this.EnforceCacheLimit && len(this.Channels[0].SkipHours) > 0 {
for _ ,v := range this.Channels[0].SkipHours { for _, v := range this.Channels[0].SkipHours {
if v == utc.Hour { if v == utc.Hour {
return false return false
} }
@ -148,14 +154,16 @@ func (this *Feed) canUpdate() bool {
} }
} }
this.lastupdate = utc.Seconds(); this.lastupdate = utc.Seconds()
return true return true
} }
func (this *Feed) buildFeed(doc *xmlx.Document) (err os.Error) { func (this *Feed) buildFeed(doc *xmlx.Document) (err os.Error) {
switch this.Type { switch this.Type {
case "rss": err = this.readRss2(doc); case "rss":
case "atom": err = this.readAtom(doc); err = this.readRss2(doc)
case "atom":
err = this.readAtom(doc)
} }
return return
} }
@ -176,30 +184,33 @@ func (this *Feed) testVersions() bool {
return false return false
} }
return true; return true
} }
func (this *Feed) GetVersionInfo(doc *xmlx.Document) (ftype string, fversion [2]int) { func (this *Feed) GetVersionInfo(doc *xmlx.Document) (ftype string, fversion [2]int) {
node := doc.SelectNode("http://www.w3.org/2005/Atom", "feed"); node := doc.SelectNode("http://www.w3.org/2005/Atom", "feed")
if node == nil { goto rss } if node == nil {
ftype = "atom"; goto rss
fversion = [2]int{1, 0}; }
return; ftype = "atom"
fversion = [2]int{1, 0}
return
rss: rss:
node = doc.SelectNode("", "rss"); node = doc.SelectNode("", "rss")
if node == nil { goto end } if node == nil {
ftype = "rss"; goto end
version := node.GetAttr("", "version"); }
p := strings.Index(version, "."); ftype = "rss"
major, _ := strconv.Atoi(version[0:p]); version := node.GetAttr("", "version")
minor, _ := strconv.Atoi(version[p+1 : len(version)]); p := strings.Index(version, ".")
fversion = [2]int{major, minor}; major, _ := strconv.Atoi(version[0:p])
return; minor, _ := strconv.Atoi(version[p+1 : len(version)])
fversion = [2]int{major, minor}
return
end: end:
ftype = "unknown"; ftype = "unknown"
fversion = [2]int{0, 0}; fversion = [2]int{0, 0}
return; return
} }

View File

@ -8,15 +8,14 @@ func TestFeed(t *testing.T) {
"http://cyber.law.harvard.edu/rss/examples/sampleRss092.xml", "http://cyber.law.harvard.edu/rss/examples/sampleRss092.xml",
"http://cyber.law.harvard.edu/rss/examples/rss2sample.xml", "http://cyber.law.harvard.edu/rss/examples/rss2sample.xml",
"http://blog.case.edu/news/feed.atom", "http://blog.case.edu/news/feed.atom",
}; }
for _, uri := range urilist { for _, uri := range urilist {
feed := New(5, true); feed := New(5, true)
err := feed.Fetch(uri); err := feed.Fetch(uri)
if err != nil { if err != nil {
t.Errorf("%s >>> %s", uri, err); t.Errorf("%s >>> %s", uri, err)
continue; continue
} }
} }
} }

View File

@ -1,8 +1,7 @@
package feeder package feeder
// Generator identifies the software that produced the feed
// (Atom <generator> element; RSS stores only the text).
type Generator struct {
	Uri     string // "uri" attribute (Atom only).
	Version string // "version" attribute (Atom only).
	Text    string // Element text (the generator name).
}

View File

@ -1,10 +1,10 @@
package feeder package feeder
// Image mirrors the RSS channel <image> element.
type Image struct {
	Title       string // Image title.
	Url         string // Location of the image file.
	Link        string // Link target associated with the image.
	Width       int    // Pixel width, when specified.
	Height      int    // Pixel height, when specified.
	Description string // Optional description text.
}

View File

@ -1,8 +1,8 @@
package feeder package feeder
// Input mirrors the RSS channel <textInput> element.
type Input struct {
	Title       string // Button label.
	Description string // Explanation of the input box.
	Name        string // Name of the text object.
	Link        string // URL the input is submitted to.
}

View File

@ -2,38 +2,38 @@ package feeder
type Item struct { type Item struct {
// RSS and Shared fields // RSS and Shared fields
Title string; Title string
Links []Link; Links []Link
Description string; Description string
Author Author; Author Author
Categories []Category; Categories []Category
Comments string; Comments string
Enclosures []Enclosure; Enclosures []Enclosure
Guid string; Guid string
PubDate string; PubDate string
Source Source; Source Source
// Atom specific fields // Atom specific fields
Id string; Id string
Generator Generator; Generator Generator
Contributors []string; Contributors []string
Content Content; Content Content
} }
func (this *Item) addEnclosure(e Enclosure) { func (this *Item) addEnclosure(e Enclosure) {
slice := make([]Enclosure, len(this.Enclosures) + 1); slice := make([]Enclosure, len(this.Enclosures)+1)
for i,v := range this.Enclosures { for i, v := range this.Enclosures {
slice[i] = v; slice[i] = v
} }
slice[len(slice) - 1] = e; slice[len(slice)-1] = e
this.Enclosures = slice; this.Enclosures = slice
} }
func (this *Item) addLink(l Link) { func (this *Item) addLink(l Link) {
slice := make([]Link, len(this.Links) + 1); slice := make([]Link, len(this.Links)+1)
for i,v := range this.Links { for i, v := range this.Links {
slice[i] = v; slice[i] = v
} }
slice[len(slice) - 1] = l; slice[len(slice)-1] = l
this.Links = slice; this.Links = slice
} }

View File

@ -1,9 +1,8 @@
package feeder package feeder
// Link holds one feed link; RSS fills only Href, Atom fills all
// four attributes.
type Link struct {
	Href     string // Link target.
	Rel      string // Relation attribute (Atom).
	Type     string // Media type attribute (Atom).
	HrefLang string // Language attribute (Atom).
}

View File

@ -4,147 +4,153 @@ import "os"
import "xmlx" import "xmlx"
func (this *Feed) readRss2(doc *xmlx.Document) (err os.Error) { func (this *Feed) readRss2(doc *xmlx.Document) (err os.Error) {
channels := doc.SelectNodes("", "channel"); channels := doc.SelectNodes("", "channel")
for _, node := range channels { for _, node := range channels {
ch := Channel{}; ch := Channel{}
ch.Title = node.GetValue("", "title"); ch.Title = node.GetValue("", "title")
list := node.SelectNodes("", "link"); list := node.SelectNodes("", "link")
ch.Links = make([]Link, len(list)); ch.Links = make([]Link, len(list))
for i, v := range list { for i, v := range list {
ch.Links[i].Href = v.Value; ch.Links[i].Href = v.Value
} }
ch.Description = node.GetValue("", "description"); ch.Description = node.GetValue("", "description")
ch.Language = node.GetValue("", "language"); ch.Language = node.GetValue("", "language")
ch.Copyright = node.GetValue("", "copyright"); ch.Copyright = node.GetValue("", "copyright")
ch.ManagingEditor = node.GetValue("", "managingEditor"); ch.ManagingEditor = node.GetValue("", "managingEditor")
ch.WebMaster = node.GetValue("", "webMaster"); ch.WebMaster = node.GetValue("", "webMaster")
ch.PubDate = node.GetValue("", "pubDate"); ch.PubDate = node.GetValue("", "pubDate")
ch.LastBuildDate = node.GetValue("", "lastBuildDate"); ch.LastBuildDate = node.GetValue("", "lastBuildDate")
ch.Docs = node.GetValue("", "docs"); ch.Docs = node.GetValue("", "docs")
list = node.SelectNodes("", "category"); list = node.SelectNodes("", "category")
ch.Categories = make([]Category, len(list)); ch.Categories = make([]Category, len(list))
for i, v := range list { for i, v := range list {
ch.Categories[i].Domain = v.GetAttr("", "domain"); ch.Categories[i].Domain = v.GetAttr("", "domain")
ch.Categories[i].Text = v.Value; ch.Categories[i].Text = v.Value
} }
n := node.SelectNode("", "generator"); n := node.SelectNode("", "generator")
if n != nil { if n != nil {
ch.Generator = Generator{}; ch.Generator = Generator{}
ch.Generator.Text = n.Value; ch.Generator.Text = n.Value
} }
ch.TTL = node.GetValuei("", "ttl"); ch.TTL = node.GetValuei("", "ttl")
ch.Rating = node.GetValue("", "rating"); ch.Rating = node.GetValue("", "rating")
list = node.SelectNodes("", "hour"); list = node.SelectNodes("", "hour")
ch.SkipHours = make([]int, len(list)); ch.SkipHours = make([]int, len(list))
for i, v := range list { for i, v := range list {
ch.SkipHours[i] = int(v.GetValuei("", "hour")); ch.SkipHours[i] = int(v.GetValuei("", "hour"))
} }
list = node.SelectNodes("", "days"); list = node.SelectNodes("", "days")
ch.SkipDays = make([]int, len(list)); ch.SkipDays = make([]int, len(list))
for i, v := range list { for i, v := range list {
ch.SkipDays[i] = mapDay(v.Value); ch.SkipDays[i] = mapDay(v.Value)
} }
n = node.SelectNode("", "image"); n = node.SelectNode("", "image")
if n != nil { if n != nil {
ch.Image.Title = n.GetValue("", "title"); ch.Image.Title = n.GetValue("", "title")
ch.Image.Url = n.GetValue("", "url"); ch.Image.Url = n.GetValue("", "url")
ch.Image.Link = n.GetValue("", "link"); ch.Image.Link = n.GetValue("", "link")
ch.Image.Width = n.GetValuei("", "width"); ch.Image.Width = n.GetValuei("", "width")
ch.Image.Height = n.GetValuei("", "height"); ch.Image.Height = n.GetValuei("", "height")
ch.Image.Description = n.GetValue("", "description"); ch.Image.Description = n.GetValue("", "description")
} }
n = node.SelectNode("", "cloud"); n = node.SelectNode("", "cloud")
if n != nil { if n != nil {
ch.Cloud = Cloud{}; ch.Cloud = Cloud{}
ch.Cloud.Domain = n.GetAttr("", "domain"); ch.Cloud.Domain = n.GetAttr("", "domain")
ch.Cloud.Port = n.GetAttri("", "port"); ch.Cloud.Port = n.GetAttri("", "port")
ch.Cloud.Path = n.GetAttr("", "path"); ch.Cloud.Path = n.GetAttr("", "path")
ch.Cloud.RegisterProcedure = n.GetAttr("", "registerProcedure"); ch.Cloud.RegisterProcedure = n.GetAttr("", "registerProcedure")
ch.Cloud.Protocol = n.GetAttr("", "protocol"); ch.Cloud.Protocol = n.GetAttr("", "protocol")
} }
n = node.SelectNode("", "textInput"); n = node.SelectNode("", "textInput")
if n != nil { if n != nil {
ch.TextInput = Input{}; ch.TextInput = Input{}
ch.TextInput.Title = n.GetValue("", "title"); ch.TextInput.Title = n.GetValue("", "title")
ch.TextInput.Description = n.GetValue("", "description"); ch.TextInput.Description = n.GetValue("", "description")
ch.TextInput.Name = n.GetValue("", "name"); ch.TextInput.Name = n.GetValue("", "name")
ch.TextInput.Link = n.GetValue("", "link"); ch.TextInput.Link = n.GetValue("", "link")
} }
list = node.SelectNodes("", "item"); list = node.SelectNodes("", "item")
for _, item := range list { for _, item := range list {
i := Item{}; i := Item{}
i.Title = item.GetValue("", "title"); i.Title = item.GetValue("", "title")
i.Description = item.GetValue("", "description"); i.Description = item.GetValue("", "description")
list = node.SelectNodes("", "link"); list = node.SelectNodes("", "link")
i.Links = make([]Link, 0); i.Links = make([]Link, 0)
for _, v := range list { for _, v := range list {
lnk := Link{}; lnk := Link{}
lnk.Href = v.Value; lnk.Href = v.Value
i.addLink(lnk); i.addLink(lnk)
} }
n = item.SelectNode("", "author"); n = item.SelectNode("", "author")
if n != nil { if n != nil {
i.Author = Author{}; i.Author = Author{}
i.Author.Name = n.Value; i.Author.Name = n.Value
} }
i.Comments = item.GetValue("", "comments"); i.Comments = item.GetValue("", "comments")
i.Guid = item.GetValue("", "guid"); i.Guid = item.GetValue("", "guid")
i.PubDate = item.GetValue("", "pubDate"); i.PubDate = item.GetValue("", "pubDate")
list := item.SelectNodes("", "category"); list := item.SelectNodes("", "category")
i.Categories = make([]Category, len(list)); i.Categories = make([]Category, len(list))
for li, lv := range list { for li, lv := range list {
i.Categories[li].Domain = lv.GetAttr("", "domain"); i.Categories[li].Domain = lv.GetAttr("", "domain")
i.Categories[li].Text = lv.Value; i.Categories[li].Text = lv.Value
} }
list = item.SelectNodes("", "enclosure"); list = item.SelectNodes("", "enclosure")
i.Enclosures = make([]Enclosure, len(list)); i.Enclosures = make([]Enclosure, len(list))
for li, lv := range list { for li, lv := range list {
i.Enclosures[li].Url = lv.GetAttr("", "url"); i.Enclosures[li].Url = lv.GetAttr("", "url")
i.Enclosures[li].Length = lv.GetAttri64("", "length"); i.Enclosures[li].Length = lv.GetAttri64("", "length")
i.Enclosures[li].Type = lv.GetAttr("", "type"); i.Enclosures[li].Type = lv.GetAttr("", "type")
} }
src := item.SelectNode("", "source"); src := item.SelectNode("", "source")
if src != nil { if src != nil {
i.Source = Source{}; i.Source = Source{}
i.Source.Url = src.GetAttr("", "url"); i.Source.Url = src.GetAttr("", "url")
i.Source.Text = src.Value; i.Source.Text = src.Value
} }
ch.addItem(i); ch.addItem(i)
} }
this.addChannel(ch); this.addChannel(ch)
} }
return return
} }
// mapDay converts an English weekday name to its 1-based number
// (Monday = 1 ... Sunday = 7). Any unrecognised name maps to 1,
// matching the original switch's fall-through behaviour.
func mapDay(day string) int {
	days := map[string]int{
		"Monday":    1,
		"Tuesday":   2,
		"Wednesday": 3,
		"Thursday":  4,
		"Friday":    5,
		"Saturday":  6,
		"Sunday":    7,
	}
	if n, ok := days[day]; ok {
		return n
	}
	return 1
}

View File

@ -1,6 +1,6 @@
package feeder package feeder
// Source mirrors the RSS item <source> element: the originating
// channel's URL plus its title text.
type Source struct {
	Url  string // "url" attribute of the source element.
	Text string // Element text.
}

View File

@ -1,7 +1,6 @@
package feeder package feeder
// SubTitle holds the Atom <subtitle> element: its type attribute
// and text, as read by readAtom.
type SubTitle struct {
	Type string // "type" attribute of the subtitle.
	Text string // Element text.
}