Cleanup relative callback urls

Peter Stuifzand 2019-03-20 10:28:10 +01:00
parent e631dcc813
commit 414d2cb78e
Signed by: peter
GPG Key ID: 374322D56E5209E8


@@ -5,6 +5,7 @@ import (
 	"io"
 	"log"
 	"net/http"
+	"net/url"
 	"strconv"
 	"strings"
 	"time"
@@ -21,7 +22,8 @@ const LeaseSeconds = 24 * 60 * 60
 // HubBackend handles information for the incoming handler
 type HubBackend interface {
-	GetFeeds() []Feed
+	GetFeeds() []Feed // Deprecated
+	Feeds() ([]Feed, error)
 	CreateFeed(url, channel string) (int64, error)
 	GetSecret(feedID int64) string
 	UpdateFeed(feedID int64, contentType string, body io.Reader) error
@@ -138,7 +140,18 @@ func (h *hubIncomingBackend) FeedSetLeaseSeconds(feedID int64, leaseSeconds int64
 	return nil
 }
 
+// GetFeeds is deprecated, use Feeds instead
 func (h *hubIncomingBackend) GetFeeds() []Feed {
+	log.Println("GetFeeds called, consider replacing with Feeds")
+	feeds, err := h.Feeds()
+	if err != nil {
+		log.Printf("Feeds returned an error: %v", err)
+	}
+	return feeds
+}
+
+// Feeds returns a list of subscribed feeds
+func (h *hubIncomingBackend) Feeds() ([]Feed, error) {
 	conn := pool.Get()
 	defer conn.Close()
 	feeds := []Feed{}
@@ -146,29 +159,48 @@ func (h *hubIncomingBackend) GetFeeds() []Feed {
 	// FIXME(peter): replace with set of currently checked feeds
 	feedKeys, err := redis.Strings(conn.Do("KEYS", "feed:*"))
 	if err != nil {
-		log.Println(err)
-		return feeds
+		return nil, errors.Wrap(err, "could not get feeds from backend")
 	}
 
 	for _, feedKey := range feedKeys {
 		var feed Feed
 
 		values, err := redis.Values(conn.Do("HGETALL", feedKey))
 		if err != nil {
-			log.Println(err)
+			log.Printf("could not get feed info for key %s: %v", feedKey, err)
 			continue
 		}
 
 		err = redis.ScanStruct(values, &feed)
 		if err != nil {
-			log.Println(err)
+			log.Printf("could not scan struct for key %s: %v", feedKey, err)
 			continue
 		}
 
+		// Add feed id
 		if feed.ID == 0 {
 			parts := strings.Split(feedKey, ":")
 			if len(parts) == 2 {
 				feed.ID, _ = strconv.ParseInt(parts[1], 10, 64)
-				conn.Do("HPUT", feedKey, "id", feed.ID)
+				_, err = conn.Do("HPUT", feedKey, "id", feed.ID)
+				if err != nil {
+					log.Printf("could not save id for %s: %v", feedKey, err)
+				}
 			}
 		}
+
+		// Fix the callback url
+		callbackURL, err := url.Parse(feed.Callback)
+		if err != nil || !callbackURL.IsAbs() {
+			if err != nil {
+				log.Printf("could not parse callback url %q: %v", callbackURL, err)
+			} else {
+				log.Printf("url is relative; replace with absolute url: %q", callbackURL)
+			}
+			feed.Callback = fmt.Sprintf("%s/incoming/%d", h.baseURL, feed.ID)
+			_, err = conn.Do("HPUT", feedKey, "callback", feed.Callback)
+			if err != nil {
+				log.Printf("could not save id for %s: %v", feedKey, err)
+			}
+		}
@@ -181,7 +213,7 @@ func (h *hubIncomingBackend) GetFeeds() []Feed {
 		feeds = append(feeds, feed)
 	}
 
-	return feeds
+	return feeds, nil
 }
 
 func (h *hubIncomingBackend) Subscribe(feed *Feed) error {
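
For context, a minimal self-contained sketch of the cleanup rule the new Feeds method applies to stored callbacks: parse the callback URL and, when it is missing or relative, rebuild it from the hub's base URL and the feed ID. The function name normalizeCallback, the example base URL, and the main function are illustrative only and are not part of the commit.

package main

import (
	"fmt"
	"log"
	"net/url"
)

// normalizeCallback keeps an absolute callback URL as-is; otherwise it
// rebuilds the callback from the hub base URL and the feed ID, as the
// commit does for feeds read from Redis.
func normalizeCallback(callback, baseURL string, feedID int64) string {
	u, err := url.Parse(callback)
	if err == nil && u.IsAbs() {
		return callback
	}
	if err != nil {
		log.Printf("could not parse callback url %q: %v", callback, err)
	} else {
		log.Printf("url is relative; replacing with absolute url: %q", callback)
	}
	return fmt.Sprintf("%s/incoming/%d", baseURL, feedID)
}

func main() {
	// A relative callback gets rewritten against the base URL.
	fmt.Println(normalizeCallback("/incoming/7", "https://hub.example.com", 7))
	// An absolute callback is left untouched.
	fmt.Println(normalizeCallback("https://hub.example.com/incoming/7", "https://hub.example.com", 7))
}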