Create Fetcher interface and use it

This commit is contained in:
Peter Stuifzand 2021-10-30 21:09:27 +02:00
parent 44ec376bda
commit b608fe6bc6
Signed by: peter
GPG Key ID: 374322D56E5209E8
5 changed files with 31 additions and 20 deletions

View File

@ -495,11 +495,11 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
// needs to be like this, because we get a null result otherwise in the json output // needs to be like this, because we get a null result otherwise in the json output
feeds := []microsub.Feed{} feeds := []microsub.Feed{}
cachingFetch := WithCaching(b.pool, Fetch2) cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))
for _, u := range urls { for _, u := range urls {
log.Println(u) log.Println(u)
resp, err := cachingFetch(u) resp, err := cachingFetch.Fetch(u)
if err != nil { if err != nil {
log.Printf("Error while fetching %s: %v\n", u, err) log.Printf("Error while fetching %s: %v\n", u, err)
continue continue
@ -513,7 +513,7 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
continue continue
} }
feedResp, err := cachingFetch(fetchURL.String()) feedResp, err := cachingFetch.Fetch(fetchURL.String())
if err != nil { if err != nil {
log.Printf("Error in fetch of %s - %v\n", fetchURL, err) log.Printf("Error in fetch of %s - %v\n", fetchURL, err)
continue continue
@ -536,7 +536,7 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
log.Printf("alternate found with type %s %#v\n", relURL.Type, relURL) log.Printf("alternate found with type %s %#v\n", relURL.Type, relURL)
if strings.HasPrefix(relURL.Type, "text/html") || strings.HasPrefix(relURL.Type, "application/json") || strings.HasPrefix(relURL.Type, "application/xml") || strings.HasPrefix(relURL.Type, "text/xml") || strings.HasPrefix(relURL.Type, "application/rss+xml") || strings.HasPrefix(relURL.Type, "application/atom+xml") { if strings.HasPrefix(relURL.Type, "text/html") || strings.HasPrefix(relURL.Type, "application/json") || strings.HasPrefix(relURL.Type, "application/xml") || strings.HasPrefix(relURL.Type, "text/xml") || strings.HasPrefix(relURL.Type, "application/rss+xml") || strings.HasPrefix(relURL.Type, "application/atom+xml") {
feedResp, err := cachingFetch(alt) feedResp, err := cachingFetch.Fetch(alt)
if err != nil { if err != nil {
log.Printf("Error in fetch of %s - %v\n", alt, err) log.Printf("Error in fetch of %s - %v\n", alt, err)
continue continue
@ -560,8 +560,8 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
} }
func (b *memoryBackend) PreviewURL(previewURL string) (microsub.Timeline, error) { func (b *memoryBackend) PreviewURL(previewURL string) (microsub.Timeline, error) {
cachingFetch := WithCaching(b.pool, Fetch2) cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))
resp, err := cachingFetch(previewURL) resp, err := cachingFetch.Fetch(previewURL)
if err != nil { if err != nil {
return microsub.Timeline{}, fmt.Errorf("error while fetching %s: %v", previewURL, err) return microsub.Timeline{}, fmt.Errorf("error while fetching %s: %v", previewURL, err)
} }
@ -598,7 +598,7 @@ func (b *memoryBackend) Events() (chan sse.Message, error) {
} }
// ProcessSourcedItems processes items and adds the Source // ProcessSourcedItems processes items and adds the Source
func ProcessSourcedItems(fetcher fetch.FetcherFunc, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) { func ProcessSourcedItems(fetcher fetch.Fetcher, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
// When the source is available from the Header, we fill the Source of the item // When the source is available from the Header, we fill the Source of the item
bodyBytes, err := ioutil.ReadAll(body) bodyBytes, err := ioutil.ReadAll(body)
@ -636,7 +636,7 @@ func ProcessSourcedItems(fetcher fetch.FetcherFunc, fetchURL, contentType string
} }
func (b *memoryBackend) ProcessContent(channel, fetchURL, contentType string, body io.Reader) error { func (b *memoryBackend) ProcessContent(channel, fetchURL, contentType string, body io.Reader) error {
cachingFetch := WithCaching(b.pool, Fetch2) cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))
items, err := ProcessSourcedItems(cachingFetch, fetchURL, contentType, body) items, err := ProcessSourcedItems(cachingFetch, fetchURL, contentType, body)
if err != nil { if err != nil {
@ -827,12 +827,12 @@ func (b *memoryBackend) updateChannelUnreadCount(channel string) error {
return nil return nil
} }
// WithCaching adds caching to a FetcherFunc // WithCaching adds caching to a fetch.Fetcher
func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc { func WithCaching(pool *redis.Pool, ff fetch.Fetcher) fetch.Fetcher {
conn := pool.Get() ff2 := (func(fetchURL string) (*http.Response, error) {
defer conn.Close() conn := pool.Get()
defer conn.Close()
return func(fetchURL string) (*http.Response, error) {
cacheKey := fmt.Sprintf("http_cache:%s", fetchURL) cacheKey := fmt.Sprintf("http_cache:%s", fetchURL)
u, err := url.Parse(fetchURL) u, err := url.Parse(fetchURL)
if err != nil { if err != nil {
@ -850,7 +850,7 @@ func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc {
log.Printf("MISS %s\n", fetchURL) log.Printf("MISS %s\n", fetchURL)
resp, err := ff(fetchURL) resp, err := ff.Fetch(fetchURL)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -870,7 +870,8 @@ func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc {
cachedResp, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(cachedCopy)), req) cachedResp, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(cachedCopy)), req)
return cachedResp, err return cachedResp, err
} })
return fetch.FetcherFunc(ff2)
} }
// Fetch2 fetches stuff // Fetch2 fetches stuff

View File

@ -32,7 +32,7 @@ func main() {
} }
defer resp.Body.Close() defer resp.Body.Close()
items, err := fetch.FeedItems(Fetch, url, resp.Header.Get("Content-Type"), resp.Body) items, err := fetch.FeedItems(fetch.FetcherFunc(Fetch), url, resp.Header.Get("Content-Type"), resp.Body)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }

View File

@ -25,7 +25,7 @@ import (
) )
// FeedHeader returns a new microsub.Feed with the information parsed from body. // FeedHeader returns a new microsub.Feed with the information parsed from body.
func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reader) (microsub.Feed, error) { func FeedHeader(fetcher Fetcher, fetchURL, contentType string, body io.Reader) (microsub.Feed, error) {
log.Printf("ProcessContent %s\n", fetchURL) log.Printf("ProcessContent %s\n", fetchURL)
log.Println("Found " + contentType) log.Println("Found " + contentType)
@ -38,7 +38,7 @@ func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reade
author, ok := jf2.SimplifyMicroformatDataAuthor(data) author, ok := jf2.SimplifyMicroformatDataAuthor(data)
if !ok { if !ok {
if strings.HasPrefix(author.URL, "http") { if strings.HasPrefix(author.URL, "http") {
resp, err := fetcher(author.URL) resp, err := fetcher.Fetch(author.URL)
if err != nil { if err != nil {
return feed, err return feed, err
} }
@ -108,7 +108,7 @@ func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reade
} }
// FeedItems returns the items from the url, parsed from body. // FeedItems returns the items from the url, parsed from body.
func FeedItems(fetcher FetcherFunc, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) { func FeedItems(fetcher Fetcher, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
log.Printf("ProcessContent %s\n", fetchURL) log.Printf("ProcessContent %s\n", fetchURL)
log.Println("Found " + contentType) log.Println("Found " + contentType)

View File

@ -23,7 +23,7 @@ func TestFeedHeader(t *testing.T) {
</body> </body>
</html> </html>
` `
feed, err := FeedHeader(fetcher, "https://example.com/", "text/html", strings.NewReader(doc)) feed, err := FeedHeader(FetcherFunc(fetcher), "https://example.com/", "text/html", strings.NewReader(doc))
if assert.NoError(t, err) { if assert.NoError(t, err) {
assert.Equal(t, "feed", feed.Type) assert.Equal(t, "feed", feed.Type)
assert.Equal(t, "Title", feed.Name) assert.Equal(t, "Title", feed.Name)

View File

@ -2,5 +2,15 @@ package fetch
import "net/http" import "net/http"
// Fetcher fetches the given URL and returns the raw *http.Response.
// It is the single-method abstraction this commit introduces so that
// callers (e.g. WithCaching, FeedHeader, FeedItems) can accept either a
// plain function (via FetcherFunc) or any other implementation.
type Fetcher interface {
	Fetch(url string) (*http.Response, error)
}
// FetcherFunc is a function that fetches an url // FetcherFunc is a function that fetches an url
type FetcherFunc func(url string) (*http.Response, error) type FetcherFunc func(url string) (*http.Response, error)
// Fetch fetches an url and returns a response or error
func (ff FetcherFunc) Fetch(url string) (*http.Response, error) {
return ff(url)
}