From b608fe6bc65704352cc33cd06435dd96a1fede93 Mon Sep 17 00:00:00 2001
From: Peter Stuifzand
Date: Sat, 30 Oct 2021 21:09:27 +0200
Subject: [PATCH] Create Fetcher interface and use it

---
 cmd/eksterd/memory.go   | 31 ++++++++++++++++---------------
 cmd/jf2test/main.go     |  2 +-
 pkg/fetch/fetch.go      |  6 +++---
 pkg/fetch/fetch_test.go |  2 +-
 pkg/fetch/fetcher.go    | 10 ++++++++++
 5 files changed, 31 insertions(+), 20 deletions(-)

diff --git a/cmd/eksterd/memory.go b/cmd/eksterd/memory.go
index 6b98355..a770cb6 100644
--- a/cmd/eksterd/memory.go
+++ b/cmd/eksterd/memory.go
@@ -495,11 +495,11 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
 	// needs to be like this, because we get a null result otherwise in the json output
 	feeds := []microsub.Feed{}

-	cachingFetch := WithCaching(b.pool, Fetch2)
+	cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))

 	for _, u := range urls {
 		log.Println(u)
-		resp, err := cachingFetch(u)
+		resp, err := cachingFetch.Fetch(u)
 		if err != nil {
 			log.Printf("Error while fetching %s: %v\n", u, err)
 			continue
@@ -513,7 +513,7 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
 			continue
 		}

-		feedResp, err := cachingFetch(fetchURL.String())
+		feedResp, err := cachingFetch.Fetch(fetchURL.String())
 		if err != nil {
 			log.Printf("Error in fetch of %s - %v\n", fetchURL, err)
 			continue
@@ -536,7 +536,7 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
 				log.Printf("alternate found with type %s %#v\n", relURL.Type, relURL)

 				if strings.HasPrefix(relURL.Type, "text/html") || strings.HasPrefix(relURL.Type, "application/json") || strings.HasPrefix(relURL.Type, "application/xml") || strings.HasPrefix(relURL.Type, "text/xml") || strings.HasPrefix(relURL.Type, "application/rss+xml") || strings.HasPrefix(relURL.Type, "application/atom+xml") {
-					feedResp, err := cachingFetch(alt)
+					feedResp, err := cachingFetch.Fetch(alt)
 					if err != nil {
 						log.Printf("Error in fetch of %s - %v\n", alt, err)
 						continue
@@ -560,8 +560,8 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
 }

 func (b *memoryBackend) PreviewURL(previewURL string) (microsub.Timeline, error) {
-	cachingFetch := WithCaching(b.pool, Fetch2)
-	resp, err := cachingFetch(previewURL)
+	cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))
+	resp, err := cachingFetch.Fetch(previewURL)
 	if err != nil {
 		return microsub.Timeline{}, fmt.Errorf("error while fetching %s: %v", previewURL, err)
 	}
@@ -598,7 +598,7 @@ func (b *memoryBackend) Events() (chan sse.Message, error) {
 }

 // ProcessSourcedItems processes items and adds the Source
-func ProcessSourcedItems(fetcher fetch.FetcherFunc, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
+func ProcessSourcedItems(fetcher fetch.Fetcher, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
 	// When the source is available from the Header, we fill the Source of the item

 	bodyBytes, err := ioutil.ReadAll(body)
@@ -636,7 +636,7 @@ func (b *memoryBackend) ProcessContent(channel, fetchURL, contentType string, body io.Reader) error {
-	cachingFetch := WithCaching(b.pool, Fetch2)
+	cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))

 	items, err := ProcessSourcedItems(cachingFetch, fetchURL, contentType, body)
 	if err != nil {
@@ -827,12 +827,12 @@ func (b *memoryBackend) updateChannelUnreadCount(channel string) error {
 	return nil
 }

-// WithCaching adds caching to a FetcherFunc
-func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc {
-	conn := pool.Get()
-	defer conn.Close()
+// WithCaching adds caching to a fetch.Fetcher
+func WithCaching(pool *redis.Pool, ff fetch.Fetcher) fetch.Fetcher {
+	ff2 := (func(fetchURL string) (*http.Response, error) {
+		conn := pool.Get()
+		defer conn.Close()

-	return func(fetchURL string) (*http.Response, error) {
 		cacheKey := fmt.Sprintf("http_cache:%s", fetchURL)
 		u, err := url.Parse(fetchURL)
 		if err != nil {
@@ -850,7 +850,7 @@ func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc {

 		log.Printf("MISS %s\n", fetchURL)

-		resp, err := ff(fetchURL)
+		resp, err := ff.Fetch(fetchURL)
 		if err != nil {
 			return nil, err
 		}
@@ -870,7 +870,8 @@ func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc {

 		cachedResp, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(cachedCopy)), req)
 		return cachedResp, err
-	}
+	})
+	return fetch.FetcherFunc(ff2)
 }

 // Fetch2 fetches stuff
diff --git a/cmd/jf2test/main.go b/cmd/jf2test/main.go
index c927d5b..0147e70 100644
--- a/cmd/jf2test/main.go
+++ b/cmd/jf2test/main.go
@@ -32,7 +32,7 @@ func main() {
 	}
 	defer resp.Body.Close()

-	items, err := fetch.FeedItems(Fetch, url, resp.Header.Get("Content-Type"), resp.Body)
+	items, err := fetch.FeedItems(fetch.FetcherFunc(Fetch), url, resp.Header.Get("Content-Type"), resp.Body)
 	if err != nil {
 		log.Fatal(err)
 	}
diff --git a/pkg/fetch/fetch.go b/pkg/fetch/fetch.go
index 9c9bab0..75054be 100644
--- a/pkg/fetch/fetch.go
+++ b/pkg/fetch/fetch.go
@@ -25,7 +25,7 @@ import (
 )

 // FeedHeader returns a new microsub.Feed with the information parsed from body.
-func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reader) (microsub.Feed, error) {
+func FeedHeader(fetcher Fetcher, fetchURL, contentType string, body io.Reader) (microsub.Feed, error) {
 	log.Printf("ProcessContent %s\n", fetchURL)
 	log.Println("Found " + contentType)

@@ -38,7 +38,7 @@ func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reade
 	author, ok := jf2.SimplifyMicroformatDataAuthor(data)
 	if !ok {
 		if strings.HasPrefix(author.URL, "http") {
-			resp, err := fetcher(author.URL)
+			resp, err := fetcher.Fetch(author.URL)
 			if err != nil {
 				return feed, err
 			}
@@ -108,7 +108,7 @@ func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reade
 }

 // FeedItems returns the items from the url, parsed from body.
-func FeedItems(fetcher FetcherFunc, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
+func FeedItems(fetcher Fetcher, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
 	log.Printf("ProcessContent %s\n", fetchURL)
 	log.Println("Found " + contentType)

diff --git a/pkg/fetch/fetch_test.go b/pkg/fetch/fetch_test.go
index 1f1bd78..64727d6 100644
--- a/pkg/fetch/fetch_test.go
+++ b/pkg/fetch/fetch_test.go
@@ -23,7 +23,7 @@ func TestFeedHeader(t *testing.T) {
 `

-	feed, err := FeedHeader(fetcher, "https://example.com/", "text/html", strings.NewReader(doc))
+	feed, err := FeedHeader(FetcherFunc(fetcher), "https://example.com/", "text/html", strings.NewReader(doc))
 	if assert.NoError(t, err) {
 		assert.Equal(t, "feed", feed.Type)
 		assert.Equal(t, "Title", feed.Name)
diff --git a/pkg/fetch/fetcher.go b/pkg/fetch/fetcher.go
index 6769a0b..1f11dbe 100644
--- a/pkg/fetch/fetcher.go
+++ b/pkg/fetch/fetcher.go
@@ -2,5 +2,15 @@ package fetch

 import "net/http"

+// Fetcher fetches urls
+type Fetcher interface {
+	Fetch(url string) (*http.Response, error)
+}
+
 // FetcherFunc is a function that fetches an url
 type FetcherFunc func(url string) (*http.Response, error)
+
+// Fetch fetches an url and returns a response or error
+func (ff FetcherFunc) Fetch(url string) (*http.Response, error) {
+	return ff(url)
+}
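
Note (not part of the patch): below is a minimal sketch of how the new interface is meant to compose. It assumes the pkg/fetch import path p83.nl/go/ekster/pkg/fetch from the repository layout, and WithLogging is a hypothetical decorator included only to show the shape WithCaching now has, namely a fetch.Fetcher wrapping a fetch.Fetcher. Any function with the signature func(string) (*http.Response, error), including http.Get, converts to fetch.FetcherFunc and thereby satisfies fetch.Fetcher, in the same way http.HandlerFunc satisfies http.Handler.

    package main

    import (
    	"log"
    	"net/http"

    	"p83.nl/go/ekster/pkg/fetch" // import path assumed from the repository layout
    )

    // WithLogging is a hypothetical decorator with the same shape as WithCaching:
    // it accepts any fetch.Fetcher and returns another fetch.Fetcher.
    func WithLogging(next fetch.Fetcher) fetch.Fetcher {
    	return fetch.FetcherFunc(func(url string) (*http.Response, error) {
    		log.Printf("GET %s", url)
    		return next.Fetch(url)
    	})
    }

    func main() {
    	// http.Get has the right signature, so converting it to fetch.FetcherFunc
    	// yields a fetch.Fetcher that the decorator can wrap.
    	fetcher := WithLogging(fetch.FetcherFunc(http.Get))

    	url := "https://example.com/"
    	resp, err := fetcher.Fetch(url)
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer resp.Body.Close()

    	// FeedItems (and FeedHeader) now accept any fetch.Fetcher, so a plain
    	// fetcher, this logging wrapper, or the caching wrapper from eksterd
    	// can be passed interchangeably.
    	items, err := fetch.FeedItems(fetcher, url, resp.Header.Get("Content-Type"), resp.Body)
    	if err != nil {
    		log.Fatal(err)
    	}
    	log.Printf("parsed %d items", len(items))
    }

Because every caller now takes the interface instead of the concrete function type, decorators such as WithCaching stack freely, and tests can substitute a canned fetcher, as fetch_test.go does with FetcherFunc(fetcher).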