Create Fetcher interface and use it

Peter Stuifzand 2021-10-30 21:09:27 +02:00
parent 44ec376bda
commit b608fe6bc6
Signed by: peter
GPG Key ID: 374322D56E5209E8
5 changed files with 31 additions and 20 deletions

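Reading guide for the hunks below: the commit replaces direct calls to a fetch.FetcherFunc value (cachingFetch(u)) with calls through the new fetch.Fetcher interface (cachingFetch.Fetch(u)), and wraps plain functions such as Fetch2 in fetch.FetcherFunc so they satisfy that interface. A minimal sketch of the new calling convention, assuming the package's import path and using ExampleFetch as a stand-in for Fetch2:

package main

import (
	"io"
	"log"
	"net/http"

	"p83.nl/go/ekster/pkg/fetch" // assumed import path for the fetch package
)

// ExampleFetch has the func(string) (*http.Response, error) shape that
// fetch.FetcherFunc expects; it stands in for Fetch2 here.
func ExampleFetch(url string) (*http.Response, error) {
	return http.Get(url)
}

func main() {
	// Wrapping the function makes it satisfy the fetch.Fetcher interface.
	var fetcher fetch.Fetcher = fetch.FetcherFunc(ExampleFetch)

	// Call sites now go through the Fetch method instead of calling the value.
	resp, err := fetcher.Fetch("https://example.com/")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	_, _ = io.Copy(io.Discard, resp.Body)
}
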
View File

@@ -495,11 +495,11 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
// needs to be like this, because we get a null result otherwise in the json output
feeds := []microsub.Feed{}
cachingFetch := WithCaching(b.pool, Fetch2)
cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))
for _, u := range urls {
log.Println(u)
resp, err := cachingFetch(u)
resp, err := cachingFetch.Fetch(u)
if err != nil {
log.Printf("Error while fetching %s: %v\n", u, err)
continue
@@ -513,7 +513,7 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
continue
}
feedResp, err := cachingFetch(fetchURL.String())
feedResp, err := cachingFetch.Fetch(fetchURL.String())
if err != nil {
log.Printf("Error in fetch of %s - %v\n", fetchURL, err)
continue
@@ -536,7 +536,7 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
log.Printf("alternate found with type %s %#v\n", relURL.Type, relURL)
if strings.HasPrefix(relURL.Type, "text/html") || strings.HasPrefix(relURL.Type, "application/json") || strings.HasPrefix(relURL.Type, "application/xml") || strings.HasPrefix(relURL.Type, "text/xml") || strings.HasPrefix(relURL.Type, "application/rss+xml") || strings.HasPrefix(relURL.Type, "application/atom+xml") {
feedResp, err := cachingFetch(alt)
feedResp, err := cachingFetch.Fetch(alt)
if err != nil {
log.Printf("Error in fetch of %s - %v\n", alt, err)
continue
@@ -560,8 +560,8 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
}
func (b *memoryBackend) PreviewURL(previewURL string) (microsub.Timeline, error) {
cachingFetch := WithCaching(b.pool, Fetch2)
resp, err := cachingFetch(previewURL)
cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))
resp, err := cachingFetch.Fetch(previewURL)
if err != nil {
return microsub.Timeline{}, fmt.Errorf("error while fetching %s: %v", previewURL, err)
}
@@ -598,7 +598,7 @@ func (b *memoryBackend) Events() (chan sse.Message, error) {
}
// ProcessSourcedItems processes items and adds the Source
func ProcessSourcedItems(fetcher fetch.FetcherFunc, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
func ProcessSourcedItems(fetcher fetch.Fetcher, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
// When the source is available from the Header, we fill the Source of the item
bodyBytes, err := ioutil.ReadAll(body)
@@ -636,7 +636,7 @@ func ProcessSourcedItems(fetcher fetch.FetcherFunc, fetchURL, contentType string
}
func (b *memoryBackend) ProcessContent(channel, fetchURL, contentType string, body io.Reader) error {
cachingFetch := WithCaching(b.pool, Fetch2)
cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))
items, err := ProcessSourcedItems(cachingFetch, fetchURL, contentType, body)
if err != nil {
@@ -827,12 +827,12 @@ func (b *memoryBackend) updateChannelUnreadCount(channel string) error {
return nil
}
// WithCaching adds caching to a FetcherFunc
func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc {
// WithCaching adds caching to a fetch.Fetcher
func WithCaching(pool *redis.Pool, ff fetch.Fetcher) fetch.Fetcher {
ff2 := (func(fetchURL string) (*http.Response, error) {
conn := pool.Get()
defer conn.Close()
return func(fetchURL string) (*http.Response, error) {
cacheKey := fmt.Sprintf("http_cache:%s", fetchURL)
u, err := url.Parse(fetchURL)
if err != nil {
@@ -850,7 +850,7 @@ func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc {
log.Printf("MISS %s\n", fetchURL)
resp, err := ff(fetchURL)
resp, err := ff.Fetch(fetchURL)
if err != nil {
return nil, err
}
@@ -870,7 +870,8 @@ func WithCaching(pool *redis.Pool, ff fetch.FetcherFunc) fetch.FetcherFunc {
cachedResp, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(cachedCopy)), req)
return cachedResp, err
}
})
return fetch.FetcherFunc(ff2)
}
// Fetch2 fetches stuff

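WithCaching keeps its decorator shape after the change: it accepts any fetch.Fetcher, builds a closure that checks the cache before delegating to the wrapped fetcher, and converts that closure back into a fetch.Fetcher with fetch.FetcherFunc. A stripped-down sketch of the same shape, with the commit's Redis and http.ReadResponse machinery replaced by an in-memory map purely for illustration (the types are redeclared locally so the sketch is self-contained):

package fetchsketch

import (
	"bytes"
	"io"
	"net/http"
)

// Local mirrors of the interface and adapter added in this commit.
type Fetcher interface {
	Fetch(url string) (*http.Response, error)
}

type FetcherFunc func(url string) (*http.Response, error)

func (ff FetcherFunc) Fetch(url string) (*http.Response, error) { return ff(url) }

// WithMemoryCaching wraps a Fetcher in a naive in-memory cache; the real
// WithCaching does the same wrap-and-convert dance with Redis as the backend.
func WithMemoryCaching(ff Fetcher) Fetcher {
	cache := map[string][]byte{}
	return FetcherFunc(func(fetchURL string) (*http.Response, error) {
		if body, ok := cache[fetchURL]; ok {
			// HIT: serve a synthetic response from the cached body.
			return &http.Response{
				StatusCode: http.StatusOK,
				Header:     http.Header{},
				Body:       io.NopCloser(bytes.NewReader(body)),
			}, nil
		}
		// MISS: delegate to the wrapped Fetcher through its Fetch method.
		resp, err := ff.Fetch(fetchURL)
		if err != nil {
			return nil, err
		}
		defer resp.Body.Close()
		body, err := io.ReadAll(resp.Body)
		if err != nil {
			return nil, err
		}
		cache[fetchURL] = body
		resp.Body = io.NopCloser(bytes.NewReader(body))
		return resp, nil
	})
}
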
View File

@@ -32,7 +32,7 @@ func main() {
}
defer resp.Body.Close()
items, err := fetch.FeedItems(Fetch, url, resp.Header.Get("Content-Type"), resp.Body)
items, err := fetch.FeedItems(fetch.FetcherFunc(Fetch), url, resp.Header.Get("Content-Type"), resp.Body)
if err != nil {
log.Fatal(err)
}

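The only change in this file is wrapping Fetch in fetch.FetcherFunc. The conversion is required because a bare function value never satisfies an interface in Go; only the named FetcherFunc type, which carries the Fetch method, does. A self-contained illustration of that rule (the types are redeclared locally, fetchStub is invented for the example):

package main

import (
	"fmt"
	"net/http"
)

type Fetcher interface {
	Fetch(url string) (*http.Response, error)
}

// FetcherFunc adapts a bare function to the Fetcher interface.
type FetcherFunc func(url string) (*http.Response, error)

func (ff FetcherFunc) Fetch(url string) (*http.Response, error) { return ff(url) }

func fetchStub(url string) (*http.Response, error) { return nil, fmt.Errorf("stub: %s", url) }

func main() {
	// var f Fetcher = fetchStub          // does not compile: a bare func has no methods
	var f Fetcher = FetcherFunc(fetchStub) // the conversion attaches the Fetch method
	_, err := f.Fetch("https://example.com/")
	fmt.Println(err) // stub: https://example.com/
}
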
View File

@@ -25,7 +25,7 @@ import (
)
// FeedHeader returns a new microsub.Feed with the information parsed from body.
func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reader) (microsub.Feed, error) {
func FeedHeader(fetcher Fetcher, fetchURL, contentType string, body io.Reader) (microsub.Feed, error) {
log.Printf("ProcessContent %s\n", fetchURL)
log.Println("Found " + contentType)
@@ -38,7 +38,7 @@ func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reade
author, ok := jf2.SimplifyMicroformatDataAuthor(data)
if !ok {
if strings.HasPrefix(author.URL, "http") {
resp, err := fetcher(author.URL)
resp, err := fetcher.Fetch(author.URL)
if err != nil {
return feed, err
}
@@ -108,7 +108,7 @@ func FeedHeader(fetcher FetcherFunc, fetchURL, contentType string, body io.Reade
}
// FeedItems returns the items from the url, parsed from body.
func FeedItems(fetcher FetcherFunc, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
func FeedItems(fetcher Fetcher, fetchURL, contentType string, body io.Reader) ([]microsub.Item, error) {
log.Printf("ProcessContent %s\n", fetchURL)
log.Println("Found " + contentType)

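Widening FeedHeader and FeedItems from FetcherFunc to the Fetcher interface means callers are no longer limited to bare functions; any type with a Fetch method will do, for example a small wrapper around an *http.Client. A sketch of such an implementation, written as if it sat next to fetcher.go in the fetch package (the ClientFetcher type is illustrative, not part of the commit):

package fetch

import "net/http"

// ClientFetcher is an illustrative Fetcher backed by an *http.Client,
// e.g. one configured with a timeout or a custom transport.
type ClientFetcher struct {
	Client *http.Client
}

// Fetch satisfies the Fetcher interface by delegating to the wrapped client.
func (c ClientFetcher) Fetch(url string) (*http.Response, error) {
	return c.Client.Get(url)
}

With that in place, a call such as FeedItems(ClientFetcher{Client: &http.Client{Timeout: 10 * time.Second}}, url, contentType, body) works without touching the parsing code.
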
View File

@@ -23,7 +23,7 @@ func TestFeedHeader(t *testing.T) {
</body>
</html>
`
feed, err := FeedHeader(fetcher, "https://example.com/", "text/html", strings.NewReader(doc))
feed, err := FeedHeader(FetcherFunc(fetcher), "https://example.com/", "text/html", strings.NewReader(doc))
if assert.NoError(t, err) {
assert.Equal(t, "feed", feed.Type)
assert.Equal(t, "Title", feed.Name)

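The test now wraps its local fetcher function in FetcherFunc before handing it to FeedHeader. The same adapter makes it easy to serve canned responses in tests without any network access; a sketch of such a helper (stubFetcher and its behaviour are invented for illustration, not part of the commit):

package fetch

import (
	"io"
	"net/http"
	"strings"
)

// stubFetcher returns a Fetcher that serves canned HTML per URL and a 404
// for anything else, so tests like TestFeedHeader never hit the network.
func stubFetcher(pages map[string]string) Fetcher {
	return FetcherFunc(func(url string) (*http.Response, error) {
		body, ok := pages[url]
		status := http.StatusOK
		if !ok {
			status = http.StatusNotFound
		}
		return &http.Response{
			StatusCode: status,
			Header:     http.Header{"Content-Type": []string{"text/html; charset=utf-8"}},
			Body:       io.NopCloser(strings.NewReader(body)),
		}, nil
	})
}

A test could then pass stubFetcher(map[string]string{"https://example.com/author": authorHTML}) to FeedHeader and stay fully offline.
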
View File

@@ -2,5 +2,15 @@ package fetch
import "net/http"
// Fetcher fetches urls
type Fetcher interface {
Fetch(url string) (*http.Response, error)
}
// FetcherFunc is a function that fetches an url
type FetcherFunc func(url string) (*http.Response, error)
// Fetch fetches an url and returns a response or error
func (ff FetcherFunc) Fetch(url string) (*http.Response, error) {
return ff(url)
}
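
The new file mirrors the standard library's http.Handler / http.HandlerFunc pairing: a one-method interface plus a function type whose method calls the function itself. Because WithCaching now both accepts and returns a fetch.Fetcher, decorators built this way compose. A sketch of a hypothetical logging decorator with the same shape, again written as if it lived in the fetch package (withLogging is invented for the example):

package fetch

import (
	"log"
	"net/http"
)

// withLogging is a hypothetical decorator with the same shape as WithCaching:
// it takes a Fetcher and returns a Fetcher.
func withLogging(next Fetcher) Fetcher {
	return FetcherFunc(func(url string) (*http.Response, error) {
		log.Printf("fetching %s", url)
		resp, err := next.Fetch(url)
		if err != nil {
			log.Printf("fetch of %s failed: %v", url, err)
		}
		return resp, err
	})
}

Layers stack naturally, e.g. withLogging(WithCaching(b.pool, fetch.FetcherFunc(Fetch2))).Fetch(u), since every layer speaks the same one-method interface.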