Compare commits
5 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a1be6f4e35 | |||
| 01b255b3f7 | |||
| 25bdf5a4a2 | |||
| c7f231a38e | |||
| dcc9bfa889 |
124
cmd/eksterd/feedsearch.go
Normal file
124
cmd/eksterd/feedsearch.go
Normal file
|
|
@ -0,0 +1,124 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"p83.nl/go/ekster/pkg/fetch"
|
||||||
|
"p83.nl/go/ekster/pkg/microsub"
|
||||||
|
"willnorris.com/go/microformats"
|
||||||
|
)
|
||||||
|
|
||||||
|
// isSupportedFeedType reports whether the given Content-Type value
// denotes a feed format this server knows how to parse. A prefix match
// is used so that media-type parameters (e.g. "; charset=utf-8") after
// the type are accepted.
func isSupportedFeedType(feedType string) bool {
	supported := []string{
		"text/html",
		"application/json",
		"application/xml",
		"text/xml",
		"application/rss+xml",
		"application/atom+xml",
	}
	for _, prefix := range supported {
		if strings.HasPrefix(feedType, prefix) {
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
func findFeeds(cachingFetch fetch.FetcherFunc, feedURL string) ([]microsub.Feed, error) {
|
||||||
|
resp, err := cachingFetch(feedURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("while fetching %s: %w", feedURL, err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
fetchURL, err := url.Parse(feedURL)
|
||||||
|
md := microformats.Parse(resp.Body, fetchURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("while fetching %s: %w", feedURL, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
feedResp, err := cachingFetch(fetchURL.String())
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("in fetch of %s: %w", fetchURL, err)
|
||||||
|
}
|
||||||
|
defer feedResp.Body.Close()
|
||||||
|
|
||||||
|
// TODO: Combine FeedHeader and FeedItems so we can use it here
|
||||||
|
parsedFeed, err := fetch.FeedHeader(cachingFetch, fetchURL.String(), feedResp.Header.Get("Content-Type"), feedResp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("in parse of %s: %w", fetchURL, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var feeds []microsub.Feed
|
||||||
|
|
||||||
|
// TODO: Only include the feed if it contains some items
|
||||||
|
feeds = append(feeds, parsedFeed)
|
||||||
|
|
||||||
|
// Fetch alternates
|
||||||
|
if alts, e := md.Rels["alternate"]; e {
|
||||||
|
for _, alt := range alts {
|
||||||
|
relURL := md.RelURLs[alt]
|
||||||
|
log.Printf("alternate found with type %s %#v\n", relURL.Type, relURL)
|
||||||
|
|
||||||
|
if isSupportedFeedType(relURL.Type) {
|
||||||
|
parsedFeed, err := fetchAlternateFeed(cachingFetch, alt)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
feeds = append(feeds, parsedFeed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return feeds, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func fetchAlternateFeed(cachingFetch fetch.FetcherFunc, altURL string) (microsub.Feed, error) {
|
||||||
|
feedResp, err := cachingFetch(altURL)
|
||||||
|
if err != nil {
|
||||||
|
return microsub.Feed{}, fmt.Errorf("fetch of %s: %v", altURL, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
defer feedResp.Body.Close()
|
||||||
|
|
||||||
|
parsedFeed, err := fetch.FeedHeader(cachingFetch, altURL, feedResp.Header.Get("Content-Type"), feedResp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return microsub.Feed{}, fmt.Errorf("in parse of %s: %v", altURL, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsedFeed, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getPossibleURLs(query string) []string {
|
||||||
|
urls := []string{}
|
||||||
|
if !(strings.HasPrefix(query, "https://") || strings.HasPrefix(query, "http://")) {
|
||||||
|
secureURL := "https://" + query
|
||||||
|
if checkURL(secureURL) {
|
||||||
|
urls = append(urls, secureURL)
|
||||||
|
} else {
|
||||||
|
unsecureURL := "http://" + query
|
||||||
|
if checkURL(unsecureURL) {
|
||||||
|
urls = append(urls, unsecureURL)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
urls = append(urls, query)
|
||||||
|
}
|
||||||
|
return urls
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkURL reports whether u is a well-formed URL that answers an HTTP
// HEAD request with status 200 OK.
//
// NOTE(review): this goes through http.DefaultClient, which has no
// timeout — a slow host can stall the caller; consider a dedicated
// client with a Timeout.
func checkURL(u string) bool {
	parsed, err := url.Parse(u)
	if err != nil {
		return false
	}

	resp, err := http.Head(parsed.String())
	if err != nil {
		log.Printf("Error while HEAD %s: %v\n", u, err)
		return false
	}
	defer resp.Body.Close()

	return resp.StatusCode == http.StatusOK
}
|
||||||
|
|
@ -362,48 +362,56 @@ func (h *mainHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||||
|
|
||||||
var page settingsPage
|
var page settingsPage
|
||||||
page.Session = sess
|
page.Session = sess
|
||||||
currentChannel := r.URL.Query().Get("uid")
|
currentChannelUID := r.URL.Query().Get("uid")
|
||||||
page.Channels, err = h.Backend.ChannelsGetList()
|
page.Channels, err = h.Backend.ChannelsGetList()
|
||||||
page.Feeds, err = h.Backend.FollowGetList(currentChannel)
|
page.Feeds, err = h.Backend.FollowGetList(currentChannelUID)
|
||||||
|
|
||||||
|
var selectedChannel microsub.Channel
|
||||||
|
found := false
|
||||||
|
|
||||||
for _, v := range page.Channels {
|
for _, v := range page.Channels {
|
||||||
if v.UID == currentChannel {
|
if v.UID == currentChannelUID {
|
||||||
page.CurrentChannel = v
|
selectedChannel = v
|
||||||
if setting, e := h.Backend.Settings[v.UID]; e {
|
found = true
|
||||||
page.CurrentSetting = setting
|
|
||||||
} else {
|
|
||||||
page.CurrentSetting = channelSetting{}
|
|
||||||
}
|
|
||||||
// FIXME: similar code is found in timeline.go
|
|
||||||
if page.CurrentSetting.ChannelType == "" {
|
|
||||||
if v.UID == "notifications" {
|
|
||||||
page.CurrentSetting.ChannelType = "stream"
|
|
||||||
} else {
|
|
||||||
page.CurrentSetting.ChannelType = "sorted-set"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
page.ExcludedTypeNames = map[string]string{
|
|
||||||
"repost": "Reposts",
|
|
||||||
"like": "Likes",
|
|
||||||
"bookmark": "Bookmarks",
|
|
||||||
"reply": "Replies",
|
|
||||||
"checkin": "Checkins",
|
|
||||||
}
|
|
||||||
page.ExcludedTypes = make(map[string]bool)
|
|
||||||
types := []string{"repost", "like", "bookmark", "reply", "checkin"}
|
|
||||||
for _, v := range types {
|
|
||||||
page.ExcludedTypes[v] = false
|
|
||||||
}
|
|
||||||
for _, v := range page.CurrentSetting.ExcludeType {
|
|
||||||
page.ExcludedTypes[v] = true
|
|
||||||
}
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if found {
|
||||||
|
page.CurrentChannel = selectedChannel
|
||||||
|
if setting, e := h.Backend.Settings[selectedChannel.UID]; e {
|
||||||
|
page.CurrentSetting = setting
|
||||||
|
} else {
|
||||||
|
page.CurrentSetting = channelSetting{}
|
||||||
|
}
|
||||||
|
// FIXME: similar code is found in timeline.go
|
||||||
|
if page.CurrentSetting.ChannelType == "" {
|
||||||
|
if selectedChannel.UID == "notifications" {
|
||||||
|
page.CurrentSetting.ChannelType = "stream"
|
||||||
|
} else {
|
||||||
|
page.CurrentSetting.ChannelType = "sorted-set"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
page.ExcludedTypeNames = map[string]string{
|
||||||
|
"repost": "Reposts",
|
||||||
|
"like": "Likes",
|
||||||
|
"bookmark": "Bookmarks",
|
||||||
|
"reply": "Replies",
|
||||||
|
"checkin": "Checkins",
|
||||||
|
}
|
||||||
|
page.ExcludedTypes = make(map[string]bool)
|
||||||
|
types := []string{"repost", "like", "bookmark", "reply", "checkin"}
|
||||||
|
for _, v := range types {
|
||||||
|
page.ExcludedTypes[v] = false
|
||||||
|
}
|
||||||
|
for _, v := range page.CurrentSetting.ExcludeType {
|
||||||
|
page.ExcludedTypes[v] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
err = h.renderTemplate(w, "channel.html", page)
|
err = h.renderTemplate(w, "channel.html", page)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Fprintf(w, "ERROR: %s\n", err)
|
http.Error(w, err.Error(), 500)
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
} else if r.URL.Path == "/logs" {
|
} else if r.URL.Path == "/logs" {
|
||||||
|
|
|
||||||
|
|
@ -25,7 +25,6 @@ import (
|
||||||
"p83.nl/go/ekster/pkg/util"
|
"p83.nl/go/ekster/pkg/util"
|
||||||
|
|
||||||
"github.com/gomodule/redigo/redis"
|
"github.com/gomodule/redigo/redis"
|
||||||
"willnorris.com/go/microformats"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// DefaultPrio is the priority value for new channels
|
// DefaultPrio is the priority value for new channels
|
||||||
|
|
@ -423,42 +422,6 @@ func (b *memoryBackend) UnfollowURL(uid string, url string) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func checkURL(u string) bool {
|
|
||||||
testURL, err := url.Parse(u)
|
|
||||||
if err != nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
resp, err := http.Head(testURL.String())
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("Error while HEAD %s: %v\n", u, err)
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
return resp.StatusCode == 200
|
|
||||||
}
|
|
||||||
|
|
||||||
func getPossibleURLs(query string) []string {
|
|
||||||
urls := []string{}
|
|
||||||
if !(strings.HasPrefix(query, "https://") || strings.HasPrefix(query, "http://")) {
|
|
||||||
secureURL := "https://" + query
|
|
||||||
if checkURL(secureURL) {
|
|
||||||
urls = append(urls, secureURL)
|
|
||||||
} else {
|
|
||||||
unsecureURL := "http://" + query
|
|
||||||
if checkURL(unsecureURL) {
|
|
||||||
urls = append(urls, unsecureURL)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
urls = append(urls, query)
|
|
||||||
}
|
|
||||||
return urls
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *memoryBackend) ItemSearch(channel, query string) ([]microsub.Item, error) {
|
func (b *memoryBackend) ItemSearch(channel, query string) ([]microsub.Item, error) {
|
||||||
return querySearch(channel, query)
|
return querySearch(channel, query)
|
||||||
}
|
}
|
||||||
|
|
@ -471,63 +434,13 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
|
||||||
|
|
||||||
cachingFetch := WithCaching(b.pool, Fetch2)
|
cachingFetch := WithCaching(b.pool, Fetch2)
|
||||||
|
|
||||||
for _, u := range urls {
|
for _, feedURL := range urls {
|
||||||
log.Println(u)
|
log.Println(feedURL)
|
||||||
resp, err := cachingFetch(u)
|
foundFeeds, err := findFeeds(cachingFetch, feedURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Printf("Error while fetching %s: %v\n", u, err)
|
log.Printf("error while finding feeds: %v", err)
|
||||||
continue
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
fetchURL, err := url.Parse(u)
|
|
||||||
md := microformats.Parse(resp.Body, fetchURL)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("Error while fetching %s: %v\n", u, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
feedResp, err := cachingFetch(fetchURL.String())
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("Error in fetch of %s - %v\n", fetchURL, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
defer feedResp.Body.Close()
|
|
||||||
|
|
||||||
// TODO: Combine FeedHeader and FeedItems so we can use it here
|
|
||||||
parsedFeed, err := fetch.FeedHeader(cachingFetch, fetchURL.String(), feedResp.Header.Get("Content-Type"), feedResp.Body)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("Error in parse of %s - %v\n", fetchURL, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: Only include the feed if it contains some items
|
|
||||||
feeds = append(feeds, parsedFeed)
|
|
||||||
|
|
||||||
if alts, e := md.Rels["alternate"]; e {
|
|
||||||
for _, alt := range alts {
|
|
||||||
relURL := md.RelURLs[alt]
|
|
||||||
log.Printf("alternate found with type %s %#v\n", relURL.Type, relURL)
|
|
||||||
|
|
||||||
if strings.HasPrefix(relURL.Type, "text/html") || strings.HasPrefix(relURL.Type, "application/json") || strings.HasPrefix(relURL.Type, "application/xml") || strings.HasPrefix(relURL.Type, "text/xml") || strings.HasPrefix(relURL.Type, "application/rss+xml") || strings.HasPrefix(relURL.Type, "application/atom+xml") {
|
|
||||||
feedResp, err := cachingFetch(alt)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("Error in fetch of %s - %v\n", alt, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// FIXME: don't defer in for loop (possible memory leak)
|
|
||||||
defer feedResp.Body.Close()
|
|
||||||
|
|
||||||
parsedFeed, err := fetch.FeedHeader(cachingFetch, alt, feedResp.Header.Get("Content-Type"), feedResp.Body)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("Error in parse of %s - %v\n", alt, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
feeds = append(feeds, parsedFeed)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
feeds = append(feeds, foundFeeds...)
|
||||||
}
|
}
|
||||||
|
|
||||||
return feeds, nil
|
return feeds, nil
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
|
{{- /*gotype: p83.nl/go/ekster/cmd/eksterd.authPage*/ -}}
|
||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html>
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8">
|
<meta charset="utf-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,4 @@
|
||||||
|
{{- /*gotype: p83.nl/go/ekster/cmd/eksterd.settingsPage*/ -}}
|
||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue
Block a user