Move server to package

This commit is contained in:
Peter Stuifzand 2018-09-12 22:35:49 +02:00
parent c160a34260
commit 7e15298175
Signed by: peter
GPG Key ID: 374322D56E5209E8
13 changed files with 378 additions and 149 deletions

View File

@ -26,20 +26,12 @@ import (
"time" "time"
"github.com/gomodule/redigo/redis" "github.com/gomodule/redigo/redis"
"p83.nl/go/ekster/pkg/auth"
) )
// TokenResponse is the information that we get back from the token endpoint of the user...
type TokenResponse struct {
Me string `json:"me"`
ClientID string `json:"client_id"`
Scope string `json:"scope"`
IssuedAt int64 `json:"issued_at"`
Nonce int64 `json:"nonce"`
}
var authHeaderRegex = regexp.MustCompile("^Bearer (.+)$") var authHeaderRegex = regexp.MustCompile("^Bearer (.+)$")
func (h *microsubHandler) cachedCheckAuthToken(conn redis.Conn, header string, r *TokenResponse) bool { func (b *memoryBackend) cachedCheckAuthToken(conn redis.Conn, header string, r *auth.TokenResponse) bool {
log.Println("Cached checking Auth Token") log.Println("Cached checking Auth Token")
tokens := authHeaderRegex.FindStringSubmatch(header) tokens := authHeaderRegex.FindStringSubmatch(header)
@ -60,7 +52,7 @@ func (h *microsubHandler) cachedCheckAuthToken(conn redis.Conn, header string, r
log.Printf("Error while HGETALL %v\n", err) log.Printf("Error while HGETALL %v\n", err)
} }
authorized := h.checkAuthToken(header, r) authorized := b.checkAuthToken(header, r)
authorized = true authorized = true
if authorized { if authorized {
@ -85,10 +77,10 @@ func (h *microsubHandler) cachedCheckAuthToken(conn redis.Conn, header string, r
return authorized return authorized
} }
func (h *microsubHandler) checkAuthToken(header string, token *TokenResponse) bool { func (b *memoryBackend) checkAuthToken(header string, token *auth.TokenResponse) bool {
log.Println("Checking auth token") log.Println("Checking auth token")
tokenEndpoint := h.Backend.(*memoryBackend).TokenEndpoint tokenEndpoint := b.TokenEndpoint
req, err := http.NewRequest("GET", tokenEndpoint, nil) req, err := http.NewRequest("GET", tokenEndpoint, nil)
if err != nil { if err != nil {

View File

@ -211,7 +211,7 @@ func isLoggedIn(backend *memoryBackend, sess *session) bool {
return false return false
} }
if !auth { if !authEnabled {
return true return true
} }
@ -448,7 +448,7 @@ func (h *mainHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
scope = "create" scope = "create"
} }
auth := authRequest{ authReq := authRequest{
Me: me, Me: me,
ClientID: clientID, ClientID: clientID,
RedirectURI: redirectURI, RedirectURI: redirectURI,
@ -456,7 +456,7 @@ func (h *mainHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
State: state, State: state,
} }
_, err = conn.Do("HMSET", redis.Args{}.Add("state:"+state).AddFlat(&auth)...) _, err = conn.Do("HMSET", redis.Args{}.Add("state:"+state).AddFlat(&authReq)...)
if err != nil { if err != nil {
log.Println(err) log.Println(err)
fmt.Fprintf(w, "ERROR: %q\n", err) fmt.Fprintf(w, "ERROR: %q\n", err)

View File

@ -23,12 +23,13 @@ import (
"log" "log"
"net/http" "net/http"
"os" "os"
"regexp"
"time" "time"
"github.com/gomodule/redigo/redis" "github.com/gomodule/redigo/redis"
"p83.nl/go/ekster/pkg/auth"
"p83.nl/go/ekster/pkg/microsub" "p83.nl/go/ekster/pkg/microsub"
"p83.nl/go/ekster/pkg/server"
) )
const ( const (
@ -38,16 +39,15 @@ const (
var ( var (
pool *redis.Pool pool *redis.Pool
port int port int
auth bool authEnabled bool
redisServer = flag.String("redis", "redis:6379", "") redisServer = flag.String("redis", "redis:6379", "")
entryRegex = regexp.MustCompile("^entry\\[\\d+\\]$")
) )
func init() { func init() {
log.SetFlags(log.Lshortfile | log.Ldate | log.Ltime) log.SetFlags(log.Lshortfile | log.Ldate | log.Ltime)
flag.IntVar(&port, "port", 80, "port for serving api") flag.IntVar(&port, "port", 80, "port for serving api")
flag.BoolVar(&auth, "auth", true, "use auth") flag.BoolVar(&authEnabled, "auth", true, "use auth")
} }
func newPool(addr string) *redis.Pool { func newPool(addr string) *redis.Pool {
@ -58,11 +58,33 @@ func newPool(addr string) *redis.Pool {
} }
} }
func WithAuth(handler http.Handler, b *memoryBackend) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
authorization := r.Header.Get("Authorization")
var token auth.TokenResponse
if !b.AuthTokenAccepted(authorization, &token) {
log.Printf("Token could not be validated")
http.Error(w, "Can't validate token", 403)
return
}
if token.Me != b.Me {
log.Printf("Missing \"me\" in token response: %#v\n", token)
http.Error(w, "Wrong me", 403)
return
}
handler.ServeHTTP(w, r)
})
}
func main() { func main() {
log.Println("eksterd - microsub server") log.Println("eksterd - microsub server")
flag.Parse() flag.Parse()
if auth { if authEnabled {
log.Println("Using auth") log.Println("Using auth")
} else { } else {
log.Println("Authentication disabled") log.Println("Authentication disabled")
@ -98,10 +120,13 @@ func main() {
Backend: backend.(*memoryBackend), Backend: backend.(*memoryBackend),
}) })
http.Handle("/microsub", &microsubHandler{ handler := server.NewMicrosubHandler(backend, pool)
Backend: backend, if authEnabled {
HubIncomingBackend: &hubBackend, handler = WithAuth(handler, backend.(*memoryBackend))
}) }
http.Handle("/microsub", handler)
http.Handle("/incoming/", &incomingHandler{ http.Handle("/incoming/", &incomingHandler{
Backend: &hubBackend, Backend: &hubBackend,
}) })

View File

@ -32,6 +32,7 @@ import (
"sync" "sync"
"time" "time"
"p83.nl/go/ekster/pkg/auth"
"p83.nl/go/ekster/pkg/fetch" "p83.nl/go/ekster/pkg/fetch"
"p83.nl/go/ekster/pkg/microsub" "p83.nl/go/ekster/pkg/microsub"
"p83.nl/go/ekster/pkg/util" "p83.nl/go/ekster/pkg/util"
@ -41,6 +42,8 @@ import (
) )
type memoryBackend struct { type memoryBackend struct {
hubIncomingBackend
lock sync.RWMutex lock sync.RWMutex
Channels map[string]microsub.Channel Channels map[string]microsub.Channel
Feeds map[string][]microsub.Feed Feeds map[string][]microsub.Feed
@ -78,6 +81,12 @@ func (f *fetch2) Fetch(url string) (*http.Response, error) {
return Fetch2(url) return Fetch2(url)
} }
func (b *memoryBackend) AuthTokenAccepted(header string, r *auth.TokenResponse) bool {
conn := pool.Get()
defer conn.Close()
return b.cachedCheckAuthToken(conn, header, r)
}
func (b *memoryBackend) Debug() { func (b *memoryBackend) Debug() {
b.lock.RLock() b.lock.RLock()
defer b.lock.RUnlock() defer b.lock.RUnlock()
@ -412,6 +421,8 @@ func (b *memoryBackend) FollowURL(uid string, url string) (microsub.Feed, error)
b.ProcessContent(uid, feed.URL, resp.Header.Get("Content-Type"), resp.Body) b.ProcessContent(uid, feed.URL, resp.Header.Get("Content-Type"), resp.Body)
b.CreateFeed(url, uid)
return feed, nil return feed, nil
} }
@ -498,12 +509,14 @@ func (b *memoryBackend) Search(query string) ([]microsub.Feed, error) {
} }
defer feedResp.Body.Close() defer feedResp.Body.Close()
// TODO: Combine FeedHeader and FeedItems so we can use it here
parsedFeed, err := fetch.FeedHeader(&fetch2{}, fetchUrl.String(), feedResp.Header.Get("Content-Type"), feedResp.Body) parsedFeed, err := fetch.FeedHeader(&fetch2{}, fetchUrl.String(), feedResp.Header.Get("Content-Type"), feedResp.Body)
if err != nil { if err != nil {
log.Printf("Error in parse of %s - %v\n", fetchUrl, err) log.Printf("Error in parse of %s - %v\n", fetchUrl, err)
continue continue
} }
// TODO: Only include the feed if it contains some items
feeds = append(feeds, parsedFeed) feeds = append(feeds, parsedFeed)
if alts, e := md.Rels["alternate"]; e { if alts, e := md.Rels["alternate"]; e {

14
pkg/auth/types.go Normal file
View File

@ -0,0 +1,14 @@
package auth
type Auther interface {
AuthTokenAccepted(header string, r *TokenResponse) bool
}
// TokenResponse is the information that we get back from the token endpoint of the user...
type TokenResponse struct {
Me string `json:"me"`
ClientID string `json:"client_id"`
Scope string `json:"scope"`
IssuedAt int64 `json:"issued_at"`
Nonce int64 `json:"nonce"`
}

View File

@ -20,6 +20,7 @@ package jf2
import ( import (
"fmt" "fmt"
"log" "log"
"reflect"
"strings" "strings"
"time" "time"
@ -28,6 +29,54 @@ import (
"willnorris.com/go/microformats" "willnorris.com/go/microformats"
) )
func ConvertItemProps(item interface{}, props map[string][]interface{}) {
sv := reflect.ValueOf(item).Elem()
st := reflect.TypeOf(item).Elem()
for i := 0; i < st.NumField(); i++ {
ft := st.Field(i)
fv := sv.Field(i)
if value, ok := ft.Tag.Lookup("mf2"); ok {
if value == "" {
continue
}
if s, e := props[value]; e {
if len(s) > 0 {
if str, ok := s[0].(string); ft.Type.Kind() == reflect.String && ok {
fv.SetString(str)
} else if ft.Type.Kind() == reflect.Slice {
for _, v := range s {
fv.Set(reflect.Append(fv, reflect.ValueOf(v)))
}
} else if card, ok := s[0].(map[string]interface{}); ok {
var hcard microsub.Card
if t, ok := card["type"].([]interface{}); ok {
hcard.Type = t[0].(string)[2:]
}
if properties, ok := card["properties"].(map[string]interface{}); ok {
ps := make(map[string][]interface{})
for k, v := range properties {
ps[k] = v.([]interface{})
}
ConvertItemProps(&hcard, ps)
}
fv.Set(reflect.ValueOf(&hcard))
}
}
}
}
}
}
func ConvertItem(item interface{}, md *microformats.Microformat) {
sv := reflect.ValueOf(item).Elem()
sv.FieldByName("Type").SetString(md.Type[0][2:])
ConvertItemProps(item, md.Properties)
}
func simplify(itemType string, item map[string][]interface{}, author map[string]string) map[string]interface{} { func simplify(itemType string, item map[string][]interface{}, author map[string]string) map[string]interface{} {
feedItem := make(map[string]interface{}) feedItem := make(map[string]interface{})
@ -200,6 +249,7 @@ func fetchValue(key string, values map[string]string) string {
} }
return "" return ""
} }
func MapToAuthor(result map[string]string) *microsub.Card { func MapToAuthor(result map[string]string) *microsub.Card {
item := &microsub.Card{} item := &microsub.Card{}
item.Type = "card" item.Type = "card"

View File

@ -18,78 +18,197 @@
package jf2 package jf2
import ( import (
"log" "encoding/json"
"net/url"
"os" "os"
"testing" "testing"
"p83.nl/go/ekster/pkg/microsub"
"willnorris.com/go/microformats" "willnorris.com/go/microformats"
) )
func TestInReplyTo(t *testing.T) { // func TestInReplyTo(t *testing.T) {
//
// f, err := os.Open("./tests/tantek-in-reply-to.html")
// if err != nil {
// log.Fatal(err)
// }
// defer f.Close()
//
// u, err := url.Parse("http://tantek.com/2018/115/t1/")
// if err != nil {
// log.Fatal(err)
// }
//
// data := microformats.Parse(f, u)
// results := SimplifyMicroformatData(data)
//
// if results[0]["type"] != "entry" {
// t.Fatalf("not an h-entry, but %s", results[0]["type"])
// }
// if results[0]["in-reply-to"] != "https://github.com/w3c/csswg-drafts/issues/2589" {
// t.Fatalf("not in-reply-to, but %s", results[0]["in-reply-to"])
// }
// if results[0]["syndication"] != "https://github.com/w3c/csswg-drafts/issues/2589#thumbs_up-by-tantek" {
// t.Fatalf("not in-reply-to, but %s", results[0]["syndication"])
// }
// if results[0]["published"] != "2018-04-25 11:14-0700" {
// t.Fatalf("not published, but %s", results[0]["published"])
// }
// if results[0]["updated"] != "2018-04-25 11:14-0700" {
// t.Fatalf("not updated, but %s", results[0]["updated"])
// }
// if results[0]["url"] != "http://tantek.com/2018/115/t1/" {
// t.Fatalf("not url, but %s", results[0]["url"])
// }
// if results[0]["uid"] != "http://tantek.com/2018/115/t1/" {
// t.Fatalf("not uid, but %s", results[0]["url"])
// }
//
// if authorValue, e := results[0]["author"]; e {
// if author, ok := authorValue.(map[string]string); ok {
// if author["name"] != "Tantek Çelik" {
// t.Fatalf("name is not expected name, but %q", author["name"])
// }
// if author["photo"] != "http://tantek.com/logo.jpg" {
// t.Fatalf("photo is not expected photo, but %q", author["photo"])
// }
// if author["url"] != "http://tantek.com/" {
// t.Fatalf("url is not expected url, but %q", author["url"])
// }
// } else {
// t.Fatal("author not a map")
// }
// } else {
// t.Fatal("author missing")
// }
//
// if contentValue, e := results[0]["content"]; e {
// if content, ok := contentValue.(map[string]string); ok {
// if content["text"] != "👍" {
// t.Fatal("text content missing")
// }
// if content["html"] != "👍" {
// t.Fatal("html content missing")
// }
// }
// }
// }
f, err := os.Open("./tests/tantek-in-reply-to.html") func TestMapToAuthor(t *testing.T) {
if err != nil { cardmap := make(map[string]string)
log.Fatal(err)
}
defer f.Close()
u, err := url.Parse("http://tantek.com/2018/115/t1/") cardmap["name"] = "Peter"
if err != nil { cardmap["url"] = "https://p83.nl/"
log.Fatal(err) cardmap["photo"] = "https://peterstuifzand.nl/img/profile.jpg"
}
data := microformats.Parse(f, u) card := MapToAuthor(cardmap)
results := SimplifyMicroformatData(data)
if results[0]["type"] != "entry" { if card.Type != "card" {
t.Fatalf("not an h-entry, but %s", results[0]["type"]) t.Error("mapped author type is not card")
} }
if results[0]["in-reply-to"] != "https://github.com/w3c/csswg-drafts/issues/2589" { if card.Name != cardmap["name"] {
t.Fatalf("not in-reply-to, but %s", results[0]["in-reply-to"]) t.Errorf("%q is not equal to %q", card.Name, "Peter")
} }
if results[0]["syndication"] != "https://github.com/w3c/csswg-drafts/issues/2589#thumbs_up-by-tantek" { if card.URL != cardmap["url"] {
t.Fatalf("not in-reply-to, but %s", results[0]["syndication"]) t.Errorf("%q is not equal to %q", card.URL, cardmap["url"])
} }
if results[0]["published"] != "2018-04-25 11:14-0700" { if card.Photo != cardmap["photo"] {
t.Fatalf("not published, but %s", results[0]["published"]) t.Errorf("%q is not equal to %q", card.Photo, cardmap["photo"])
} }
if results[0]["updated"] != "2018-04-25 11:14-0700" { }
t.Fatalf("not updated, but %s", results[0]["updated"])
} func TestMapToItem(t *testing.T) {
if results[0]["url"] != "http://tantek.com/2018/115/t1/" { itemmap := make(map[string]interface{})
t.Fatalf("not url, but %s", results[0]["url"]) itemmap["type"] = "entry"
} itemmap["name"] = "Title"
if results[0]["uid"] != "http://tantek.com/2018/115/t1/" { c := make(map[string]interface{})
t.Fatalf("not uid, but %s", results[0]["url"]) c["text"] = "Simple content"
} c["html"] = "<p>Simple content</p>"
itemmap["content"] = c
if authorValue, e := results[0]["author"]; e { itemmap["like-of"] = []string{
if author, ok := authorValue.(map[string]string); ok { "https://p83.nl/",
if author["name"] != "Tantek Çelik" { "https://p83.nl/test.html",
t.Fatalf("name is not expected name, but %q", author["name"]) }
} item := MapToItem(itemmap)
if author["photo"] != "http://tantek.com/logo.jpg" { if item.Type != "entry" {
t.Fatalf("photo is not expected photo, but %q", author["photo"]) t.Errorf("Expected Type entry, was %q", item.Type)
} }
if author["url"] != "http://tantek.com/" { if item.Name != "Title" {
t.Fatalf("url is not expected url, but %q", author["url"]) t.Errorf("Expected Name == %q, was actually %q", "Title", item.Name)
} }
} else { if item.Content.Text != "Simple content" {
t.Fatal("author not a map") t.Errorf("Expected Content.Text == %q, was actually %q", "Simple content", item.Content.Text)
} }
} else { if item.Content.HTML != "<p>Simple content</p>" {
t.Fatal("author missing") t.Errorf("Expected Content.HTML == %q, was actually %q", "<p>Simple content</p>", item.Content.HTML)
} }
// if val := item.LikeOf[0]; val != "https://p83.nl/" {
if contentValue, e := results[0]["content"]; e { // t.Errorf("Expected LikeOf[0] == %q, was actually %q", "https://p83.nl/", val)
if content, ok := contentValue.(map[string]string); ok { // }
if content["text"] != "👍" { // if val := item.LikeOf[1]; val != "https://p83.nl/test.html" {
t.Fatal("text content missing") // t.Errorf("Expected LikeOf[1] == %q, was actually %q", "https://p83.nl/test.html", val)
} // }
if content["html"] != "👍" { }
t.Fatal("html content missing")
} func TestConvertItem0(t *testing.T) {
} var item microsub.Item
var mdItem microformats.Microformat
f, err := os.Open("tests/test0.json")
if err != nil {
t.Fatalf("error while opening test0.json: %s", err)
}
json.NewDecoder(f).Decode(&mdItem)
ConvertItem(&item, &mdItem)
if item.Type != "entry" {
t.Errorf("Expected Type entry, was %q", item.Type)
}
if item.Name != "name test" {
t.Errorf("Expected Name == %q, was %q", "name test", item.Name)
}
}
func TestConvertItem1(t *testing.T) {
var item microsub.Item
var mdItem microformats.Microformat
f, err := os.Open("tests/test1.json")
if err != nil {
t.Fatalf("error while opening test1.json: %s", err)
}
json.NewDecoder(f).Decode(&mdItem)
ConvertItem(&item, &mdItem)
if item.Type != "entry" {
t.Errorf("Expected Type entry, was %q", item.Type)
}
if item.Author.Type != "card" {
t.Errorf("Expected Author.Type card, was %q", item.Author.Type)
}
if item.Author.Name != "Peter" {
t.Errorf("Expected Author.Name == %q, was %q", "Peter", item.Author.Name)
}
}
func TestConvertItem2(t *testing.T) {
var item microsub.Item
var mdItem microformats.Microformat
f, err := os.Open("tests/test2.json")
if err != nil {
t.Fatalf("error while opening test2.json: %s", err)
}
json.NewDecoder(f).Decode(&mdItem)
ConvertItem(&item, &mdItem)
if item.Type != "entry" {
t.Errorf("Expected Type entry, was %q", item.Type)
}
if item.Photo[0] != "https://peterstuifzand.nl/img/profile.jpg" {
t.Errorf("Expected Photo[0], was %q", item.Type)
}
if item.Author.Type != "card" {
t.Errorf("Expected Author.Type card, was %q", item.Author.Type)
}
if item.Author.Name != "Peter" {
t.Errorf("Expected Author.Name == %q, was %q", "Peter", item.Author.Name)
} }
} }

1
pkg/jf2/tests/test0.json Normal file
View File

@ -0,0 +1 @@
{"type":["h-entry"],"properties":{"name":["name test"]}}

1
pkg/jf2/tests/test1.json Normal file
View File

@ -0,0 +1 @@
{"type":["h-entry"],"properties":{"name":["name test"],"author":[{"type":["h-card"],"properties":{"name":["Peter"]}}]}}

25
pkg/jf2/tests/test2.json Normal file
View File

@ -0,0 +1,25 @@
{
"type": [
"h-entry"
],
"properties": {
"name": [
"name test"
],
"photo": [
"https://peterstuifzand.nl/img/profile.jpg"
],
"author": [
{
"type": [
"h-card"
],
"properties": {
"name": [
"Peter"
]
}
}
]
}
}

View File

@ -40,40 +40,40 @@ type Channel struct {
type Card struct { type Card struct {
Filled bool `json:"-,omitempty"` Filled bool `json:"-,omitempty"`
Type string `json:"type,omitempty"` Type string `json:"type,omitempty"`
Name string `json:"name,omitempty"` Name string `json:"name,omitempty" mf2:"name"`
URL string `json:"url,omitempty"` URL string `json:"url,omitempty" mf2:"url"`
Photo string `json:"photo,omitempty"` Photo string `json:"photo,omitempty" mf2:"photo"`
Locality string `json:"locality,omitempty"` Locality string `json:"locality,omitempty" mf2:"locality"`
Region string `json:"region,omitempty"` Region string `json:"region,omitempty" mf2:"region"`
CountryName string `json:"country-name,omitempty"` CountryName string `json:"country-name,omitempty" mf2:"country-name"`
Longitude string `json:"longitude,omitempty"` Longitude string `json:"longitude,omitempty" mf2:"longitude"`
Latitude string `json:"latitude,omitempty"` Latitude string `json:"latitude,omitempty" mf2:"latitude"`
} }
type Content struct { type Content struct {
Text string `json:"text,omitempty"` Text string `json:"text,omitempty" mf2:"value"`
HTML string `json:"html,omitempty"` HTML string `json:"html,omitempty" mf2:"html"`
} }
// Item is a post object // Item is a post object
type Item struct { type Item struct {
Type string `json:"type"` Type string `json:"type"`
Name string `json:"name,omitempty"` Name string `json:"name,omitempty" mf2:"name"`
Published string `json:"published,omitempty"` Published string `json:"published,omitempty" mf2:"published"`
Updated string `json:"updated,omitempty"` Updated string `json:"updated,omitempty" mf2:"updated"`
URL string `json:"url,omitempty"` URL string `json:"url,omitempty" mf2:"url"`
UID string `json:"uid,omitempty"` UID string `json:"uid,omitempty" mf2:"uid"`
Author *Card `json:"author,omitempty"` Author *Card `json:"author,omitempty" mf2:"author"`
Category []string `json:"category,omitempty"` Category []string `json:"category,omitempty" mf2:"category"`
Photo []string `json:"photo,omitempty"` Photo []string `json:"photo,omitempty" mf2:"photo"`
LikeOf []string `json:"like-of,omitempty"` LikeOf []string `json:"like-of,omitempty" mf2:"like-of"`
BookmarkOf []string `json:"bookmark-of,omitempty"` BookmarkOf []string `json:"bookmark-of,omitempty" mf2:"bookmark-of"`
RepostOf []string `json:"repost-of,omitempty"` RepostOf []string `json:"repost-of,omitempty" mf2:"repost-of"`
InReplyTo []string `json:"in-reply-to,omitempty"` InReplyTo []string `json:"in-reply-to,omitempty" mf2:"in-reply-to"`
Content *Content `json:"content,omitempty"` Content *Content `json:"content,omitempty" mf2:"content"`
Latitude string `json:"latitude,omitempty"` Latitude string `json:"latitude,omitempty" mf2:"latitude"`
Longitude string `json:"longitude,omitempty"` Longitude string `json:"longitude,omitempty" mf2:"longitude"`
Checkin *Card `json:"checkin,omitempty"` Checkin *Card `json:"checkin,omitempty" mf2:"checkin"`
Refs map[string]Item `json:"refs,omitempty"` Refs map[string]Item `json:"refs,omitempty"`
ID string `json:"_id,omitempty"` ID string `json:"_id,omitempty"`
Read bool `json:"_is_read"` Read bool `json:"_is_read"`

View File

@ -1,4 +1,4 @@
package main package server
import ( import (
"encoding/json" "encoding/json"

View File

@ -15,7 +15,7 @@
You should have received a copy of the GNU General Public License You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. along with this program. If not, see <http://www.gnu.org/licenses/>.
*/ */
package main package server
import ( import (
"encoding/json" "encoding/json"
@ -23,20 +23,29 @@ import (
"log" "log"
"net/http" "net/http"
"os" "os"
"regexp"
"p83.nl/go/ekster/pkg/microsub" "p83.nl/go/ekster/pkg/microsub"
"github.com/gomodule/redigo/redis" "github.com/gomodule/redigo/redis"
) )
var (
entryRegex = regexp.MustCompile("^entry\\[\\d+\\]$")
)
type microsubHandler struct { type microsubHandler struct {
Backend microsub.Microsub backend microsub.Microsub
HubIncomingBackend HubBackend pool *redis.Pool
}
func NewMicrosubHandler(backend microsub.Microsub, pool *redis.Pool) http.Handler {
return &microsubHandler{backend, pool}
} }
func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
var logger = log.New(os.Stdout, "logger: ", log.Lshortfile) var logger = log.New(os.Stdout, "logger: ", log.Lshortfile)
conn := redis.NewLoggingConn(pool.Get(), logger, "microsub") conn := redis.NewLoggingConn(h.pool.Get(), logger, "microsub")
defer conn.Close() defer conn.Close()
r.ParseForm() r.ParseForm()
@ -51,30 +60,12 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return return
} }
if auth {
authorization := r.Header.Get("Authorization")
var token TokenResponse
if !h.cachedCheckAuthToken(conn, authorization, &token) {
log.Printf("Token could not be validated")
http.Error(w, "Can't validate token", 403)
return
}
if token.Me != h.Backend.(*memoryBackend).Me {
log.Printf("Missing \"me\" in token response: %#v\n", token)
http.Error(w, "Wrong me", 403)
return
}
}
if r.Method == http.MethodGet { if r.Method == http.MethodGet {
w.Header().Add("Access-Control-Allow-Origin", "*") w.Header().Add("Access-Control-Allow-Origin", "*")
values := r.URL.Query() values := r.URL.Query()
action := values.Get("action") action := values.Get("action")
if action == "channels" { if action == "channels" {
channels, err := h.Backend.ChannelsGetList() channels, err := h.backend.ChannelsGetList()
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -89,7 +80,7 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return return
} }
} else if action == "timeline" { } else if action == "timeline" {
timeline, err := h.Backend.TimelineGet(values.Get("before"), values.Get("after"), values.Get("channel")) timeline, err := h.backend.TimelineGet(values.Get("before"), values.Get("after"), values.Get("channel"))
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -104,7 +95,7 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return return
} }
} else if action == "preview" { } else if action == "preview" {
timeline, err := h.Backend.PreviewURL(values.Get("url")) timeline, err := h.backend.PreviewURL(values.Get("url"))
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -119,7 +110,7 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
} }
} else if action == "follow" { } else if action == "follow" {
channel := values.Get("channel") channel := values.Get("channel")
following, err := h.Backend.FollowGetList(channel) following, err := h.backend.FollowGetList(channel)
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -136,7 +127,7 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
} else if action == "events" { } else if action == "events" {
conn, _, _ := w.(http.Hijacker).Hijack() conn, _, _ := w.(http.Hijacker).Hijack()
cons := newConsumer(conn) cons := newConsumer(conn)
h.Backend.AddEventListener(cons) h.backend.AddEventListener(cons)
} else { } else {
http.Error(w, fmt.Sprintf("unknown action %s\n", action), 500) http.Error(w, fmt.Sprintf("unknown action %s\n", action), 500)
return return
@ -152,20 +143,19 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
method := values.Get("method") method := values.Get("method")
uid := values.Get("channel") uid := values.Get("channel")
if method == "delete" { if method == "delete" {
err := h.Backend.ChannelsDelete(uid) err := h.backend.ChannelsDelete(uid)
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
} }
w.Header().Add("Content-Type", "application/json") w.Header().Add("Content-Type", "application/json")
fmt.Fprintln(w, "[]") fmt.Fprintln(w, "[]")
h.Backend.(Debug).Debug()
return return
} }
jw := json.NewEncoder(w) jw := json.NewEncoder(w)
if uid == "" { if uid == "" {
channel, err := h.Backend.ChannelsCreate(name) channel, err := h.backend.ChannelsCreate(name)
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -177,7 +167,7 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return return
} }
} else { } else {
channel, err := h.Backend.ChannelsUpdate(uid, name) channel, err := h.backend.ChannelsUpdate(uid, name)
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -189,12 +179,11 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return return
} }
} }
h.Backend.(Debug).Debug()
} else if action == "follow" { } else if action == "follow" {
uid := values.Get("channel") uid := values.Get("channel")
url := values.Get("url") url := values.Get("url")
h.HubIncomingBackend.CreateFeed(url, uid) // h.HubIncomingBackend.CreateFeed(url, uid)
feed, err := h.Backend.FollowURL(uid, url) feed, err := h.backend.FollowURL(uid, url)
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -209,7 +198,7 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
} else if action == "unfollow" { } else if action == "unfollow" {
uid := values.Get("channel") uid := values.Get("channel")
url := values.Get("url") url := values.Get("url")
err := h.Backend.UnfollowURL(uid, url) err := h.backend.UnfollowURL(uid, url)
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -218,7 +207,7 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
fmt.Fprintln(w, "[]") fmt.Fprintln(w, "[]")
} else if action == "search" { } else if action == "search" {
query := values.Get("query") query := values.Get("query")
feeds, err := h.Backend.Search(query) feeds, err := h.backend.Search(query)
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return
@ -254,7 +243,7 @@ func (h *microsubHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
} }
if len(markAsRead) > 0 { if len(markAsRead) > 0 {
err := h.Backend.MarkRead(channel, markAsRead) err := h.backend.MarkRead(channel, markAsRead)
if err != nil { if err != nil {
http.Error(w, err.Error(), 500) http.Error(w, err.Error(), 500)
return return