Use database backend instead of backend.json

This commit is contained in:
Peter Stuifzand 2021-10-31 01:37:39 +02:00
parent 40dd032ab0
commit 64ae959670
Signed by: peter
GPG Key ID: 374322D56E5209E8
19 changed files with 1998 additions and 602 deletions

View File

@ -0,0 +1 @@
-- Down migration: remove the channels table.
-- IF EXISTS keeps the rollback idempotent, mirroring the
-- CREATE TABLE IF NOT EXISTS in the corresponding up migration.
DROP TABLE IF EXISTS "channels";

View File

@ -0,0 +1,7 @@
-- Channels table: one row per microsub channel.
-- "uid" is the external identifier used by the API; "name" is the display name.
CREATE TABLE IF NOT EXISTS "channels" (
    "id"         INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    "uid"        VARCHAR(255) UNIQUE,
    "name"       VARCHAR(255) UNIQUE,
    "created_at" TIMESTAMPTZ DEFAULT current_timestamp,
    "updated_at" TIMESTAMPTZ
);

View File

@ -0,0 +1 @@
-- Down migration: remove the feeds table.
-- IF EXISTS keeps the rollback safe to re-run.
DROP TABLE IF EXISTS "feeds";

View File

@ -0,0 +1,7 @@
-- Feeds table: one row per subscribed feed URL, owned by a channel.
-- IF NOT EXISTS added for consistency with the channels/items migrations,
-- which both guard their CREATE TABLE the same way.
CREATE TABLE IF NOT EXISTS "feeds" (
"id" int primary key generated always as identity,
-- Deleting (or renumbering) a channel cascades to its feeds.
"channel_id" int references "channels" on update cascade on delete cascade,
"url" varchar(512) not null unique,
"created_at" timestamptz DEFAULT current_timestamp,
"updated_at" timestamptz
);

View File

@ -0,0 +1 @@
-- Down migration: remove the default channels seeded by the matching up migration.
DELETE FROM "channels" WHERE "uid" IN ('home', 'notifications');

View File

@ -0,0 +1 @@
-- Seed the two built-in channels used by the backend.
-- "uid" is UNIQUE, so ON CONFLICT DO NOTHING makes this migration safe to run
-- against a database where the rows were already created by other means.
INSERT INTO "channels" ("uid", "name") VALUES ('home', 'Home'), ('notifications', 'Notifications')
ON CONFLICT ("uid") DO NOTHING;

View File

@ -0,0 +1 @@
-- Down migration: remove the items table.
-- IF EXISTS keeps the rollback idempotent, mirroring the
-- CREATE TABLE IF NOT EXISTS in the corresponding up migration.
DROP TABLE IF EXISTS "items";

View File

@ -0,0 +1,10 @@
-- Items table: one row per timeline item stored in a channel.
CREATE TABLE IF NOT EXISTS "items" (
"id" int primary key generated always as identity,
-- Deleting a channel removes its items.
"channel_id" int references "channels" on delete cascade,
-- Globally unique item identifier; the UNIQUE constraint deduplicates
-- items across refreshes (unique per database, not per channel).
"uid" varchar(512) not null unique,
-- Read flag: 0 = unread; presumably non-zero = read -- confirm against readers.
"is_read" int default 0,
-- Full item payload as JSON (assumed to be the serialized microsub item --
-- TODO confirm against the code that writes this column).
"data" jsonb,
"created_at" timestamptz DEFAULT current_timestamp,
"updated_at" timestamptz,
"published_at" timestamptz
);

View File

@ -0,0 +1 @@
-- Down migration: detach items from feeds.
-- IF EXISTS keeps the rollback safe to re-run.
ALTER TABLE "items" DROP COLUMN IF EXISTS "feed_id";

View File

@ -0,0 +1 @@
-- Link each item to the feed it came from; deleting a feed removes its items.
-- ADD COLUMN IF NOT EXISTS (PostgreSQL 9.6+) keeps the migration idempotent.
ALTER TABLE "items" ADD COLUMN IF NOT EXISTS "feed_id" INT REFERENCES "feeds" ON DELETE CASCADE;

View File

@ -659,30 +659,28 @@ func (h *mainHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
} }
return return
} else if r.URL.Path == "/settings/channel" { } else if r.URL.Path == "/settings/channel" {
defer h.Backend.save() // defer h.Backend.save()
uid := r.FormValue("uid") // uid := r.FormValue("uid")
//
if h.Backend.Settings == nil { // if h.Backend.Settings == nil {
h.Backend.Settings = make(map[string]channelSetting) // h.Backend.Settings = make(map[string]channelSetting)
} // }
//
excludeRegex := r.FormValue("exclude_regex") // excludeRegex := r.FormValue("exclude_regex")
includeRegex := r.FormValue("include_regex") // includeRegex := r.FormValue("include_regex")
channelType := r.FormValue("type") // channelType := r.FormValue("type")
//
setting, e := h.Backend.Settings[uid] // setting, e := h.Backend.Settings[uid]
if !e { // if !e {
setting = channelSetting{} // setting = channelSetting{}
} // }
setting.ExcludeRegex = excludeRegex // setting.ExcludeRegex = excludeRegex
setting.IncludeRegex = includeRegex // setting.IncludeRegex = includeRegex
setting.ChannelType = channelType // setting.ChannelType = channelType
if values, e := r.Form["exclude_type"]; e { // if values, e := r.Form["exclude_type"]; e {
setting.ExcludeType = values // setting.ExcludeType = values
} // }
h.Backend.Settings[uid] = setting // h.Backend.Settings[uid] = setting
h.Backend.Debug()
http.Redirect(w, r, "/settings", 302) http.Redirect(w, r, "/settings", 302)
return return

View File

@ -126,8 +126,9 @@ func (h *hubIncomingBackend) UpdateFeed(feedID int64, contentType string, body i
return err return err
} }
// FIXME: feed id for incoming websub content
log.Printf("Updating feed %d - %s %s\n", feedID, u, channel) log.Printf("Updating feed %d - %s %s\n", feedID, u, channel)
err = h.backend.ProcessContent(channel, u, contentType, body) err = h.backend.ProcessContent(channel, fmt.Sprintf("incoming:%d", feedID), u, contentType, body)
if err != nil { if err != nil {
log.Printf("could not process content for channel %s: %s", channel, err) log.Printf("could not process content for channel %s: %s", channel, err)
} }

View File

@ -17,14 +17,20 @@ package main
import ( import (
"database/sql" "database/sql"
"embed"
_ "expvar"
"flag" "flag"
"log" "log"
"net/http" "net/http"
"os" "os"
"time" "time"
"github.com/gomodule/redigo/redis"
"p83.nl/go/ekster/pkg/auth" "p83.nl/go/ekster/pkg/auth"
"github.com/golang-migrate/migrate/v4"
_ "github.com/golang-migrate/migrate/v4/database/postgres"
"github.com/golang-migrate/migrate/v4/source/iofs"
"github.com/gomodule/redigo/redis"
) )
// AppOptions are options for the app // AppOptions are options for the app
@ -39,6 +45,9 @@ type AppOptions struct {
database *sql.DB database *sql.DB
} }
//go:embed db/migrations/*.sql
var migrations embed.FS
func init() { func init() {
log.SetFlags(log.Lshortfile | log.Ldate | log.Ltime) log.SetFlags(log.Lshortfile | log.Ldate | log.Ltime)
} }
@ -126,28 +135,36 @@ func main() {
log.Fatal("EKSTER_TEMPLATES environment variable not found, use env var or -templates dir option") log.Fatal("EKSTER_TEMPLATES environment variable not found, use env var or -templates dir option")
} }
} }
//
// createBackend := false
// args := flag.Args()
//
// if len(args) >= 1 {
// if args[0] == "new" {
// createBackend = true
// }
// }
//
// if createBackend {
// err := createMemoryBackend()
// if err != nil {
// log.Fatalf("Error while saving backend.json: %s", err)
// }
//
// TODO(peter): automatically gather this information from login or otherwise
//
// log.Println(`Config file "backend.json" is created in the current directory.`)
// log.Println(`Update "Me" variable to your website address "https://example.com/"`)
// log.Println(`Update "TokenEndpoint" variable to the address of your token endpoint "https://example.com/token"`)
//
// return
// }
createBackend := false // TODO(peter): automatically gather this information from login or otherwise
args := flag.Args()
if len(args) >= 1 { err := runMigrations()
if args[0] == "new" { if err != nil {
createBackend = true log.Fatalf("Error with migrations: %s", err)
}
}
if createBackend {
err := createMemoryBackend()
if err != nil {
log.Fatalf("Error while saving backend.json: %s", err)
}
// TODO(peter): automatically gather this information from login or otherwise
log.Println(`Config file "backend.json" is created in the current directory.`)
log.Println(`Update "Me" variable to your website address "https://example.com/"`)
log.Println(`Update "TokenEndpoint" variable to the address of your token endpoint "https://example.com/token"`)
return
} }
pool := newPool(options.RedisServer) pool := newPool(options.RedisServer)
@ -167,3 +184,38 @@ func main() {
db.Close() db.Close()
} }
// Log migrations
type Log struct {
}
// Printf for migrations logs
func (l Log) Printf(format string, v ...interface{}) {
log.Printf(format, v...)
}
// Verbose returns false
func (l Log) Verbose() bool {
return false
}
func runMigrations() error {
d, err := iofs.New(migrations, "db/migrations")
if err != nil {
return err
}
m, err := migrate.NewWithSourceInstance("iofs", d, "postgres://postgres@database/ekster?sslmode=disable&user=postgres&password=simple")
if err != nil {
return err
}
defer m.Close()
m.Log = &Log{}
log.Println("Running migrations")
if err = m.Up(); err != nil {
if err != migrate.ErrNoChange {
return err
}
}
log.Println("Migrations are up")
return nil
}

View File

@ -4,20 +4,17 @@ import (
"bufio" "bufio"
"bytes" "bytes"
"database/sql" "database/sql"
"encoding/json"
"fmt" "fmt"
"io" "io"
"io/ioutil" "io/ioutil"
"log" "log"
"net/http" "net/http"
"net/url" "net/url"
"os"
"regexp" "regexp"
"strings" "strings"
"sync" "sync"
"time" "time"
"github.com/pkg/errors"
"p83.nl/go/ekster/pkg/auth" "p83.nl/go/ekster/pkg/auth"
"p83.nl/go/ekster/pkg/fetch" "p83.nl/go/ekster/pkg/fetch"
"p83.nl/go/ekster/pkg/microsub" "p83.nl/go/ekster/pkg/microsub"
@ -77,11 +74,6 @@ type newItemMessage struct {
Channel string `json:"channel"` Channel string `json:"channel"`
} }
// Debug interface for easy of use in other packages
type Debug interface {
Debug()
}
type fetch2 struct{} type fetch2 struct{}
func (f *fetch2) Fetch(url string) (*http.Response, error) { func (f *fetch2) Fetch(url string) (*http.Response, error) {
@ -99,223 +91,117 @@ func (b *memoryBackend) AuthTokenAccepted(header string, r *auth.TokenResponse)
return cachedCheckAuthToken(conn, header, b.TokenEndpoint, r) return cachedCheckAuthToken(conn, header, b.TokenEndpoint, r)
} }
func (b *memoryBackend) Debug() {
b.lock.RLock()
defer b.lock.RUnlock()
fmt.Println(b.Channels)
fmt.Println(b.Feeds)
fmt.Println(b.Settings)
}
func (b *memoryBackend) load() error {
filename := "backend.json"
f, err := os.Open(filename)
if err != nil {
return err
}
defer f.Close()
jw := json.NewDecoder(f)
err = jw.Decode(b)
if err != nil {
return err
}
return nil
}
func (b *memoryBackend) refreshChannels() {
conn := b.pool.Get()
defer conn.Close()
conn.Do("DEL", "channels")
updateChannelInRedis(conn, "notifications", 1)
b.lock.RLock()
for uid, channel := range b.Channels {
log.Printf("loading channel %s - %s\n", uid, channel.Name)
updateChannelInRedis(conn, channel.UID, DefaultPrio)
}
b.lock.RUnlock()
}
func (b *memoryBackend) save() error {
filename := "backend.json"
f, err := os.Create(filename)
if err != nil {
return err
}
defer f.Close()
jw := json.NewEncoder(f)
jw.SetIndent("", " ")
b.lock.RLock()
defer b.lock.RUnlock()
return jw.Encode(b)
}
func loadMemoryBackend(pool *redis.Pool, database *sql.DB) (*memoryBackend, error) { func loadMemoryBackend(pool *redis.Pool, database *sql.DB) (*memoryBackend, error) {
backend := &memoryBackend{pool: pool, database: database} backend := &memoryBackend{pool: pool, database: database}
err := backend.load()
if err != nil {
return nil, errors.Wrap(err, "while loading backend")
}
backend.refreshChannels()
return backend, nil return backend, nil
} }
func createMemoryBackend() error {
backend := memoryBackend{}
backend.lock.Lock()
backend.Feeds = make(map[string][]microsub.Feed)
channels := []microsub.Channel{
{UID: "notifications", Name: "Notifications"},
{UID: "home", Name: "Home"},
}
backend.Channels = make(map[string]microsub.Channel)
for _, c := range channels {
backend.Channels[c.UID] = c
}
backend.NextUID = 1000000
// FIXME: can't be used in Backend
backend.Me = "https://example.com/"
backend.lock.Unlock()
return backend.save()
}
// ChannelsGetList gets channels // ChannelsGetList gets channels
func (b *memoryBackend) ChannelsGetList() ([]microsub.Channel, error) { func (b *memoryBackend) ChannelsGetList() ([]microsub.Channel, error) {
conn := b.pool.Get() conn := b.pool.Get()
defer conn.Close() defer conn.Close()
b.lock.RLock()
defer b.lock.RUnlock()
var channels []microsub.Channel var channels []microsub.Channel
uids, err := redis.Strings(conn.Do("SORT", "channels", "BY", "channel_sortorder_*", "ASC")) rows, err := b.database.Query(`
SELECT c.uid, c.name, count(i.channel_id)
FROM "channels" "c" left join items i on c.id = i.channel_id and i.is_read = 0
GROUP BY c.id;
`)
if err != nil { if err != nil {
log.Printf("Sorting channels failed: %v\n", err) return nil, err
for _, v := range b.Channels { }
channels = append(channels, v) defer rows.Close()
} for rows.Next() {
} else { var uid, name string
for _, uid := range uids { var count int
if c, e := b.Channels[uid]; e { _ = rows.Scan(&uid, &name, &count)
channels = append(channels, c)
} channels = append(channels, microsub.Channel{UID: uid, Name: name, Unread: microsub.Unread{
} Type: microsub.UnreadCount,
UnreadCount: count,
}})
} }
util.StablePartition(channels, 0, len(channels), func(i int) bool {
return channels[i].Unread.HasUnread()
})
return channels, nil return channels, nil
} }
// ChannelsCreate creates a channels // ChannelsCreate creates a channels
func (b *memoryBackend) ChannelsCreate(name string) (microsub.Channel, error) { func (b *memoryBackend) ChannelsCreate(name string) (microsub.Channel, error) {
// Try to fetch the channel, if it exists, we don't create it uid := util.RandStringBytes(24)
if channel, e := b.fetchChannel(name); e {
return channel, nil channel := microsub.Channel{
UID: uid,
Name: name,
Unread: microsub.Unread{Type: microsub.UnreadCount},
} }
// Otherwise create the channel result, err := b.database.Exec(`insert into "channels" ("uid", "name", "created_at") values($1, $2, DEFAULT)`, channel.UID, channel.Name)
channel := b.createChannel(name) if err != nil {
b.setChannel(channel) return channel, err
b.save() }
conn := b.pool.Get()
defer conn.Close()
updateChannelInRedis(conn, channel.UID, DefaultPrio)
b.broker.Notifier <- sse.Message{Event: "new channel", Object: channelMessage{1, channel}}
if n, err := result.RowsAffected(); err != nil {
if n > 0 {
b.broker.Notifier <- sse.Message{Event: "new channel", Object: channelMessage{1, channel}}
}
}
return channel, nil return channel, nil
} }
// ChannelsUpdate updates a channels // ChannelsUpdate updates a channels
func (b *memoryBackend) ChannelsUpdate(uid, name string) (microsub.Channel, error) { func (b *memoryBackend) ChannelsUpdate(uid, name string) (microsub.Channel, error) {
defer b.save() _, err := b.database.Exec(`UPDATE "channels" SET "name" = $1 WHERE "uid" = $2`, name, uid)
if err != nil {
b.lock.RLock() return microsub.Channel{}, err
c, e := b.Channels[uid] }
b.lock.RUnlock() c := microsub.Channel{
UID: uid,
if e { Name: name,
c.Name = name Unread: microsub.Unread{},
b.lock.Lock()
b.Channels[uid] = c
b.lock.Unlock()
b.broker.Notifier <- sse.Message{Event: "update channel", Object: channelMessage{1, c}}
return c, nil
} }
return microsub.Channel{}, fmt.Errorf("channel %s does not exist", uid) b.broker.Notifier <- sse.Message{Event: "update channel", Object: channelMessage{1, c}}
return c, nil
} }
// ChannelsDelete deletes a channel // ChannelsDelete deletes a channel
func (b *memoryBackend) ChannelsDelete(uid string) error { func (b *memoryBackend) ChannelsDelete(uid string) error {
defer b.save() _, err := b.database.Exec(`delete from "channels" where "uid" = $1`, uid)
if err != nil {
conn := b.pool.Get() return err
defer conn.Close()
removed := false
b.lock.RLock()
if _, e := b.Channels[uid]; e {
removed = true
} }
b.lock.RUnlock() b.broker.Notifier <- sse.Message{Event: "delete channel", Object: channelDeletedMessage{1, uid}}
removeChannelFromRedis(conn, uid)
b.lock.Lock()
delete(b.Channels, uid)
delete(b.Feeds, uid)
b.lock.Unlock()
if removed {
b.broker.Notifier <- sse.Message{Event: "delete channel", Object: channelDeletedMessage{1, uid}}
}
return nil return nil
} }
func (b *memoryBackend) removeFeed(feedID string) error { type feed struct {
b.lock.Lock() UID string // channel
for uid := range b.Channels { ID int
feeds := b.Feeds[uid] URL string
for i, feed := range feeds {
if feed.URL == feedID {
feeds = append(feeds[:i], feeds[i+1:]...)
}
}
b.Feeds[uid] = feeds
}
b.lock.Unlock()
return nil
} }
func (b *memoryBackend) getFeeds() map[string][]string { func (b *memoryBackend) getFeeds() ([]feed, error) {
feeds := make(map[string][]string) rows, err := b.database.Query(`SELECT "f"."id", "f"."url", "c"."uid" FROM "feeds" AS "f" INNER JOIN public.channels c on c.id = f.channel_id`)
b.lock.RLock() if err != nil {
for uid := range b.Channels { return nil, err
for _, feed := range b.Feeds[uid] {
feeds[uid] = append(feeds[uid], feed.URL)
}
} }
b.lock.RUnlock()
return feeds var feeds []feed
for rows.Next() {
var feedID int
var feedURL, UID string
err = rows.Scan(&feedID, &feedURL, &UID)
if err != nil {
log.Printf("while scanning feeds: %s", err)
continue
}
feeds = append(feeds, feed{UID, feedID, feedURL})
}
return feeds, nil
} }
func (b *memoryBackend) run() { func (b *memoryBackend) run() {
@ -337,29 +223,33 @@ func (b *memoryBackend) run() {
} }
func (b *memoryBackend) RefreshFeeds() { func (b *memoryBackend) RefreshFeeds() {
feeds := b.getFeeds() feeds, err := b.getFeeds()
if err != nil {
return
}
count := 0 count := 0
for uid := range feeds { for _, feed := range feeds {
for _, feedURL := range feeds[uid] { feedURL := feed.URL
log.Println("Processing", feedURL) feedID := feed.ID
resp, err := b.Fetch3(uid, feedURL) uid := feed.UID
if err != nil { log.Println("Processing", feedURL)
log.Printf("Error while Fetch3 of %s: %v\n", feedURL, err) resp, err := b.Fetch3(uid, feedURL)
b.addNotification("Error while fetching feed", feedURL, err) if err != nil {
count++ log.Printf("Error while Fetch3 of %s: %v\n", feedURL, err)
continue b.addNotification("Error while fetching feed", feedURL, err)
} count++
err = b.ProcessContent(uid, feedURL, resp.Header.Get("Content-Type"), resp.Body) continue
if err != nil {
log.Printf("Error while processing content for %s: %v\n", feedURL, err)
b.addNotification("Error while processing feed", feedURL, err)
count++
continue
}
_ = resp.Body.Close()
} }
err = b.ProcessContent(uid, fmt.Sprintf("%d", feedID), feedURL, resp.Header.Get("Content-Type"), resp.Body)
if err != nil {
log.Printf("Error while processing content for %s: %v\n", feedURL, err)
b.addNotification("Error while processing feed", feedURL, err)
count++
continue
}
_ = resp.Body.Close()
} }
if count > 0 { if count > 0 {
@ -368,14 +258,17 @@ func (b *memoryBackend) RefreshFeeds() {
} }
func (b *memoryBackend) addNotification(name string, feedURL string, err error) { func (b *memoryBackend) addNotification(name string, feedURL string, err error) {
_ = b.channelAddItem("notifications", microsub.Item{ err = b.channelAddItem("notifications", microsub.Item{
Type: "entry", Type: "entry",
Name: name, Name: name,
Content: &microsub.Content{ Content: &microsub.Content{
Text: fmt.Sprintf("Error while updating feed %s: %v", feedURL, err), Text: fmt.Sprintf("ERROR: while updating feed: %s", err),
}, },
UID: time.Now().String(), Published: time.Now().Format(time.RFC3339),
}) })
if err != nil {
log.Printf("ERROR: %s", err)
}
} }
func (b *memoryBackend) TimelineGet(before, after, channel string) (microsub.Timeline, error) { func (b *memoryBackend) TimelineGet(before, after, channel string) (microsub.Timeline, error) {
@ -395,58 +288,67 @@ func (b *memoryBackend) TimelineGet(before, after, channel string) (microsub.Tim
} }
func (b *memoryBackend) FollowGetList(uid string) ([]microsub.Feed, error) { func (b *memoryBackend) FollowGetList(uid string) ([]microsub.Feed, error) {
b.lock.RLock() rows, err := b.database.Query(`SELECT "f"."url" FROM "feeds" AS "f" INNER JOIN channels c on c.id = f.channel_id WHERE c.uid = $1`, uid)
defer b.lock.RUnlock() if err != nil {
return b.Feeds[uid], nil return nil, err
}
var feeds []microsub.Feed
for rows.Next() {
var feedURL string
err = rows.Scan(&feedURL)
if err != nil {
continue
}
feeds = append(feeds, microsub.Feed{
Type: "feed",
URL: feedURL,
})
}
return feeds, nil
} }
func (b *memoryBackend) FollowURL(uid string, url string) (microsub.Feed, error) { func (b *memoryBackend) FollowURL(uid string, url string) (microsub.Feed, error) {
defer b.save()
feed := microsub.Feed{Type: "feed", URL: url} feed := microsub.Feed{Type: "feed", URL: url}
var channelID int
if row := b.database.QueryRow(`SELECT "id" FROM "channels" WHERE "uid" = $1`, uid); row != nil {
err := row.Scan(&channelID)
if err != nil {
log.Fatal(err)
}
}
result, err := b.database.Exec(
`INSERT INTO "feeds" ("channel_id", "url") VALUES ($1, $2)`,
channelID,
feed.URL,
)
if err != nil {
return microsub.Feed{}, err
}
feedID, _ := result.LastInsertId()
resp, err := b.Fetch3(uid, feed.URL) resp, err := b.Fetch3(uid, feed.URL)
if err != nil { if err != nil {
_ = b.channelAddItem("notifications", microsub.Item{ log.Println(err)
Type: "entry", b.addNotification("Error while fetching feed", feed.URL, err)
Name: "Error while fetching feed",
Content: &microsub.Content{
Text: fmt.Sprintf("Error while Fetch3 of %s: %v", feed.URL, err),
},
UID: time.Now().String(),
})
_ = b.updateChannelUnreadCount("notifications") _ = b.updateChannelUnreadCount("notifications")
return feed, err return feed, err
} }
defer resp.Body.Close() defer resp.Body.Close()
b.lock.Lock() _ = b.ProcessContent(uid, fmt.Sprintf("%d", feedID), feed.URL, resp.Header.Get("Content-Type"), resp.Body)
b.Feeds[uid] = append(b.Feeds[uid], feed)
b.lock.Unlock()
_ = b.ProcessContent(uid, feed.URL, resp.Header.Get("Content-Type"), resp.Body) // FIXME: re-enable CreateFeed
// _, _ = b.CreateFeed(url, uid)
_, _ = b.CreateFeed(url, uid)
return feed, nil return feed, nil
} }
func (b *memoryBackend) UnfollowURL(uid string, url string) error { func (b *memoryBackend) UnfollowURL(uid string, url string) error {
defer b.save() _, err := b.database.Exec(`DELETE FROM "feeds" "f" USING "channels" "c" WHERE "c"."id" = "f"."channel_id" AND f.url = $1 AND c.uid = $2`, url, uid)
index := -1 return err
b.lock.Lock()
for i, f := range b.Feeds[uid] {
if f.URL == url {
index = i
break
}
}
if index >= 0 {
feeds := b.Feeds[uid]
b.Feeds[uid] = append(feeds[:index], feeds[index+1:]...)
}
b.lock.Unlock()
return nil
} }
func checkURL(u string) bool { func checkURL(u string) bool {
@ -635,7 +537,7 @@ func ProcessSourcedItems(fetcher fetch.Fetcher, fetchURL, contentType string, bo
return items, nil return items, nil
} }
func (b *memoryBackend) ProcessContent(channel, fetchURL, contentType string, body io.Reader) error { func (b *memoryBackend) ProcessContent(channel, feedID, fetchURL, contentType string, body io.Reader) error {
cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2)) cachingFetch := WithCaching(b.pool, fetch.FetcherFunc(Fetch2))
items, err := ProcessSourcedItems(cachingFetch, fetchURL, contentType, body) items, err := ProcessSourcedItems(cachingFetch, fetchURL, contentType, body)
@ -644,6 +546,7 @@ func (b *memoryBackend) ProcessContent(channel, fetchURL, contentType string, bo
} }
for _, item := range items { for _, item := range items {
item.Source.ID = feedID
err = b.channelAddItemWithMatcher(channel, item) err = b.channelAddItemWithMatcher(channel, item)
if err != nil { if err != nil {
log.Printf("ERROR: %s\n", err) log.Printf("ERROR: %s\n", err)
@ -789,6 +692,9 @@ func matchItemText(item microsub.Item, re *regexp.Regexp) bool {
func (b *memoryBackend) channelAddItem(channel string, item microsub.Item) error { func (b *memoryBackend) channelAddItem(channel string, item microsub.Item) error {
timelineBackend := b.getTimeline(channel) timelineBackend := b.getTimeline(channel)
added, err := timelineBackend.AddItem(item) added, err := timelineBackend.AddItem(item)
if err != nil {
return err
}
// Sent message to Server-Sent-Events // Sent message to Server-Sent-Events
if added { if added {
@ -799,30 +705,19 @@ func (b *memoryBackend) channelAddItem(channel string, item microsub.Item) error
} }
func (b *memoryBackend) updateChannelUnreadCount(channel string) error { func (b *memoryBackend) updateChannelUnreadCount(channel string) error {
b.lock.RLock() // tl := b.getTimeline(channel)
c, exists := b.Channels[channel] // unread, err := tl.Count()
b.lock.RUnlock() // if err != nil {
// return err
if exists { // }
tl := b.getTimeline(channel) //
unread, err := tl.Count() // currentCount := c.Unread.UnreadCount
if err != nil { // c.Unread = microsub.Unread{Type: microsub.UnreadCount, UnreadCount: unread}
return err //
} // // Sent message to Server-Sent-Events
defer b.save() // if currentCount != unread {
// b.broker.Notifier <- sse.Message{Event: "new item in channel", Object: c}
currentCount := c.Unread.UnreadCount // }
c.Unread = microsub.Unread{Type: microsub.UnreadCount, UnreadCount: unread}
// Sent message to Server-Sent-Events
if currentCount != unread {
b.broker.Notifier <- sse.Message{Event: "new item in channel", Object: c}
}
b.lock.Lock()
b.Channels[channel] = c
b.lock.Unlock()
}
return nil return nil
} }
@ -890,7 +785,7 @@ func Fetch2(fetchURL string) (*http.Response, error) {
client := http.Client{} client := http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return nil, fmt.Errorf("fetch failed: %s: %s", u, err) return nil, fmt.Errorf("fetch failed: %s", err)
} }
return resp, err return resp, err
@ -899,9 +794,9 @@ func Fetch2(fetchURL string) (*http.Response, error) {
func (b *memoryBackend) getTimeline(channel string) timeline.Backend { func (b *memoryBackend) getTimeline(channel string) timeline.Backend {
// Set a default timeline type if not set // Set a default timeline type if not set
timelineType := "postgres-stream" timelineType := "postgres-stream"
if setting, ok := b.Settings[channel]; ok && setting.ChannelType != "" { // if setting, ok := b.Settings[channel]; ok && setting.ChannelType != "" {
timelineType = setting.ChannelType // timelineType = setting.ChannelType
} // }
tl := timeline.Create(channel, timelineType, b.pool, b.database) tl := timeline.Create(channel, timelineType, b.pool, b.database)
if tl == nil { if tl == nil {
log.Printf("no timeline found with name %q and type %q", channel, timelineType) log.Printf("no timeline found with name %q and type %q", channel, timelineType)

View File

@ -1,222 +1,210 @@
package main package main
import ( // func Test_memoryBackend_ChannelsCreate(t *testing.T) {
"reflect" // type fields struct {
"sync" // hubIncomingBackend hubIncomingBackend
"testing" // lock sync.RWMutex
"time" // Channels map[string]microsub.Channel
// Feeds map[string][]microsub.Feed
"github.com/gomodule/redigo/redis" // Settings map[string]channelSetting
"github.com/stretchr/testify/assert" // NextUID int
"p83.nl/go/ekster/pkg/microsub" // Me string
"p83.nl/go/ekster/pkg/sse" // TokenEndpoint string
) // AuthEnabled bool
// ticker *time.Ticker
func Test_memoryBackend_ChannelsCreate(t *testing.T) { // quit chan struct{}
type fields struct { // broker *sse.Broker
hubIncomingBackend hubIncomingBackend // pool *redis.Pool
lock sync.RWMutex // }
Channels map[string]microsub.Channel // type args struct {
Feeds map[string][]microsub.Feed // name string
Settings map[string]channelSetting // }
NextUID int // tests := []struct {
Me string // name string
TokenEndpoint string // fields fields
AuthEnabled bool // args args
ticker *time.Ticker // want microsub.Channel
quit chan struct{} // wantErr bool
broker *sse.Broker // }{
pool *redis.Pool // {
} // name: "Duplicate channel",
type args struct { // fields: fields{
name string // hubIncomingBackend: hubIncomingBackend{},
} // lock: sync.RWMutex{},
tests := []struct { // Channels: func() map[string]microsub.Channel {
name string // channels := make(map[string]microsub.Channel)
fields fields // channels["1234"] = microsub.Channel{
args args // UID: "1234",
want microsub.Channel // Name: "Test",
wantErr bool // Unread: microsub.Unread{
}{ // Type: microsub.UnreadCount,
{ // Unread: false,
name: "Duplicate channel", // UnreadCount: 0,
fields: fields{ // },
hubIncomingBackend: hubIncomingBackend{}, // }
lock: sync.RWMutex{}, // return channels
Channels: func() map[string]microsub.Channel { // }(),
channels := make(map[string]microsub.Channel) // Feeds: func() map[string][]microsub.Feed {
channels["1234"] = microsub.Channel{ // feeds := make(map[string][]microsub.Feed)
UID: "1234", // return feeds
Name: "Test", // }(),
Unread: microsub.Unread{ // Settings: nil,
Type: microsub.UnreadCount, // NextUID: 1,
Unread: false, // Me: "",
UnreadCount: 0, // TokenEndpoint: "",
}, // AuthEnabled: false,
} // ticker: nil,
return channels // quit: nil,
}(), // broker: nil,
Feeds: func() map[string][]microsub.Feed { // pool: nil,
feeds := make(map[string][]microsub.Feed) // },
return feeds // args: args{
}(), // name: "Test",
Settings: nil, // },
NextUID: 1, // want: microsub.Channel{
Me: "", // UID: "1234",
TokenEndpoint: "", // Name: "Test",
AuthEnabled: false, // Unread: microsub.Unread{
ticker: nil, // Type: microsub.UnreadCount,
quit: nil, // Unread: false,
broker: nil, // UnreadCount: 0,
pool: nil, // },
}, // },
args: args{ // wantErr: false,
name: "Test", // },
}, // }
want: microsub.Channel{ // for _, tt := range tests {
UID: "1234", // t.Run(tt.name, func(t *testing.T) {
Name: "Test", // b := &memoryBackend{
Unread: microsub.Unread{ // hubIncomingBackend: tt.fields.hubIncomingBackend,
Type: microsub.UnreadCount, // lock: tt.fields.lock,
Unread: false, // Channels: tt.fields.Channels,
UnreadCount: 0, // Feeds: tt.fields.Feeds,
}, // Settings: tt.fields.Settings,
}, // NextUID: tt.fields.NextUID,
wantErr: false, // Me: tt.fields.Me,
}, // TokenEndpoint: tt.fields.TokenEndpoint,
} // AuthEnabled: tt.fields.AuthEnabled,
for _, tt := range tests { // ticker: tt.fields.ticker,
t.Run(tt.name, func(t *testing.T) { // quit: tt.fields.quit,
b := &memoryBackend{ // broker: tt.fields.broker,
hubIncomingBackend: tt.fields.hubIncomingBackend, // pool: tt.fields.pool,
lock: tt.fields.lock, // }
Channels: tt.fields.Channels, // got, err := b.ChannelsCreate(tt.args.name)
Feeds: tt.fields.Feeds, // if (err != nil) != tt.wantErr {
Settings: tt.fields.Settings, // t.Errorf("ChannelsCreate() error = %v, wantErr %v", err, tt.wantErr)
NextUID: tt.fields.NextUID, // return
Me: tt.fields.Me, // }
TokenEndpoint: tt.fields.TokenEndpoint, // if !reflect.DeepEqual(got, tt.want) {
AuthEnabled: tt.fields.AuthEnabled, // t.Errorf("ChannelsCreate() got = %v, want %v", got, tt.want)
ticker: tt.fields.ticker, // }
quit: tt.fields.quit, // })
broker: tt.fields.broker, // }
pool: tt.fields.pool, // }
} //
got, err := b.ChannelsCreate(tt.args.name) // func Test_memoryBackend_removeFeed(t *testing.T) {
if (err != nil) != tt.wantErr { // type fields struct {
t.Errorf("ChannelsCreate() error = %v, wantErr %v", err, tt.wantErr) // Channels map[string]microsub.Channel
return // Feeds map[string][]microsub.Feed
} // }
if !reflect.DeepEqual(got, tt.want) { // type args struct {
t.Errorf("ChannelsCreate() got = %v, want %v", got, tt.want) // feedID string
} // }
}) // tests := []struct {
} // name string
} // fields fields
// args args
func Test_memoryBackend_removeFeed(t *testing.T) { // lens map[string]int
type fields struct { // wantErr bool
Channels map[string]microsub.Channel // }{
Feeds map[string][]microsub.Feed // {
} // name: "remove from channel 1",
type args struct { // fields: fields{
feedID string // Channels: map[string]microsub.Channel{
} // "123": {UID: "channel1", Name: "Channel 1"},
tests := []struct { // "124": {UID: "channel2", Name: "Channel 2"},
name string // },
fields fields // Feeds: map[string][]microsub.Feed{
args args // "123": {{Type: "feed", URL: "feed1", Name: "Feed1"}},
lens map[string]int // "124": {{Type: "feed", URL: "feed2", Name: "Feed2"}},
wantErr bool // },
}{ // },
{ // args: args{feedID: "feed1"},
name: "remove from channel 1", // lens: map[string]int{"123": 0, "124": 1},
fields: fields{ // wantErr: false,
Channels: map[string]microsub.Channel{ // },
"123": {UID: "channel1", Name: "Channel 1"}, // {
"124": {UID: "channel2", Name: "Channel 2"}, // name: "remove from channel 2",
}, // fields: fields{
Feeds: map[string][]microsub.Feed{ // Channels: map[string]microsub.Channel{
"123": {{Type: "feed", URL: "feed1", Name: "Feed1"}}, // "123": {UID: "channel1", Name: "Channel 1"},
"124": {{Type: "feed", URL: "feed2", Name: "Feed2"}}, // "124": {UID: "channel2", Name: "Channel 2"},
}, // },
}, // Feeds: map[string][]microsub.Feed{
args: args{feedID: "feed1"}, // "123": {{Type: "feed", URL: "feed1", Name: "Feed1"}},
lens: map[string]int{"123": 0, "124": 1}, // "124": {{Type: "feed", URL: "feed2", Name: "Feed2"}},
wantErr: false, // },
}, // },
{ // args: args{feedID: "feed2"},
name: "remove from channel 2", // lens: map[string]int{"123": 1, "124": 0},
fields: fields{ // wantErr: false,
Channels: map[string]microsub.Channel{ // },
"123": {UID: "channel1", Name: "Channel 1"}, // {
"124": {UID: "channel2", Name: "Channel 2"}, // name: "remove unknown",
}, // fields: fields{
Feeds: map[string][]microsub.Feed{ // Channels: map[string]microsub.Channel{
"123": {{Type: "feed", URL: "feed1", Name: "Feed1"}}, // "123": {UID: "channel1", Name: "Channel 1"},
"124": {{Type: "feed", URL: "feed2", Name: "Feed2"}}, // "124": {UID: "channel2", Name: "Channel 2"},
}, // },
}, // Feeds: map[string][]microsub.Feed{
args: args{feedID: "feed2"}, // "123": {{Type: "feed", URL: "feed1", Name: "Feed1"}},
lens: map[string]int{"123": 1, "124": 0}, // "124": {{Type: "feed", URL: "feed2", Name: "Feed2"}},
wantErr: false, // },
}, // },
{ // args: args{feedID: "feed3"},
name: "remove unknown", // lens: map[string]int{"123": 1, "124": 1},
fields: fields{ // wantErr: false,
Channels: map[string]microsub.Channel{ // },
"123": {UID: "channel1", Name: "Channel 1"}, // {
"124": {UID: "channel2", Name: "Channel 2"}, // name: "remove from 0 channels",
}, // fields: fields{
Feeds: map[string][]microsub.Feed{ // Channels: map[string]microsub.Channel{},
"123": {{Type: "feed", URL: "feed1", Name: "Feed1"}}, // Feeds: map[string][]microsub.Feed{},
"124": {{Type: "feed", URL: "feed2", Name: "Feed2"}}, // },
}, // args: args{feedID: "feed3"},
}, // lens: map[string]int{},
args: args{feedID: "feed3"}, // wantErr: false,
lens: map[string]int{"123": 1, "124": 1}, // },
wantErr: false, // {
}, // name: "remove from multiple channels",
{ // fields: fields{
name: "remove from 0 channels", // Channels: map[string]microsub.Channel{
fields: fields{ // "123": {UID: "channel1", Name: "Channel 1"},
Channels: map[string]microsub.Channel{}, // "124": {UID: "channel2", Name: "Channel 2"},
Feeds: map[string][]microsub.Feed{}, // },
}, // Feeds: map[string][]microsub.Feed{
args: args{feedID: "feed3"}, // "123": {{Type: "feed", URL: "feed1", Name: "Feed1"}},
lens: map[string]int{}, // "124": {{Type: "feed", URL: "feed1", Name: "Feed1"}},
wantErr: false, // },
}, // },
{ // args: args{feedID: "feed1"},
name: "remove from multiple channels", // lens: map[string]int{"123": 0, "124": 0},
fields: fields{ // wantErr: false,
Channels: map[string]microsub.Channel{ // },
"123": {UID: "channel1", Name: "Channel 1"}, // }
"124": {UID: "channel2", Name: "Channel 2"}, // for _, tt := range tests {
}, // t.Run(tt.name, func(t *testing.T) {
Feeds: map[string][]microsub.Feed{ // b := &memoryBackend{
"123": {{Type: "feed", URL: "feed1", Name: "Feed1"}}, // Channels: tt.fields.Channels,
"124": {{Type: "feed", URL: "feed1", Name: "Feed1"}}, // Feeds: tt.fields.Feeds,
}, // }
}, // if err := b.removeFeed(tt.args.feedID); (err != nil) != tt.wantErr {
args: args{feedID: "feed1"}, // t.Errorf("removeFeed() error = %v, wantErr %v", err, tt.wantErr)
lens: map[string]int{"123": 0, "124": 0}, // }
wantErr: false, // assert.Len(t, b.Channels, len(tt.lens))
}, // for k, v := range tt.lens {
} // assert.Len(t, b.Feeds[k], v)
for _, tt := range tests { // }
t.Run(tt.name, func(t *testing.T) { // })
b := &memoryBackend{ // }
Channels: tt.fields.Channels, // }
Feeds: tt.fields.Feeds,
}
if err := b.removeFeed(tt.args.feedID); (err != nil) != tt.wantErr {
t.Errorf("removeFeed() error = %v, wantErr %v", err, tt.wantErr)
}
assert.Len(t, b.Channels, len(tt.lens))
for k, v := range tt.lens {
assert.Len(t, b.Feeds[k], v)
}
})
}
}

View File

@ -4,7 +4,7 @@ services:
image: "redis:5" image: "redis:5"
database: database:
image: postgres image: postgres:14
volumes: volumes:
- database-data:/var/lib/postgresql/data - database-data:/var/lib/postgresql/data
environment: environment:
@ -14,21 +14,16 @@ services:
POSTGRES_HOST_AUTH_METHOD: trust POSTGRES_HOST_AUTH_METHOD: trust
web: web:
image: "pstuifzand/ekster:alpine" image: ubuntu
working_dir: /opt/microsub working_dir: /app
links:
- redis:redis
volumes: volumes:
- microsub-data:/opt/microsub
- ./templates:/app/templates
- ./eksterd:/app/eksterd - ./eksterd:/app/eksterd
- ./backend.json:/app/backend.json
entrypoint: /app/eksterd entrypoint: /app/eksterd
command: -auth=false -port 80 -templates templates command: -auth=false -port 80
ports: ports:
- 8089:80 - 8089:80
environment: environment:
- "FEEDBIN_USER="
- "FEEDBIN_PASS="
- "EKSTER_BASEURL=http://localhost:8089/" - "EKSTER_BASEURL=http://localhost:8089/"
- "EKSTER_TEMPLATES=/app/templates" - "EKSTER_TEMPLATES=/app/templates"

9
go.mod
View File

@ -3,15 +3,14 @@ module p83.nl/go/ekster
go 1.16 go 1.16
require ( require (
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751
github.com/axgle/mahonia v0.0.0-20180208002826-3358181d7394 github.com/axgle/mahonia v0.0.0-20180208002826-3358181d7394
github.com/blevesearch/bleve/v2 v2.0.3 // indirect github.com/blevesearch/bleve/v2 v2.0.3
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/gilliek/go-opml v1.0.0 github.com/gilliek/go-opml v1.0.0
github.com/golang-migrate/migrate/v4 v4.15.1
github.com/gomodule/redigo v1.8.2 github.com/gomodule/redigo v1.8.2
github.com/lib/pq v1.10.1 github.com/lib/pq v1.10.1
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/stretchr/testify v1.5.1 github.com/stretchr/testify v1.7.0
golang.org/x/net v0.0.0-20200707034311-ab3426394381 golang.org/x/net v0.0.0-20211013171255-e13a2654a71e
willnorris.com/go/microformats v1.1.0 willnorris.com/go/microformats v1.1.0
) )

1449
go.sum

File diff suppressed because it is too large Load Diff

View File

@ -2,9 +2,12 @@ package timeline
import ( import (
"context" "context"
"crypto/sha256"
"database/sql" "database/sql"
"encoding/hex"
"fmt" "fmt"
"log" "log"
"strconv"
"strings" "strings"
"time" "time"
@ -32,46 +35,46 @@ func (p *postgresStream) Init() error {
if err != nil { if err != nil {
return fmt.Errorf("database ping failed: %w", err) return fmt.Errorf("database ping failed: %w", err)
} }
//
// _, err = conn.ExecContext(ctx, `
// CREATE TABLE IF NOT EXISTS "channels" (
// "id" int primary key generated always as identity,
// "name" varchar(255) unique,
// "created_at" timestamp DEFAULT current_timestamp
// );
// `)
// if err != nil {
// return fmt.Errorf("create channels table failed: %w", err)
// }
//
// _, err = conn.ExecContext(ctx, `
// CREATE TABLE IF NOT EXISTS "items" (
// "id" int primary key generated always as identity,
// "channel_id" int references "channels" on delete cascade,
// "uid" varchar(512) not null unique,
// "is_read" int default 0,
// "data" jsonb,
// "created_at" timestamp DEFAULT current_timestamp,
// "updated_at" timestamp,
// "published_at" timestamp
// );
// `)
// if err != nil {
// return fmt.Errorf("create items table failed: %w", err)
// }
//
// _, err = conn.ExecContext(ctx, `ALTER TABLE "items" ALTER COLUMN "data" TYPE jsonb, ALTER COLUMN "uid" TYPE varchar(1024)`)
// if err != nil {
// return fmt.Errorf("alter items table failed: %w", err)
// }
_, err = conn.ExecContext(ctx, ` _, err = conn.ExecContext(ctx, `INSERT INTO "channels" ("uid", "name", "created_at") VALUES ($1, $1, DEFAULT)
CREATE TABLE IF NOT EXISTS "channels" (
"id" int primary key generated always as identity,
"name" varchar(255) unique,
"created_at" timestamp DEFAULT current_timestamp
);
`)
if err != nil {
return fmt.Errorf("create channels table failed: %w", err)
}
_, err = conn.ExecContext(ctx, `
CREATE TABLE IF NOT EXISTS "items" (
"id" int primary key generated always as identity,
"channel_id" int references "channels" on delete cascade,
"uid" varchar(512) not null unique,
"is_read" int default 0,
"data" jsonb,
"created_at" timestamp DEFAULT current_timestamp,
"updated_at" timestamp,
"published_at" timestamp
);
`)
if err != nil {
return fmt.Errorf("create items table failed: %w", err)
}
_, err = conn.ExecContext(ctx, `ALTER TABLE "items" ALTER COLUMN "data" TYPE jsonb, ALTER COLUMN "uid" TYPE varchar(1024)`)
if err != nil {
return fmt.Errorf("alter items table failed: %w", err)
}
_, err = conn.ExecContext(ctx, `INSERT INTO "channels" ("name", "created_at") VALUES ($1, DEFAULT)
ON CONFLICT DO NOTHING`, p.channel) ON CONFLICT DO NOTHING`, p.channel)
if err != nil { if err != nil {
return fmt.Errorf("create channel failed: %w", err) return fmt.Errorf("create channel failed: %w", err)
} }
row := conn.QueryRowContext(ctx, `SELECT "id" FROM "channels" WHERE "name" = $1`, p.channel) row := conn.QueryRowContext(ctx, `SELECT "id" FROM "channels" WHERE "uid" = $1`, p.channel)
if row == nil { if row == nil {
return fmt.Errorf("fetch channel failed: %w", err) return fmt.Errorf("fetch channel failed: %w", err)
} }
@ -144,7 +147,7 @@ WHERE "channel_id" = $1
last = publishedAt last = publishedAt
item.Read = isRead == 1 item.Read = isRead == 1
item.ID = uid item.ID = strconv.Itoa(id)
item.Published = publishedAt item.Published = publishedAt
tl.Items = append(tl.Items, item) tl.Items = append(tl.Items, item)
@ -208,14 +211,23 @@ func (p *postgresStream) AddItem(item microsub.Item) (bool, error) {
} }
t = t2 t = t2
} }
if item.UID == "" {
h := sha256.Sum256([]byte(fmt.Sprintf("%s:%d", p.channel, time.Now().UnixNano())))
item.UID = hex.EncodeToString(h[:])
}
feedID, err := strconv.ParseInt(item.Source.ID, 10, 64)
if err != nil {
return false, fmt.Errorf("ERROR: item.Source.ID is not an integer %q: %w", item.Source.ID, err)
}
result, err := conn.ExecContext(context.Background(), ` result, err := conn.ExecContext(context.Background(), `
INSERT INTO "items" ("channel_id", "uid", "data", "published_at", "created_at") INSERT INTO "items" ("channel_id", "feed_id", "uid", "data", "published_at", "created_at")
VALUES ($1, $2, $3, $4, DEFAULT) VALUES ($1, $2, $3, $4, $5, DEFAULT)
ON CONFLICT ON CONSTRAINT "items_uid_key" DO NOTHING ON CONFLICT ON CONSTRAINT "items_uid_key" DO NOTHING
`, p.channelID, item.ID, &item, t) `, p.channelID, feedID, item.UID, &item, t)
if err != nil { if err != nil {
return false, fmt.Errorf("while adding item: %w", err) return false, fmt.Errorf("insert item: %w", err)
} }
c, err := result.RowsAffected() c, err := result.RowsAffected()
if err != nil { if err != nil {