package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"os"
	"strings"

	"github.com/blevesearch/bleve"
	"github.com/blevesearch/bleve/mapping"
)

// TODO: http handler
// TODO: index all pages on start
// TODO: reindex all command
// TODO: search(query) command

// searchHandler serves full-text search requests against the bleve page index.
type searchHandler struct {
	indexMapping mapping.IndexMapping
	searchIndex  bleve.Index
}

// nameLine is a single backreference: the page it lives on and the line text.
type nameLine struct {
	Name  string `json:"name"`
	Title string `json:"title"`
	Line  string `json:"line"`
}

// searchObject is the document shape stored in the bleve index for each page.
type searchObject struct {
	Title  string            `json:"title"`
	Blocks []string          `json:"blocks"`
	Refs   []nameLine        `json:"refs"`
	Meta   map[string]string `json:"meta"`
}

// NewSearchHandler wraps an opened bleve index in an http.Handler.
func NewSearchHandler(searchIndex bleve.Index) (http.Handler, error) {
	return &searchHandler{
		searchIndex: searchIndex,
	}, nil
}

func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	defer r.Body.Close()

	sess, err := NewSession(w, r)
	if err != nil {
		http.Error(w, err.Error(), 500)
		return
	}
	defer func() {
		if err := sess.Flush(); err != nil {
			log.Println(err)
		}
	}()
	if !sess.LoggedIn {
		fmt.Fprint(w, "{}")
		return
	}

	if r.URL.Query().Get("fields") == "1" {
		// List the fields known to the index, mostly useful for debugging.
		fields, err := s.searchIndex.Fields()
		if err != nil {
			http.Error(w, err.Error(), 500)
			return
		}
		enc := json.NewEncoder(w)
		enc.SetIndent("", " ")
		err = enc.Encode(&fields)
		if err != nil {
			http.Error(w, err.Error(), 500)
			return
		}
		return
	} else if r.Method == "GET" && r.URL.Query().Get("reset") == "1" {
		w.Header().Add("Content-Type", "text/html")
		fmt.Fprint(w, `
`)
		return
	} else if r.Method == "POST" {
		err = r.ParseForm()
		if err != nil {
			http.Error(w, err.Error(), 500)
			return
		}
		if r.PostForm.Get("reset") == "1" {
			// Rebuild backrefs and the search index from all pages on disk.
			refs := make(Refs)
			mp := NewFilePages("data", nil)
			pages, err := mp.AllPages()
			if err != nil {
				http.Error(w, err.Error(), 500)
				return
			}
			for _, page := range pages {
				err = processBackrefsForPage(page, refs)
				if err != nil {
					log.Println("error while processing backrefs: ", err)
					continue
				}
			}
			err = saveBackrefs("data/backrefs.json", refs)
			if err != nil {
				log.Printf("error while saving backrefs: %v", err)
				http.Error(w, err.Error(), 500)
				return
			}

			// Build a fresh index in a temporary directory, then swap it in.
			err = os.RemoveAll("data/_tmp_index")
			if err != nil {
				log.Printf("error while removing stale temporary index: %v", err)
				http.Error(w, err.Error(), 500)
				return
			}
			indexMapping := bleve.NewIndexMapping()
			index, err := bleve.New("data/_tmp_index", indexMapping)
			if err != nil {
				http.Error(w, err.Error(), 500)
				return
			}
			for _, page := range pages {
				so, err := createSearchObject(page)
				if err != nil {
					log.Printf("error while creating search object %s: %v", page.Title, err)
					continue
				}
				err = index.Index(page.Name, so)
				if err != nil {
					log.Printf("error while indexing %s: %v", page.Title, err)
					continue
				}
			}
			// Close the temporary index before renaming it into place so its
			// file handles are not left open on the old path.
			if err := index.Close(); err != nil {
				log.Printf("error while closing temporary index: %v", err)
			}
			err = os.Rename("data/_page-index", "data/_page-index-old")
			if err != nil {
				log.Printf("error while resetting index: %v", err)
				http.Error(w, err.Error(), 500)
				return
			}
			err = os.Rename("data/_tmp_index", "data/_page-index")
			if err != nil {
				log.Printf("error while putting new index in place: %v", err)
				http.Error(w, err.Error(), 500)
				return
			}
			err = os.RemoveAll("data/_page-index-old")
			if err != nil {
				log.Printf("error while removing old index: %v", err)
				http.Error(w, err.Error(), 500)
				return
			}
			enc := json.NewEncoder(w)
			enc.SetIndent("", " ")
			err = enc.Encode(struct {
				Ok bool `json:"ok"`
			}{Ok: true})
			if err != nil {
				http.Error(w, err.Error(), 500)
				return
			}
		}
		return
	}

	// Default: run the query string from ?q= against the index.
	q := bleve.NewQueryStringQuery(r.URL.Query().Get("q"))
	sr := bleve.NewSearchRequest(q)
	results, err := s.searchIndex.Search(sr)
	if err != nil {
		http.Error(w, err.Error(), 500)
		return
	}
	enc := json.NewEncoder(w)
	enc.SetIndent("", " ")
	err = enc.Encode(&results)
	if err != nil {
		http.Error(w, err.Error(), 500)
	}
}

// createSearchObject flattens a page into the document that gets indexed.
// Page content that parses as a JSON list of blocks is indexed block by
// block; "key:: value" blocks additionally become metadata entries.
func createSearchObject(page Page) (searchObject, error) {
	so := searchObject{}
	so.Title = page.Title
	so.Meta = make(map[string]string)

	type simpleListItem struct {
		Text string
	}
	var listItems []simpleListItem
	if err := json.NewDecoder(strings.NewReader(page.Content)).Decode(&listItems); err != nil {
		// Not block-structured content: index it as a single block.
		so.Blocks = append(so.Blocks, page.Content)
	} else {
		for _, li := range listItems {
			meta := strings.SplitN(li.Text, "::", 2)
			if len(meta) == 2 {
				so.Meta[strings.ToLower(strings.TrimSpace(meta[0]))] = strings.ToLower(strings.TrimSpace(meta[1]))
			}
			so.Blocks = append(so.Blocks, li.Text)
		}
	}
	for _, refs := range page.Refs {
		for _, ref := range refs {
			so.Refs = append(so.Refs, nameLine{
				ref.Name,
				ref.Title,
				ref.Line,
			})
		}
	}
	return so, nil
}
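
// The function below is an illustrative sketch, not part of the original
// handler: it shows one way the TODOs above ("index all pages on start",
// "http handler") could be wired up, assuming bleve.Open / bleve.New and
// NewSearchHandler as defined in this file. The route path "/search" and the
// name setupSearch are hypothetical.
func setupSearch(mux *http.ServeMux) error {
	// Reuse an existing on-disk index; create an empty one on first run so
	// the handler can serve (empty) results until a reset=1 POST rebuilds it.
	index, err := bleve.Open("data/_page-index")
	if err == bleve.ErrorIndexPathDoesNotExist {
		index, err = bleve.New("data/_page-index", bleve.NewIndexMapping())
	}
	if err != nil {
		return err
	}
	h, err := NewSearchHandler(index)
	if err != nil {
		return err
	}
	mux.Handle("/search", h)
	return nil
}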