Compare commits

..

No commits in common. "27200abc582dea2ff9a58a6f35f5fb4b302e8e9a" and "5000ab768a8db74f23a4928aa9535baa9747326e" have entirely different histories.

4 changed files with 51 additions and 156 deletions

View File

@@ -157,24 +157,22 @@ function showSearchResultsExtended(element, template, searchTool, query, input,
})
}
function formatLineResult(hits, key) {
function formatLineResult(hit) {
return [
{
text: "[[" + key + "]]",
text: "[[" + hit.title + "]]",
indented: 0,
fold: 'open',
hidden: false,
fleeting: true
},
..._.map(hits, (hit) => {
return {
text: hit.line,
indented: 1,
fold: 'open',
hidden: false,
fleeting: true
}
})
{
text: hit.line,
indented: 1,
fold: 'open',
hidden: false,
fleeting: true
}
]
}
@@ -274,15 +272,11 @@ function Editor(holder, input) {
el("td", [renderInline(rowText)]),
..._.map(header.children, col => {
let td = el("td")
let key = _.trim(col.text);
let value = _.get(rowData, key)
if (_.isObject(value) && value.line) {
value = value.line
}
let value = rowData[_.snakeCase(_.trim(col.text))];
if (col.children && col.children.length > 0) {
value = col.children[0].text
}
transform(typeof value === 'string' ? value.replace(/^:/, '=') : '', $(td), id, editor, rowData)
transform(value ? value.replace(/^:/, '=') : '', $(td), id, editor, rowData)
return td
})
])
@@ -361,13 +355,10 @@ function Editor(holder, input) {
} else if (converted.startsWith("=", 0)) {
converted = transformMathExpression(converted, scope);
} else {
let re = /^([A-Z0-9 ]+)::\s*(.*)$/i;
let re = /^([A-Z0-9 ]+)::\s*(.+)$/i;
let res = text.match(re)
if (res) {
converted = '<span class="metadata-key">[[' + res[1] + ']]</span>'
if (res[2]) {
converted += ': ' + res[2]
}
converted = '**[[' + res[1] + ']]**: ' + res[2]
} else if (text.match(/#\[\[TODO]]/)) {
converted = converted.replace('#[[TODO]]', '<input class="checkbox" type="checkbox" />')
todo = true;
@@ -638,7 +629,6 @@ function Editor(holder, input) {
.finally(() => editor.render())
} else {
return search.startQuery(res[2])
.then(hits => _.groupBy(hits, (it) => it.title))
.then(hits => _.flatMap(hits, formatLineResult))
.then(results => editor.replaceChildren(id, results))
.finally(() => editor.render())

View File

@@ -48,20 +48,14 @@ body {
font-family: 'Inter', sans-serif;
}
.highlight-links {
.content a.wiki-link::before {
content: "[[";
color: #ccc;
}
.content a.wiki-link::after {
content: "]]";
color: #ccc;
}
.content a.wiki-link::before {
content: "[[";
color: #ccc;
}
.metadata-key a {
font-weight: bold;
color: black;
.content a.wiki-link::after {
content: "]]";
color: #ccc;
}
@supports (font-variation-settings: normal) {

129
search.go
View File

@@ -48,11 +48,11 @@ type nameLine struct {
}
type searchObject struct {
Title string `json:"title"`
Blocks []string `json:"blocks"`
Refs []nameLine `json:"refs"`
Meta map[string]interface{} `json:"meta"`
Links []ParsedLink `json:"links"`
Title string `json:"title"`
Blocks []string `json:"blocks"`
Refs []nameLine `json:"refs"`
Meta map[string]string `json:"meta"`
Links []ParsedLink `json:"links"`
}
func NewSearchHandler(searchIndex bleve.Index) (http.Handler, error) {
@@ -117,7 +117,7 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
for _, page := range pages {
err = saveBlocksFromPage("data", page)
if err != nil {
log.Printf("error while processing blocks from page %s: %v", page.Name, err)
log.Printf("error while processing blocks from page %s: %w", page.Name, err)
continue
}
}
@@ -141,7 +141,7 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
log.Println("saveLinks")
err = saveLinks(mp)
if err != nil {
log.Printf("error while saving links %v", err)
log.Printf("error while saving links %w", err)
http.Error(w, err.Error(), 500)
return
}
@@ -149,14 +149,14 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
log.Println("saveBackrefs")
err = saveBackrefs("data/backrefs.json", refs)
if err != nil {
log.Printf("error while saving backrefs %v", err)
log.Printf("error while saving backrefs %w", err)
http.Error(w, err.Error(), 500)
return
}
err = os.RemoveAll("data/_tmp_index")
if err != nil {
log.Printf("error while remove old index %v", err)
log.Printf("error while remove old index %w", err)
http.Error(w, err.Error(), 500)
return
}
@@ -170,14 +170,14 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
for _, page := range pages {
searchObjects, err := createSearchObjects(page.Name)
if err != nil {
log.Printf("error while creating search object %s: %v", page.Title, err)
log.Printf("error while creating search object %s: %w", page.Title, err)
continue
}
for _, so := range searchObjects {
err = index.Index(so.ID, so)
if err != nil {
log.Printf("error while indexing %s: %v", page.Title, err)
log.Printf("error while indexing %s: %w", page.Title, err)
continue
}
}
@@ -185,19 +185,19 @@ func (s *searchHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
err = os.Rename("data/_page-index", "data/_page-index-old")
if err != nil {
log.Printf("error while resetting index: %v", err)
log.Printf("error while resetting index: %w", err)
http.Error(w, err.Error(), 500)
return
}
err = os.Rename("data/_tmp_index", "data/_page-index")
if err != nil {
log.Printf("error while putthing new index in place: %v", err)
log.Printf("error while putthing new index in place: %w", err)
http.Error(w, err.Error(), 500)
return
}
err = os.RemoveAll("data/_page-index-old")
if err != nil {
log.Printf("error while remove old index %v", err)
log.Printf("error while remove old index %w", err)
http.Error(w, err.Error(), 500)
return
}
@@ -294,98 +294,26 @@ func createSearchObjects(rootBlockID string) ([]pageBlock, error) {
func createStructuredFormat(page Page) (searchObject, error) {
so := searchObject{}
so.Title = page.Title
so.Meta = make(map[string]interface{})
so.Meta = make(map[string]string)
type simpleListItem struct {
Text string
ID string
Indented int
Text string
ID string
}
type parent struct {
key string
indent int
items []interface{}
values map[string]interface{}
}
var parents []parent
parents = append(parents, parent{
values: make(map[string]interface{}),
})
var listItems []simpleListItem
if err := json.NewDecoder(strings.NewReader(page.Content)).Decode(&listItems); err != nil {
so.Blocks = append(so.Blocks, page.Content)
} else {
for _, li := range listItems {
meta := strings.SplitN(li.Text, "::", 2)
par := parents[len(parents)-1]
// merge up
for len(parents) > 1 && li.Indented <= par.indent {
parents = parents[:len(parents)-1]
nextTop := parents[len(parents)-1]
if len(par.values) > 0 {
if vals, e := nextTop.values[par.key]; e {
if vals2, ok := vals.(map[string]interface{}); ok {
for k, v := range par.values {
vals2[k] = v
}
nextTop.values[par.key] = vals2
}
} else {
nextTop.values[par.key] = par.values
}
} else if len(par.items) > 0 {
nextTop.values[par.key] = par.items
} else {
nextTop.values[par.key] = ""
}
parents[len(parents)-1] = nextTop
par = parents[len(parents)-1]
}
if len(meta) == 2 {
key := strcase.ToSnake(strings.TrimSpace(meta[0]))
value := strings.TrimSpace(meta[1])
if value == "" {
parents = append(parents, parent{
key: key,
indent: li.Indented,
values: make(map[string]interface{}),
})
} else {
if len(parents) > 0 {
par = parents[len(parents)-1]
// save new value
if li.Indented > par.indent {
links, err := ParseLinks(li.ID, value)
if err != nil {
par.values[key] = value
} else {
if len(links) > 0 {
links[0].Href = fmt.Sprintf("%s%s", *baseurl, links[0].PageName)
links[0].ID = ""
par.values[key] = links[0]
} else {
par.values[key] = value
}
}
}
parents[len(parents)-1] = par
}
if key == "title" {
so.Title = value
}
} else {
links, err := ParseLinks(li.ID, li.Text)
if err != nil {
par.items = append(par.items, li.Text)
} else if len(links) > 0 {
links[0].Href = fmt.Sprintf("%s%s", *baseurl, links[0].PageName)
links[0].ID = ""
par.items = append(par.items, links[0])
} else {
par.items = append(par.items, li.Text)
}
parents[len(parents)-1] = par
so.Meta[key] = value
}
so.Blocks = append(so.Blocks, li.Text)
@@ -403,23 +331,6 @@ func createStructuredFormat(page Page) (searchObject, error) {
}
}
// merge up
for len(parents) > 1 {
par := parents[len(parents)-1]
parents = parents[:len(parents)-1]
nextTop := parents[len(parents)-1]
if len(par.values) > 0 {
nextTop.values[par.key] = par.values
} else if len(par.items) > 0 {
nextTop.values[par.key] = par.items
} else {
nextTop.values[par.key] = ""
}
parents[len(parents)-1] = nextTop
}
so.Meta = parents[0].values
for _, refs := range page.Refs {
for _, ref := range refs {
so.Refs = append(so.Refs, nameLine{

24
util.go
View File

@@ -53,11 +53,11 @@ var (
)
type ParsedLink struct {
ID string `json:"ID,omitempty"`
Name string `json:"title,omitempty"`
PageName string `json:"name,omitempty"`
Line string `json:"line,omitempty"`
Href string `json:"href,omitempty"`
ID string `json:"ID"`
Name string `json:"title"`
PageName string `json:"name"`
Line string `json:"line"`
Href string `json:"href"`
}
const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
@@ -82,13 +82,6 @@ func ParseLinks(blockId string, content string) ([]ParsedLink, error) {
for scanner.Scan() {
line := scanner.Text()
keywords := keywordsRE.FindAllStringSubmatch(line, -1)
for _, matches := range keywords {
link := matches[1]
l := cleanNameURL(link)
result = append(result, ParsedLink{blockId, link, l, line, ""})
}
links := hrefRE.FindAllStringSubmatch(line, -1)
for _, matches := range links {
@@ -105,6 +98,13 @@ func ParseLinks(blockId string, content string) ([]ParsedLink, error) {
l := cleanNameURL(link)
result = append(result, ParsedLink{blockId, link, l, line, ""})
}
keywords := keywordsRE.FindAllStringSubmatch(line, -1)
for _, matches := range keywords {
link := matches[1]
l := cleanNameURL(link)
result = append(result, ParsedLink{blockId, link, l, line, ""})
}
}
return result, nil